commit e40e28d9e43209bc08f3014b375c97ceb4f2b253
Author: kura
Date:   Wed Nov 6 11:40:07 2024 +0800

    init

diff --git a/.DS_Store b/.DS_Store
new file mode 100644
index 0000000..0dacc95
Binary files /dev/null and b/.DS_Store differ
diff --git a/convert_image_to_webp.cpp b/convert_image_to_webp.cpp
new file mode 100644
index 0000000..104f117
--- /dev/null
+++ b/convert_image_to_webp.cpp
@@ -0,0 +1,64 @@
+#include <cstdint>
+#include <cstdio>
+#include <cstdlib>
+#include "webp/encode.h" // WebP header, used to encode WebP
+#include "stb_image.h" // stb_image header, used to decode PNG/JPG
+#define STB_IMAGE_RESIZE_IMPLEMENTATION
+#include "stb_image_resize.h"
+
+extern "C" {
+
+    // Convert the input JPG/PNG data to WebP and return the WebP data
+    unsigned char *convert_image_to_webp(const uint8_t *input_data, size_t input_size, int target_width, int target_height, float quality_factor, size_t *output_size) {
+        int width, height, channels;
+
+
+        // Decode the input image with stb_image (force RGB rather than RGBA)
+        unsigned char *decoded_data = stbi_load_from_memory(input_data, input_size, &width, &height, &channels, 3); // force load as RGB
+
+        if (!decoded_data) {
+            return nullptr; // image decoding failed
+        }
+
+        // Resize the image if requested
+        unsigned char *resized_data = decoded_data;
+        if (target_width > 0 && target_height > 0) {
+            // Allocate a buffer for the resized image
+            resized_data = (unsigned char *)malloc(target_width * target_height * 3); // 3 channels (RGB)
+
+            if (!resized_data) {
+                // Handle allocation failure
+                fprintf(stderr, "Failed to allocate memory for resized image.\n");
+                stbi_image_free(decoded_data); // free the original decoded data
+                return NULL;
+            }
+
+            // Resize the image with stb_image_resize
+            int result = stbir_resize_uint8(decoded_data, width, height, 0,
+                                            resized_data, target_width, target_height, 0, 3);
+
+            if (!result) {
+                // If resizing failed, free the memory that was allocated
+                fprintf(stderr, "Image resizing failed.\n");
+                free(resized_data);
+                stbi_image_free(decoded_data);
+                return NULL;
+            }
+        } else {
+            target_width = width;
+            target_height = height;
+        }
+
+        // Encode the RGB image to WebP with libwebp's lossy encoder (WebPEncodeRGB)
+        unsigned char *webp_output = NULL;
+        *output_size = WebPEncodeRGB(resized_data, target_width, target_height, target_width * 3, quality_factor, &webp_output);
+
+        // Free the decoded image memory
+        stbi_image_free(decoded_data);
+        if (resized_data != decoded_data) {
+            free(resized_data); // free the resized buffer if the image was resized
+        }
+
+        return webp_output; // return the WebP-encoded data
+    }
+}
\ No newline at end of file
diff --git a/index_ffmpeg.html b/index_ffmpeg.html
new file mode 100644
index 0000000..e02c7a5
--- /dev/null
+++ b/index_ffmpeg.html
@@ -0,0 +1,3 @@
+
+
\ No newline at end of file
diff --git a/libsharpyuv.a b/libsharpyuv.a
new file mode 100644
index 0000000..66e2cf2
Binary files /dev/null and b/libsharpyuv.a differ
diff --git a/libwebp.a b/libwebp.a
new file mode 100644
index 0000000..5e8b1cb
Binary files /dev/null and b/libwebp.a differ
diff --git a/libwebpdecoder.a b/libwebpdecoder.a
new file mode 100644
index 0000000..df6a1ce
Binary files /dev/null and b/libwebpdecoder.a differ
diff --git a/libwebpdemux.a b/libwebpdemux.a
new file mode 100644
index 0000000..6b2da15
Binary files /dev/null and b/libwebpdemux.a differ
diff --git a/libwebpmux.a b/libwebpmux.a
new file mode 100644
index 0000000..87b7d95
Binary files /dev/null and b/libwebpmux.a differ
diff --git a/node_modules/.bin/esbuild b/node_modules/.bin/esbuild
new file mode 120000
index 0000000..c83ac07
--- /dev/null
+++ b/node_modules/.bin/esbuild
@@ -0,0 +1 @@
+../esbuild/bin/esbuild
\ No newline at end of file
diff --git a/node_modules/.bin/nanoid b/node_modules/.bin/nanoid
new file mode 120000
index 0000000..e2be547
--- /dev/null
+++ b/node_modules/.bin/nanoid
@@ -0,0 +1 @@
+../nanoid/bin/nanoid.cjs \ No newline at end of file diff --git a/node_modules/.bin/rollup b/node_modules/.bin/rollup new file mode 120000 index 0000000..5939621 --- /dev/null +++ b/node_modules/.bin/rollup @@ -0,0 +1 @@ +../rollup/dist/bin/rollup \ No newline at end of file diff --git a/node_modules/.bin/vite b/node_modules/.bin/vite new file mode 120000 index 0000000..6d1e3be --- /dev/null +++ b/node_modules/.bin/vite @@ -0,0 +1 @@ +../vite/bin/vite.js \ No newline at end of file diff --git a/node_modules/.yarn-integrity b/node_modules/.yarn-integrity new file mode 100644 index 0000000..535990f --- /dev/null +++ b/node_modules/.yarn-integrity @@ -0,0 +1,69 @@ +{ + "systemParams": "darwin-arm64-108", + "modulesFolders": [ + "node_modules" + ], + "flags": [], + "linkedModules": [], + "topLevelPatterns": [ + "@ffmpeg/ffmpeg@^0.12.10", + "@ffmpeg/util@^0.12.1", + "vite@^5.4.8" + ], + "lockfileEntries": { + "@esbuild/aix-ppc64@0.21.5": "https://registry.yarnpkg.com/@esbuild/aix-ppc64/-/aix-ppc64-0.21.5.tgz#c7184a326533fcdf1b8ee0733e21c713b975575f", + "@esbuild/android-arm64@0.21.5": "https://registry.yarnpkg.com/@esbuild/android-arm64/-/android-arm64-0.21.5.tgz#09d9b4357780da9ea3a7dfb833a1f1ff439b4052", + "@esbuild/android-arm@0.21.5": "https://registry.yarnpkg.com/@esbuild/android-arm/-/android-arm-0.21.5.tgz#9b04384fb771926dfa6d7ad04324ecb2ab9b2e28", + "@esbuild/android-x64@0.21.5": "https://registry.yarnpkg.com/@esbuild/android-x64/-/android-x64-0.21.5.tgz#29918ec2db754cedcb6c1b04de8cd6547af6461e", + "@esbuild/darwin-arm64@0.21.5": "https://registry.yarnpkg.com/@esbuild/darwin-arm64/-/darwin-arm64-0.21.5.tgz#e495b539660e51690f3928af50a76fb0a6ccff2a", + "@esbuild/darwin-x64@0.21.5": "https://registry.yarnpkg.com/@esbuild/darwin-x64/-/darwin-x64-0.21.5.tgz#c13838fa57372839abdddc91d71542ceea2e1e22", + "@esbuild/freebsd-arm64@0.21.5": "https://registry.yarnpkg.com/@esbuild/freebsd-arm64/-/freebsd-arm64-0.21.5.tgz#646b989aa20bf89fd071dd5dbfad69a3542e550e", + "@esbuild/freebsd-x64@0.21.5": "https://registry.yarnpkg.com/@esbuild/freebsd-x64/-/freebsd-x64-0.21.5.tgz#aa615cfc80af954d3458906e38ca22c18cf5c261", + "@esbuild/linux-arm64@0.21.5": "https://registry.yarnpkg.com/@esbuild/linux-arm64/-/linux-arm64-0.21.5.tgz#70ac6fa14f5cb7e1f7f887bcffb680ad09922b5b", + "@esbuild/linux-arm@0.21.5": "https://registry.yarnpkg.com/@esbuild/linux-arm/-/linux-arm-0.21.5.tgz#fc6fd11a8aca56c1f6f3894f2bea0479f8f626b9", + "@esbuild/linux-ia32@0.21.5": "https://registry.yarnpkg.com/@esbuild/linux-ia32/-/linux-ia32-0.21.5.tgz#3271f53b3f93e3d093d518d1649d6d68d346ede2", + "@esbuild/linux-loong64@0.21.5": "https://registry.yarnpkg.com/@esbuild/linux-loong64/-/linux-loong64-0.21.5.tgz#ed62e04238c57026aea831c5a130b73c0f9f26df", + "@esbuild/linux-mips64el@0.21.5": "https://registry.yarnpkg.com/@esbuild/linux-mips64el/-/linux-mips64el-0.21.5.tgz#e79b8eb48bf3b106fadec1ac8240fb97b4e64cbe", + "@esbuild/linux-ppc64@0.21.5": "https://registry.yarnpkg.com/@esbuild/linux-ppc64/-/linux-ppc64-0.21.5.tgz#5f2203860a143b9919d383ef7573521fb154c3e4", + "@esbuild/linux-riscv64@0.21.5": "https://registry.yarnpkg.com/@esbuild/linux-riscv64/-/linux-riscv64-0.21.5.tgz#07bcafd99322d5af62f618cb9e6a9b7f4bb825dc", + "@esbuild/linux-s390x@0.21.5": "https://registry.yarnpkg.com/@esbuild/linux-s390x/-/linux-s390x-0.21.5.tgz#b7ccf686751d6a3e44b8627ababc8be3ef62d8de", + "@esbuild/linux-x64@0.21.5": "https://registry.yarnpkg.com/@esbuild/linux-x64/-/linux-x64-0.21.5.tgz#6d8f0c768e070e64309af8004bb94e68ab2bb3b0", + "@esbuild/netbsd-x64@0.21.5": 
"https://registry.yarnpkg.com/@esbuild/netbsd-x64/-/netbsd-x64-0.21.5.tgz#bbe430f60d378ecb88decb219c602667387a6047", + "@esbuild/openbsd-x64@0.21.5": "https://registry.yarnpkg.com/@esbuild/openbsd-x64/-/openbsd-x64-0.21.5.tgz#99d1cf2937279560d2104821f5ccce220cb2af70", + "@esbuild/sunos-x64@0.21.5": "https://registry.yarnpkg.com/@esbuild/sunos-x64/-/sunos-x64-0.21.5.tgz#08741512c10d529566baba837b4fe052c8f3487b", + "@esbuild/win32-arm64@0.21.5": "https://registry.yarnpkg.com/@esbuild/win32-arm64/-/win32-arm64-0.21.5.tgz#675b7385398411240735016144ab2e99a60fc75d", + "@esbuild/win32-ia32@0.21.5": "https://registry.yarnpkg.com/@esbuild/win32-ia32/-/win32-ia32-0.21.5.tgz#1bfc3ce98aa6ca9a0969e4d2af72144c59c1193b", + "@esbuild/win32-x64@0.21.5": "https://registry.yarnpkg.com/@esbuild/win32-x64/-/win32-x64-0.21.5.tgz#acad351d582d157bb145535db2a6ff53dd514b5c", + "@ffmpeg/ffmpeg@^0.12.10": "https://registry.yarnpkg.com/@ffmpeg/ffmpeg/-/ffmpeg-0.12.10.tgz#e3cce21f21f11f33dfc1ec1d5ad5694f4a3073c9", + "@ffmpeg/types@^0.12.2": "https://registry.yarnpkg.com/@ffmpeg/types/-/types-0.12.2.tgz#bc7eef321ae50225c247091f1f23fd3087c6aa1d", + "@ffmpeg/util@^0.12.1": "https://registry.yarnpkg.com/@ffmpeg/util/-/util-0.12.1.tgz#98afa20d7b4c0821eebdb205ddcfa5d07b0a4f53", + "@rollup/rollup-android-arm-eabi@4.24.0": "https://registry.yarnpkg.com/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.24.0.tgz#1661ff5ea9beb362795304cb916049aba7ac9c54", + "@rollup/rollup-android-arm64@4.24.0": "https://registry.yarnpkg.com/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.24.0.tgz#2ffaa91f1b55a0082b8a722525741aadcbd3971e", + "@rollup/rollup-darwin-arm64@4.24.0": "https://registry.yarnpkg.com/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.24.0.tgz#627007221b24b8cc3063703eee0b9177edf49c1f", + "@rollup/rollup-darwin-x64@4.24.0": "https://registry.yarnpkg.com/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.24.0.tgz#0605506142b9e796c370d59c5984ae95b9758724", + "@rollup/rollup-linux-arm-gnueabihf@4.24.0": "https://registry.yarnpkg.com/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.24.0.tgz#62dfd196d4b10c0c2db833897164d2d319ee0cbb", + "@rollup/rollup-linux-arm-musleabihf@4.24.0": "https://registry.yarnpkg.com/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.24.0.tgz#53ce72aeb982f1f34b58b380baafaf6a240fddb3", + "@rollup/rollup-linux-arm64-gnu@4.24.0": "https://registry.yarnpkg.com/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.24.0.tgz#1632990f62a75c74f43e4b14ab3597d7ed416496", + "@rollup/rollup-linux-arm64-musl@4.24.0": "https://registry.yarnpkg.com/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.24.0.tgz#8c03a996efb41e257b414b2e0560b7a21f2d9065", + "@rollup/rollup-linux-powerpc64le-gnu@4.24.0": "https://registry.yarnpkg.com/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.24.0.tgz#5b98729628d5bcc8f7f37b58b04d6845f85c7b5d", + "@rollup/rollup-linux-riscv64-gnu@4.24.0": "https://registry.yarnpkg.com/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.24.0.tgz#48e42e41f4cabf3573cfefcb448599c512e22983", + "@rollup/rollup-linux-s390x-gnu@4.24.0": "https://registry.yarnpkg.com/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.24.0.tgz#e0b4f9a966872cb7d3e21b9e412a4b7efd7f0b58", + "@rollup/rollup-linux-x64-gnu@4.24.0": "https://registry.yarnpkg.com/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.24.0.tgz#78144741993100f47bd3da72fce215e077ae036b", + "@rollup/rollup-linux-x64-musl@4.24.0": 
"https://registry.yarnpkg.com/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.24.0.tgz#d9fe32971883cd1bd858336bd33a1c3ca6146127", + "@rollup/rollup-win32-arm64-msvc@4.24.0": "https://registry.yarnpkg.com/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.24.0.tgz#71fa3ea369316db703a909c790743972e98afae5", + "@rollup/rollup-win32-ia32-msvc@4.24.0": "https://registry.yarnpkg.com/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.24.0.tgz#653f5989a60658e17d7576a3996deb3902e342e2", + "@rollup/rollup-win32-x64-msvc@4.24.0": "https://registry.yarnpkg.com/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.24.0.tgz#0574d7e87b44ee8511d08cc7f914bcb802b70818", + "@types/estree@1.0.6": "https://registry.yarnpkg.com/@types/estree/-/estree-1.0.6.tgz#628effeeae2064a1b4e79f78e81d87b7e5fc7b50", + "esbuild@^0.21.3": "https://registry.yarnpkg.com/esbuild/-/esbuild-0.21.5.tgz#9ca301b120922959b766360d8ac830da0d02997d", + "fsevents@~2.3.2": "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.3.tgz#cac6407785d03675a2a5e1a5305c697b347d90d6", + "fsevents@~2.3.3": "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.3.tgz#cac6407785d03675a2a5e1a5305c697b347d90d6", + "nanoid@^3.3.7": "https://registry.yarnpkg.com/nanoid/-/nanoid-3.3.7.tgz#d0c301a691bc8d54efa0a2226ccf3fe2fd656bd8", + "picocolors@^1.1.0": "https://registry.yarnpkg.com/picocolors/-/picocolors-1.1.0.tgz#5358b76a78cde483ba5cef6a9dc9671440b27d59", + "postcss@^8.4.43": "https://registry.yarnpkg.com/postcss/-/postcss-8.4.47.tgz#5bf6c9a010f3e724c503bf03ef7947dcb0fea365", + "rollup@^4.20.0": "https://registry.yarnpkg.com/rollup/-/rollup-4.24.0.tgz#c14a3576f20622ea6a5c9cad7caca5e6e9555d05", + "source-map-js@^1.2.1": "https://registry.yarnpkg.com/source-map-js/-/source-map-js-1.2.1.tgz#1ce5650fddd87abc099eda37dcff024c2667ae46", + "vite@^5.4.8": "https://registry.yarnpkg.com/vite/-/vite-5.4.8.tgz#af548ce1c211b2785478d3ba3e8da51e39a287e8" + }, + "files": [], + "artifacts": {} +} \ No newline at end of file diff --git a/node_modules/@esbuild/darwin-arm64/README.md b/node_modules/@esbuild/darwin-arm64/README.md new file mode 100644 index 0000000..c2c0398 --- /dev/null +++ b/node_modules/@esbuild/darwin-arm64/README.md @@ -0,0 +1,3 @@ +# esbuild + +This is the macOS ARM 64-bit binary for esbuild, a JavaScript bundler and minifier. See https://github.com/evanw/esbuild for details. 
diff --git a/node_modules/@esbuild/darwin-arm64/bin/esbuild b/node_modules/@esbuild/darwin-arm64/bin/esbuild new file mode 100755 index 0000000..e81b7da Binary files /dev/null and b/node_modules/@esbuild/darwin-arm64/bin/esbuild differ diff --git a/node_modules/@esbuild/darwin-arm64/package.json b/node_modules/@esbuild/darwin-arm64/package.json new file mode 100644 index 0000000..6eea7ea --- /dev/null +++ b/node_modules/@esbuild/darwin-arm64/package.json @@ -0,0 +1,20 @@ +{ + "name": "@esbuild/darwin-arm64", + "version": "0.21.5", + "description": "The macOS ARM 64-bit binary for esbuild, a JavaScript bundler.", + "repository": { + "type": "git", + "url": "git+https://github.com/evanw/esbuild.git" + }, + "license": "MIT", + "preferUnplugged": true, + "engines": { + "node": ">=12" + }, + "os": [ + "darwin" + ], + "cpu": [ + "arm64" + ] +} diff --git a/node_modules/@ffmpeg/ffmpeg/dist/esm/classes.d.ts b/node_modules/@ffmpeg/ffmpeg/dist/esm/classes.d.ts new file mode 100644 index 0000000..205c94e --- /dev/null +++ b/node_modules/@ffmpeg/ffmpeg/dist/esm/classes.d.ts @@ -0,0 +1,145 @@ +import { FSNode, FFMessageLoadConfig, OK, IsFirst, LogEventCallback, ProgressEventCallback, FileData, FFFSType, FFFSMountOptions, FFFSPath } from "./types.js"; +type FFMessageOptions = { + signal?: AbortSignal; +}; +/** + * Provides APIs to interact with ffmpeg web worker. + * + * @example + * ```ts + * const ffmpeg = new FFmpeg(); + * ``` + */ +export declare class FFmpeg { + #private; + loaded: boolean; + /** + * Listen to log or prgress events from `ffmpeg.exec()`. + * + * @example + * ```ts + * ffmpeg.on("log", ({ type, message }) => { + * // ... + * }) + * ``` + * + * @example + * ```ts + * ffmpeg.on("progress", ({ progress, time }) => { + * // ... + * }) + * ``` + * + * @remarks + * - log includes output to stdout and stderr. + * - The progress events are accurate only when the length of + * input and output video/audio file are the same. + * + * @category FFmpeg + */ + on(event: "log", callback: LogEventCallback): void; + on(event: "progress", callback: ProgressEventCallback): void; + /** + * Unlisten to log or prgress events from `ffmpeg.exec()`. + * + * @category FFmpeg + */ + off(event: "log", callback: LogEventCallback): void; + off(event: "progress", callback: ProgressEventCallback): void; + /** + * Loads ffmpeg-core inside web worker. It is required to call this method first + * as it initializes WebAssembly and other essential variables. + * + * @category FFmpeg + * @returns `true` if ffmpeg core is loaded for the first time. + */ + load: ({ classWorkerURL, ...config }?: FFMessageLoadConfig, { signal }?: FFMessageOptions) => Promise; + /** + * Execute ffmpeg command. + * + * @remarks + * To avoid common I/O issues, ["-nostdin", "-y"] are prepended to the args + * by default. + * + * @example + * ```ts + * const ffmpeg = new FFmpeg(); + * await ffmpeg.load(); + * await ffmpeg.writeFile("video.avi", ...); + * // ffmpeg -i video.avi video.mp4 + * await ffmpeg.exec(["-i", "video.avi", "video.mp4"]); + * const data = ffmpeg.readFile("video.mp4"); + * ``` + * + * @returns `0` if no error, `!= 0` if timeout (1) or error. + * @category FFmpeg + */ + exec: (args: string[], timeout?: number, { signal }?: FFMessageOptions) => Promise; + /** + * Terminate all ongoing API calls and terminate web worker. + * `FFmpeg.load()` must be called again before calling any other APIs. + * + * @category FFmpeg + */ + terminate: () => void; + /** + * Write data to ffmpeg.wasm. 
+ * + * @example + * ```ts + * const ffmpeg = new FFmpeg(); + * await ffmpeg.load(); + * await ffmpeg.writeFile("video.avi", await fetchFile("../video.avi")); + * await ffmpeg.writeFile("text.txt", "hello world"); + * ``` + * + * @category File System + */ + writeFile: (path: string, data: FileData, { signal }?: FFMessageOptions) => Promise; + mount: (fsType: FFFSType, options: FFFSMountOptions, mountPoint: FFFSPath) => Promise; + unmount: (mountPoint: FFFSPath) => Promise; + /** + * Read data from ffmpeg.wasm. + * + * @example + * ```ts + * const ffmpeg = new FFmpeg(); + * await ffmpeg.load(); + * const data = await ffmpeg.readFile("video.mp4"); + * ``` + * + * @category File System + */ + readFile: (path: string, encoding?: string, { signal }?: FFMessageOptions) => Promise; + /** + * Delete a file. + * + * @category File System + */ + deleteFile: (path: string, { signal }?: FFMessageOptions) => Promise; + /** + * Rename a file or directory. + * + * @category File System + */ + rename: (oldPath: string, newPath: string, { signal }?: FFMessageOptions) => Promise; + /** + * Create a directory. + * + * @category File System + */ + createDir: (path: string, { signal }?: FFMessageOptions) => Promise; + /** + * List directory contents. + * + * @category File System + */ + listDir: (path: string, { signal }?: FFMessageOptions) => Promise; + /** + * Delete an empty directory. + * + * @category File System + */ + deleteDir: (path: string, { signal }?: FFMessageOptions) => Promise; +} +export {}; diff --git a/node_modules/@ffmpeg/ffmpeg/dist/esm/classes.js b/node_modules/@ffmpeg/ffmpeg/dist/esm/classes.js new file mode 100644 index 0000000..e490b7a --- /dev/null +++ b/node_modules/@ffmpeg/ffmpeg/dist/esm/classes.js @@ -0,0 +1,277 @@ +import { FFMessageType } from "./const.js"; +import { getMessageID } from "./utils.js"; +import { ERROR_TERMINATED, ERROR_NOT_LOADED } from "./errors.js"; +/** + * Provides APIs to interact with ffmpeg web worker. + * + * @example + * ```ts + * const ffmpeg = new FFmpeg(); + * ``` + */ +export class FFmpeg { + #worker = null; + /** + * #resolves and #rejects tracks Promise resolves and rejects to + * be called when we receive message from web worker. + */ + #resolves = {}; + #rejects = {}; + #logEventCallbacks = []; + #progressEventCallbacks = []; + loaded = false; + /** + * register worker message event handlers. + */ + #registerHandlers = () => { + if (this.#worker) { + this.#worker.onmessage = ({ data: { id, type, data }, }) => { + switch (type) { + case FFMessageType.LOAD: + this.loaded = true; + this.#resolves[id](data); + break; + case FFMessageType.MOUNT: + case FFMessageType.UNMOUNT: + case FFMessageType.EXEC: + case FFMessageType.WRITE_FILE: + case FFMessageType.READ_FILE: + case FFMessageType.DELETE_FILE: + case FFMessageType.RENAME: + case FFMessageType.CREATE_DIR: + case FFMessageType.LIST_DIR: + case FFMessageType.DELETE_DIR: + this.#resolves[id](data); + break; + case FFMessageType.LOG: + this.#logEventCallbacks.forEach((f) => f(data)); + break; + case FFMessageType.PROGRESS: + this.#progressEventCallbacks.forEach((f) => f(data)); + break; + case FFMessageType.ERROR: + this.#rejects[id](data); + break; + } + delete this.#resolves[id]; + delete this.#rejects[id]; + }; + } + }; + /** + * Generic function to send messages to web worker. 
+ */ + #send = ({ type, data }, trans = [], signal) => { + if (!this.#worker) { + return Promise.reject(ERROR_NOT_LOADED); + } + return new Promise((resolve, reject) => { + const id = getMessageID(); + this.#worker && this.#worker.postMessage({ id, type, data }, trans); + this.#resolves[id] = resolve; + this.#rejects[id] = reject; + signal?.addEventListener("abort", () => { + reject(new DOMException(`Message # ${id} was aborted`, "AbortError")); + }, { once: true }); + }); + }; + on(event, callback) { + if (event === "log") { + this.#logEventCallbacks.push(callback); + } + else if (event === "progress") { + this.#progressEventCallbacks.push(callback); + } + } + off(event, callback) { + if (event === "log") { + this.#logEventCallbacks = this.#logEventCallbacks.filter((f) => f !== callback); + } + else if (event === "progress") { + this.#progressEventCallbacks = this.#progressEventCallbacks.filter((f) => f !== callback); + } + } + /** + * Loads ffmpeg-core inside web worker. It is required to call this method first + * as it initializes WebAssembly and other essential variables. + * + * @category FFmpeg + * @returns `true` if ffmpeg core is loaded for the first time. + */ + load = ({ classWorkerURL, ...config } = {}, { signal } = {}) => { + if (!this.#worker) { + this.#worker = classWorkerURL ? + new Worker(new URL(classWorkerURL, import.meta.url), { + type: "module", + }) : + // We need to duplicated the code here to enable webpack + // to bundle worekr.js here. + new Worker(new URL("./worker.js", import.meta.url), { + type: "module", + }); + this.#registerHandlers(); + } + return this.#send({ + type: FFMessageType.LOAD, + data: config, + }, undefined, signal); + }; + /** + * Execute ffmpeg command. + * + * @remarks + * To avoid common I/O issues, ["-nostdin", "-y"] are prepended to the args + * by default. + * + * @example + * ```ts + * const ffmpeg = new FFmpeg(); + * await ffmpeg.load(); + * await ffmpeg.writeFile("video.avi", ...); + * // ffmpeg -i video.avi video.mp4 + * await ffmpeg.exec(["-i", "video.avi", "video.mp4"]); + * const data = ffmpeg.readFile("video.mp4"); + * ``` + * + * @returns `0` if no error, `!= 0` if timeout (1) or error. + * @category FFmpeg + */ + exec = ( + /** ffmpeg command line args */ + args, + /** + * milliseconds to wait before stopping the command execution. + * + * @defaultValue -1 + */ + timeout = -1, { signal } = {}) => this.#send({ + type: FFMessageType.EXEC, + data: { args, timeout }, + }, undefined, signal); + /** + * Terminate all ongoing API calls and terminate web worker. + * `FFmpeg.load()` must be called again before calling any other APIs. + * + * @category FFmpeg + */ + terminate = () => { + const ids = Object.keys(this.#rejects); + // rejects all incomplete Promises. + for (const id of ids) { + this.#rejects[id](ERROR_TERMINATED); + delete this.#rejects[id]; + delete this.#resolves[id]; + } + if (this.#worker) { + this.#worker.terminate(); + this.#worker = null; + this.loaded = false; + } + }; + /** + * Write data to ffmpeg.wasm. 
+ * + * @example + * ```ts + * const ffmpeg = new FFmpeg(); + * await ffmpeg.load(); + * await ffmpeg.writeFile("video.avi", await fetchFile("../video.avi")); + * await ffmpeg.writeFile("text.txt", "hello world"); + * ``` + * + * @category File System + */ + writeFile = (path, data, { signal } = {}) => { + const trans = []; + if (data instanceof Uint8Array) { + trans.push(data.buffer); + } + return this.#send({ + type: FFMessageType.WRITE_FILE, + data: { path, data }, + }, trans, signal); + }; + mount = (fsType, options, mountPoint) => { + const trans = []; + return this.#send({ + type: FFMessageType.MOUNT, + data: { fsType, options, mountPoint }, + }, trans); + }; + unmount = (mountPoint) => { + const trans = []; + return this.#send({ + type: FFMessageType.UNMOUNT, + data: { mountPoint }, + }, trans); + }; + /** + * Read data from ffmpeg.wasm. + * + * @example + * ```ts + * const ffmpeg = new FFmpeg(); + * await ffmpeg.load(); + * const data = await ffmpeg.readFile("video.mp4"); + * ``` + * + * @category File System + */ + readFile = (path, + /** + * File content encoding, supports two encodings: + * - utf8: read file as text file, return data in string type. + * - binary: read file as binary file, return data in Uint8Array type. + * + * @defaultValue binary + */ + encoding = "binary", { signal } = {}) => this.#send({ + type: FFMessageType.READ_FILE, + data: { path, encoding }, + }, undefined, signal); + /** + * Delete a file. + * + * @category File System + */ + deleteFile = (path, { signal } = {}) => this.#send({ + type: FFMessageType.DELETE_FILE, + data: { path }, + }, undefined, signal); + /** + * Rename a file or directory. + * + * @category File System + */ + rename = (oldPath, newPath, { signal } = {}) => this.#send({ + type: FFMessageType.RENAME, + data: { oldPath, newPath }, + }, undefined, signal); + /** + * Create a directory. + * + * @category File System + */ + createDir = (path, { signal } = {}) => this.#send({ + type: FFMessageType.CREATE_DIR, + data: { path }, + }, undefined, signal); + /** + * List directory contents. + * + * @category File System + */ + listDir = (path, { signal } = {}) => this.#send({ + type: FFMessageType.LIST_DIR, + data: { path }, + }, undefined, signal); + /** + * Delete an empty directory. 
+ * + * @category File System + */ + deleteDir = (path, { signal } = {}) => this.#send({ + type: FFMessageType.DELETE_DIR, + data: { path }, + }, undefined, signal); +} diff --git a/node_modules/@ffmpeg/ffmpeg/dist/esm/const.d.ts b/node_modules/@ffmpeg/ffmpeg/dist/esm/const.d.ts new file mode 100644 index 0000000..97978fb --- /dev/null +++ b/node_modules/@ffmpeg/ffmpeg/dist/esm/const.d.ts @@ -0,0 +1,21 @@ +export declare const MIME_TYPE_JAVASCRIPT = "text/javascript"; +export declare const MIME_TYPE_WASM = "application/wasm"; +export declare const CORE_VERSION = "0.12.6"; +export declare const CORE_URL: string; +export declare enum FFMessageType { + LOAD = "LOAD", + EXEC = "EXEC", + WRITE_FILE = "WRITE_FILE", + READ_FILE = "READ_FILE", + DELETE_FILE = "DELETE_FILE", + RENAME = "RENAME", + CREATE_DIR = "CREATE_DIR", + LIST_DIR = "LIST_DIR", + DELETE_DIR = "DELETE_DIR", + ERROR = "ERROR", + DOWNLOAD = "DOWNLOAD", + PROGRESS = "PROGRESS", + LOG = "LOG", + MOUNT = "MOUNT", + UNMOUNT = "UNMOUNT" +} diff --git a/node_modules/@ffmpeg/ffmpeg/dist/esm/const.js b/node_modules/@ffmpeg/ffmpeg/dist/esm/const.js new file mode 100644 index 0000000..35cc93f --- /dev/null +++ b/node_modules/@ffmpeg/ffmpeg/dist/esm/const.js @@ -0,0 +1,22 @@ +export const MIME_TYPE_JAVASCRIPT = "text/javascript"; +export const MIME_TYPE_WASM = "application/wasm"; +export const CORE_VERSION = "0.12.6"; +export const CORE_URL = `https://unpkg.com/@ffmpeg/core@${CORE_VERSION}/dist/umd/ffmpeg-core.js`; +export var FFMessageType; +(function (FFMessageType) { + FFMessageType["LOAD"] = "LOAD"; + FFMessageType["EXEC"] = "EXEC"; + FFMessageType["WRITE_FILE"] = "WRITE_FILE"; + FFMessageType["READ_FILE"] = "READ_FILE"; + FFMessageType["DELETE_FILE"] = "DELETE_FILE"; + FFMessageType["RENAME"] = "RENAME"; + FFMessageType["CREATE_DIR"] = "CREATE_DIR"; + FFMessageType["LIST_DIR"] = "LIST_DIR"; + FFMessageType["DELETE_DIR"] = "DELETE_DIR"; + FFMessageType["ERROR"] = "ERROR"; + FFMessageType["DOWNLOAD"] = "DOWNLOAD"; + FFMessageType["PROGRESS"] = "PROGRESS"; + FFMessageType["LOG"] = "LOG"; + FFMessageType["MOUNT"] = "MOUNT"; + FFMessageType["UNMOUNT"] = "UNMOUNT"; +})(FFMessageType || (FFMessageType = {})); diff --git a/node_modules/@ffmpeg/ffmpeg/dist/esm/empty.d.mts b/node_modules/@ffmpeg/ffmpeg/dist/esm/empty.d.mts new file mode 100644 index 0000000..1b5f8f2 --- /dev/null +++ b/node_modules/@ffmpeg/ffmpeg/dist/esm/empty.d.mts @@ -0,0 +1,3 @@ +export declare class FFmpeg { + constructor(); +} diff --git a/node_modules/@ffmpeg/ffmpeg/dist/esm/empty.mjs b/node_modules/@ffmpeg/ffmpeg/dist/esm/empty.mjs new file mode 100644 index 0000000..b16b65c --- /dev/null +++ b/node_modules/@ffmpeg/ffmpeg/dist/esm/empty.mjs @@ -0,0 +1,6 @@ +// File to be imported in node enviroments +export class FFmpeg { + constructor() { + throw new Error("ffmpeg.wasm does not support nodejs"); + } +} diff --git a/node_modules/@ffmpeg/ffmpeg/dist/esm/errors.d.ts b/node_modules/@ffmpeg/ffmpeg/dist/esm/errors.d.ts new file mode 100644 index 0000000..e8ee0d9 --- /dev/null +++ b/node_modules/@ffmpeg/ffmpeg/dist/esm/errors.d.ts @@ -0,0 +1,4 @@ +export declare const ERROR_UNKNOWN_MESSAGE_TYPE: Error; +export declare const ERROR_NOT_LOADED: Error; +export declare const ERROR_TERMINATED: Error; +export declare const ERROR_IMPORT_FAILURE: Error; diff --git a/node_modules/@ffmpeg/ffmpeg/dist/esm/errors.js b/node_modules/@ffmpeg/ffmpeg/dist/esm/errors.js new file mode 100644 index 0000000..409f515 --- /dev/null +++ b/node_modules/@ffmpeg/ffmpeg/dist/esm/errors.js @@ -0,0 +1,4 @@ 
+export const ERROR_UNKNOWN_MESSAGE_TYPE = new Error("unknown message type"); +export const ERROR_NOT_LOADED = new Error("ffmpeg is not loaded, call `await ffmpeg.load()` first"); +export const ERROR_TERMINATED = new Error("called FFmpeg.terminate()"); +export const ERROR_IMPORT_FAILURE = new Error("failed to import ffmpeg-core.js"); diff --git a/node_modules/@ffmpeg/ffmpeg/dist/esm/index.d.ts b/node_modules/@ffmpeg/ffmpeg/dist/esm/index.d.ts new file mode 100644 index 0000000..b84e9db --- /dev/null +++ b/node_modules/@ffmpeg/ffmpeg/dist/esm/index.d.ts @@ -0,0 +1 @@ +export * from "./classes.js"; diff --git a/node_modules/@ffmpeg/ffmpeg/dist/esm/index.js b/node_modules/@ffmpeg/ffmpeg/dist/esm/index.js new file mode 100644 index 0000000..b84e9db --- /dev/null +++ b/node_modules/@ffmpeg/ffmpeg/dist/esm/index.js @@ -0,0 +1 @@ +export * from "./classes.js"; diff --git a/node_modules/@ffmpeg/ffmpeg/dist/esm/types.d.ts b/node_modules/@ffmpeg/ffmpeg/dist/esm/types.d.ts new file mode 100644 index 0000000..8b0eda3 --- /dev/null +++ b/node_modules/@ffmpeg/ffmpeg/dist/esm/types.d.ts @@ -0,0 +1,131 @@ +export type FFFSPath = string; +/** + * ffmpeg-core loading configuration. + */ +export interface FFMessageLoadConfig { + /** + * `ffmpeg-core.js` URL. + * + * @defaultValue `https://unpkg.com/@ffmpeg/core@${CORE_VERSION}/dist/umd/ffmpeg-core.js`; + */ + coreURL?: string; + /** + * `ffmpeg-core.wasm` URL. + * + * @defaultValue `https://unpkg.com/@ffmpeg/core@${CORE_VERSION}/dist/umd/ffmpeg-core.wasm`; + */ + wasmURL?: string; + /** + * `ffmpeg-core.worker.js` URL. This worker is spawned when using multithread version of ffmpeg-core. + * + * @ref: https://ffmpegwasm.netlify.app/docs/overview#architecture + * @defaultValue `https://unpkg.com/@ffmpeg/core-mt@${CORE_VERSION}/dist/umd/ffmpeg-core.worker.js`; + */ + workerURL?: string; + /** + * `ffmpeg.worker.js` URL. This worker is spawned when FFmpeg.load() is called, it is an essential worker and usually you don't need to update this config. + * + * @ref: https://ffmpegwasm.netlify.app/docs/overview#architecture + * @defaultValue `./worker.js` + */ + classWorkerURL?: string; +} +export interface FFMessageExecData { + args: string[]; + timeout?: number; +} +export interface FFMessageWriteFileData { + path: FFFSPath; + data: FileData; +} +export interface FFMessageReadFileData { + path: FFFSPath; + encoding: string; +} +export interface FFMessageDeleteFileData { + path: FFFSPath; +} +export interface FFMessageRenameData { + oldPath: FFFSPath; + newPath: FFFSPath; +} +export interface FFMessageCreateDirData { + path: FFFSPath; +} +export interface FFMessageListDirData { + path: FFFSPath; +} +/** + * @remarks + * Only deletes empty directory. 
+ */ +export interface FFMessageDeleteDirData { + path: FFFSPath; +} +export declare enum FFFSType { + MEMFS = "MEMFS", + NODEFS = "NODEFS", + NODERAWFS = "NODERAWFS", + IDBFS = "IDBFS", + WORKERFS = "WORKERFS", + PROXYFS = "PROXYFS" +} +export type WorkerFSFileEntry = File; +export interface WorkerFSBlobEntry { + name: string; + data: Blob; +} +export interface WorkerFSMountData { + blobs?: WorkerFSBlobEntry[]; + files?: WorkerFSFileEntry[]; +} +export type FFFSMountOptions = WorkerFSMountData; +export interface FFMessageMountData { + fsType: FFFSType; + options: FFFSMountOptions; + mountPoint: FFFSPath; +} +export interface FFMessageUnmountData { + mountPoint: FFFSPath; +} +export type FFMessageData = FFMessageLoadConfig | FFMessageExecData | FFMessageWriteFileData | FFMessageReadFileData | FFMessageDeleteFileData | FFMessageRenameData | FFMessageCreateDirData | FFMessageListDirData | FFMessageDeleteDirData | FFMessageMountData | FFMessageUnmountData; +export interface Message { + type: string; + data?: FFMessageData; +} +export interface FFMessage extends Message { + id: number; +} +export interface FFMessageEvent extends MessageEvent { + data: FFMessage; +} +export interface LogEvent { + type: string; + message: string; +} +export interface ProgressEvent { + progress: number; + time: number; +} +export type ExitCode = number; +export type ErrorMessage = string; +export type FileData = Uint8Array | string; +export type IsFirst = boolean; +export type OK = boolean; +export interface FSNode { + name: string; + isDir: boolean; +} +export type CallbackData = FileData | ExitCode | ErrorMessage | LogEvent | ProgressEvent | IsFirst | OK | Error | FSNode[] | undefined; +export interface Callbacks { + [id: number | string]: (data: CallbackData) => void; +} +export type LogEventCallback = (event: LogEvent) => void; +export type ProgressEventCallback = (event: ProgressEvent) => void; +export interface FFMessageEventCallback { + data: { + id: number; + type: string; + data: CallbackData; + }; +} diff --git a/node_modules/@ffmpeg/ffmpeg/dist/esm/types.js b/node_modules/@ffmpeg/ffmpeg/dist/esm/types.js new file mode 100644 index 0000000..76a0690 --- /dev/null +++ b/node_modules/@ffmpeg/ffmpeg/dist/esm/types.js @@ -0,0 +1,9 @@ +export var FFFSType; +(function (FFFSType) { + FFFSType["MEMFS"] = "MEMFS"; + FFFSType["NODEFS"] = "NODEFS"; + FFFSType["NODERAWFS"] = "NODERAWFS"; + FFFSType["IDBFS"] = "IDBFS"; + FFFSType["WORKERFS"] = "WORKERFS"; + FFFSType["PROXYFS"] = "PROXYFS"; +})(FFFSType || (FFFSType = {})); diff --git a/node_modules/@ffmpeg/ffmpeg/dist/esm/utils.d.ts b/node_modules/@ffmpeg/ffmpeg/dist/esm/utils.d.ts new file mode 100644 index 0000000..a980fe6 --- /dev/null +++ b/node_modules/@ffmpeg/ffmpeg/dist/esm/utils.d.ts @@ -0,0 +1,4 @@ +/** + * Generate an unique message ID. + */ +export declare const getMessageID: () => number; diff --git a/node_modules/@ffmpeg/ffmpeg/dist/esm/utils.js b/node_modules/@ffmpeg/ffmpeg/dist/esm/utils.js new file mode 100644 index 0000000..7dd12b0 --- /dev/null +++ b/node_modules/@ffmpeg/ffmpeg/dist/esm/utils.js @@ -0,0 +1,7 @@ +/** + * Generate an unique message ID. 
+ */ +export const getMessageID = (() => { + let messageID = 0; + return () => messageID++; +})(); diff --git a/node_modules/@ffmpeg/ffmpeg/dist/esm/worker.d.ts b/node_modules/@ffmpeg/ffmpeg/dist/esm/worker.d.ts new file mode 100644 index 0000000..1f14e35 --- /dev/null +++ b/node_modules/@ffmpeg/ffmpeg/dist/esm/worker.d.ts @@ -0,0 +1,9 @@ +/// +/// +/// +import type { FFmpegCoreModuleFactory } from "@ffmpeg/types"; +declare global { + interface WorkerGlobalScope { + createFFmpegCore: FFmpegCoreModuleFactory; + } +} diff --git a/node_modules/@ffmpeg/ffmpeg/dist/esm/worker.js b/node_modules/@ffmpeg/ffmpeg/dist/esm/worker.js new file mode 100644 index 0000000..57bd90c --- /dev/null +++ b/node_modules/@ffmpeg/ffmpeg/dist/esm/worker.js @@ -0,0 +1,151 @@ +/// +/// +/// +import { CORE_URL, FFMessageType } from "./const.js"; +import { ERROR_UNKNOWN_MESSAGE_TYPE, ERROR_NOT_LOADED, ERROR_IMPORT_FAILURE, } from "./errors.js"; +let ffmpeg; +const load = async ({ coreURL: _coreURL, wasmURL: _wasmURL, workerURL: _workerURL, }) => { + const first = !ffmpeg; + try { + if (!_coreURL) + _coreURL = CORE_URL; + // when web worker type is `classic`. + importScripts(_coreURL); + } + catch { + if (!_coreURL) + _coreURL = CORE_URL.replace('/umd/', '/esm/'); + // when web worker type is `module`. + self.createFFmpegCore = (await import( + /* webpackIgnore: true */ /* @vite-ignore */ _coreURL)).default; + if (!self.createFFmpegCore) { + throw ERROR_IMPORT_FAILURE; + } + } + const coreURL = _coreURL; + const wasmURL = _wasmURL ? _wasmURL : _coreURL.replace(/.js$/g, ".wasm"); + const workerURL = _workerURL + ? _workerURL + : _coreURL.replace(/.js$/g, ".worker.js"); + ffmpeg = await self.createFFmpegCore({ + // Fix `Overload resolution failed.` when using multi-threaded ffmpeg-core. + // Encoded wasmURL and workerURL in the URL as a hack to fix locateFile issue. + mainScriptUrlOrBlob: `${coreURL}#${btoa(JSON.stringify({ wasmURL, workerURL }))}`, + }); + ffmpeg.setLogger((data) => self.postMessage({ type: FFMessageType.LOG, data })); + ffmpeg.setProgress((data) => self.postMessage({ + type: FFMessageType.PROGRESS, + data, + })); + return first; +}; +const exec = ({ args, timeout = -1 }) => { + ffmpeg.setTimeout(timeout); + ffmpeg.exec(...args); + const ret = ffmpeg.ret; + ffmpeg.reset(); + return ret; +}; +const writeFile = ({ path, data }) => { + ffmpeg.FS.writeFile(path, data); + return true; +}; +const readFile = ({ path, encoding }) => ffmpeg.FS.readFile(path, { encoding }); +// TODO: check if deletion works. +const deleteFile = ({ path }) => { + ffmpeg.FS.unlink(path); + return true; +}; +const rename = ({ oldPath, newPath }) => { + ffmpeg.FS.rename(oldPath, newPath); + return true; +}; +// TODO: check if creation works. +const createDir = ({ path }) => { + ffmpeg.FS.mkdir(path); + return true; +}; +const listDir = ({ path }) => { + const names = ffmpeg.FS.readdir(path); + const nodes = []; + for (const name of names) { + const stat = ffmpeg.FS.stat(`${path}/${name}`); + const isDir = ffmpeg.FS.isDir(stat.mode); + nodes.push({ name, isDir }); + } + return nodes; +}; +// TODO: check if deletion works. 
+const deleteDir = ({ path }) => { + ffmpeg.FS.rmdir(path); + return true; +}; +const mount = ({ fsType, options, mountPoint }) => { + const str = fsType; + const fs = ffmpeg.FS.filesystems[str]; + if (!fs) + return false; + ffmpeg.FS.mount(fs, options, mountPoint); + return true; +}; +const unmount = ({ mountPoint }) => { + ffmpeg.FS.unmount(mountPoint); + return true; +}; +self.onmessage = async ({ data: { id, type, data: _data }, }) => { + const trans = []; + let data; + try { + if (type !== FFMessageType.LOAD && !ffmpeg) + throw ERROR_NOT_LOADED; // eslint-disable-line + switch (type) { + case FFMessageType.LOAD: + data = await load(_data); + break; + case FFMessageType.EXEC: + data = exec(_data); + break; + case FFMessageType.WRITE_FILE: + data = writeFile(_data); + break; + case FFMessageType.READ_FILE: + data = readFile(_data); + break; + case FFMessageType.DELETE_FILE: + data = deleteFile(_data); + break; + case FFMessageType.RENAME: + data = rename(_data); + break; + case FFMessageType.CREATE_DIR: + data = createDir(_data); + break; + case FFMessageType.LIST_DIR: + data = listDir(_data); + break; + case FFMessageType.DELETE_DIR: + data = deleteDir(_data); + break; + case FFMessageType.MOUNT: + data = mount(_data); + break; + case FFMessageType.UNMOUNT: + data = unmount(_data); + break; + default: + throw ERROR_UNKNOWN_MESSAGE_TYPE; + } + } + catch (e) { + self.postMessage({ + id, + type: FFMessageType.ERROR, + data: e.toString(), + }); + return; + } + if (data instanceof Uint8Array) { + trans.push(data.buffer); + } + self.postMessage({ id, type, data }, trans); +}; diff --git a/node_modules/@ffmpeg/ffmpeg/dist/umd/814.ffmpeg.js b/node_modules/@ffmpeg/ffmpeg/dist/umd/814.ffmpeg.js new file mode 100644 index 0000000..a82c120 --- /dev/null +++ b/node_modules/@ffmpeg/ffmpeg/dist/umd/814.ffmpeg.js @@ -0,0 +1,2 @@ +!function(e,t){"object"==typeof exports&&"object"==typeof module?module.exports=t():"function"==typeof define&&define.amd?define([],t):"object"==typeof exports?exports.FFmpegWASM=t():e.FFmpegWASM=t()}(self,(()=>(()=>{"use strict";const e="https://unpkg.com/@ffmpeg/core@0.12.6/dist/umd/ffmpeg-core.js";var t;!function(e){e.LOAD="LOAD",e.EXEC="EXEC",e.WRITE_FILE="WRITE_FILE",e.READ_FILE="READ_FILE",e.DELETE_FILE="DELETE_FILE",e.RENAME="RENAME",e.CREATE_DIR="CREATE_DIR",e.LIST_DIR="LIST_DIR",e.DELETE_DIR="DELETE_DIR",e.ERROR="ERROR",e.DOWNLOAD="DOWNLOAD",e.PROGRESS="PROGRESS",e.LOG="LOG",e.MOUNT="MOUNT",e.UNMOUNT="UNMOUNT"}(t||(t={}));const r=new Error("unknown message type"),a=new Error("ffmpeg is not loaded, call `await ffmpeg.load()` first"),s=(new Error("called FFmpeg.terminate()"),new Error("failed to import ffmpeg-core.js"));let o;return self.onmessage=async({data:{id:n,type:E,data:c}})=>{const i=[];let p;try{if(E!==t.LOAD&&!o)throw a;switch(E){case t.LOAD:p=await(async({coreURL:r,wasmURL:a,workerURL:n})=>{const E=!o;try{r||(r=e),importScripts(r)}catch{if(r||(r=e.replace("/umd/","/esm/")),self.createFFmpegCore=(await import(r)).default,!self.createFFmpegCore)throw s}const c=r,i=a||r.replace(/.js$/g,".wasm"),p=n||r.replace(/.js$/g,".worker.js");return o=await self.createFFmpegCore({mainScriptUrlOrBlob:`${c}#${btoa(JSON.stringify({wasmURL:i,workerURL:p}))}`}),o.setLogger((e=>self.postMessage({type:t.LOG,data:e}))),o.setProgress((e=>self.postMessage({type:t.PROGRESS,data:e}))),E})(c);break;case t.EXEC:p=(({args:e,timeout:t=-1})=>{o.setTimeout(t),o.exec(...e);const r=o.ret;return o.reset(),r})(c);break;case 
t.WRITE_FILE:p=(({path:e,data:t})=>(o.FS.writeFile(e,t),!0))(c);break;case t.READ_FILE:p=(({path:e,encoding:t})=>o.FS.readFile(e,{encoding:t}))(c);break;case t.DELETE_FILE:p=(({path:e})=>(o.FS.unlink(e),!0))(c);break;case t.RENAME:p=(({oldPath:e,newPath:t})=>(o.FS.rename(e,t),!0))(c);break;case t.CREATE_DIR:p=(({path:e})=>(o.FS.mkdir(e),!0))(c);break;case t.LIST_DIR:p=(({path:e})=>{const t=o.FS.readdir(e),r=[];for(const a of t){const t=o.FS.stat(`${e}/${a}`),s=o.FS.isDir(t.mode);r.push({name:a,isDir:s})}return r})(c);break;case t.DELETE_DIR:p=(({path:e})=>(o.FS.rmdir(e),!0))(c);break;case t.MOUNT:p=(({fsType:e,options:t,mountPoint:r})=>{const a=e,s=o.FS.filesystems[a];return!!s&&(o.FS.mount(s,t,r),!0)})(c);break;case t.UNMOUNT:p=(({mountPoint:e})=>(o.FS.unmount(e),!0))(c);break;default:throw r}}catch(e){return void self.postMessage({id:n,type:t.ERROR,data:e.toString()})}p instanceof Uint8Array&&i.push(p.buffer),self.postMessage({id:n,type:E,data:p},i)},{}})())); +//# sourceMappingURL=814.ffmpeg.js.map \ No newline at end of file diff --git a/node_modules/@ffmpeg/ffmpeg/dist/umd/814.ffmpeg.js.map b/node_modules/@ffmpeg/ffmpeg/dist/umd/814.ffmpeg.js.map new file mode 100644 index 0000000..192de85 --- /dev/null +++ b/node_modules/@ffmpeg/ffmpeg/dist/umd/814.ffmpeg.js.map @@ -0,0 +1 @@ +{"version":3,"file":"814.ffmpeg.js","mappings":"CAAA,SAA2CA,EAAMC,GAC1B,iBAAZC,SAA0C,iBAAXC,OACxCA,OAAOD,QAAUD,IACQ,mBAAXG,QAAyBA,OAAOC,IAC9CD,OAAO,GAAIH,GACe,iBAAZC,QACdA,QAAoB,WAAID,IAExBD,EAAiB,WAAIC,GACtB,CATD,CASGK,MAAM,I,mBCTF,MAGMC,EAAW,gEACjB,IAAIC,GACX,SAAWA,GACPA,EAAoB,KAAI,OACxBA,EAAoB,KAAI,OACxBA,EAA0B,WAAI,aAC9BA,EAAyB,UAAI,YAC7BA,EAA2B,YAAI,cAC/BA,EAAsB,OAAI,SAC1BA,EAA0B,WAAI,aAC9BA,EAAwB,SAAI,WAC5BA,EAA0B,WAAI,aAC9BA,EAAqB,MAAI,QACzBA,EAAwB,SAAI,WAC5BA,EAAwB,SAAI,WAC5BA,EAAmB,IAAI,MACvBA,EAAqB,MAAI,QACzBA,EAAuB,QAAI,SAC9B,CAhBD,CAgBGA,IAAkBA,EAAgB,CAAC,ICrB/B,MAAMC,EAA6B,IAAIC,MAAM,wBACvCC,EAAmB,IAAID,MAAM,0DAE7BE,GADmB,IAAIF,MAAM,6BACN,IAAIA,MAAM,oCCE9C,IAAIG,E,OAyFJP,KAAKQ,UAAYC,OAASC,MAAQC,KAAIC,OAAMF,KAAMG,OAC9C,MAAMC,EAAQ,GACd,IAAIJ,EACJ,IACI,GAAIE,IAASV,EAAca,OAASR,EAChC,MAAMF,EACV,OAAQO,GACJ,KAAKV,EAAca,KACfL,OAhGHD,QAASO,QAASC,EAAUC,QAASC,EAAUC,UAAWC,MACnE,MAAMC,GAASf,EACf,IACSU,IACDA,EAAWhB,GAEfsB,cAAcN,EAClB,CACA,MAMI,GALKA,IACDA,EAAWhB,EAASuB,QAAQ,QAAS,UAEzCxB,KAAKyB,wBAA0BC,OACcT,IAAWU,SACnD3B,KAAKyB,iBACN,MAAMnB,CAEd,CACA,MAAMU,EAAUC,EACVC,EAAUC,GAAsBF,EAASO,QAAQ,QAAS,SAC1DJ,EAAYC,GAEZJ,EAASO,QAAQ,QAAS,cAWhC,OAVAjB,QAAeP,KAAKyB,iBAAiB,CAGjCG,oBAAqB,GAAGZ,KAAWa,KAAKC,KAAKC,UAAU,CAAEb,UAASE,mBAEtEb,EAAOyB,WAAWtB,GAASV,KAAKiC,YAAY,CAAErB,KAAMV,EAAcgC,IAAKxB,WACvEH,EAAO4B,aAAazB,GAASV,KAAKiC,YAAY,CAC1CrB,KAAMV,EAAckC,SACpB1B,WAEGY,CAAK,EA+Dae,CAAKxB,GAClB,MACJ,KAAKX,EAAcoC,KACf5B,EAhEH,GAAG6B,OAAMC,WAAU,MAC5BjC,EAAOkC,WAAWD,GAClBjC,EAAOmC,QAAQH,GACf,MAAMI,EAAMpC,EAAOoC,IAEnB,OADApC,EAAOqC,QACAD,CAAG,EA2DSD,CAAK7B,GACZ,MACJ,KAAKX,EAAc2C,WACfnC,EA5DE,GAAGoC,OAAMpC,WACvBH,EAAOwC,GAAGC,UAAUF,EAAMpC,IACnB,GA0DYsC,CAAUnC,GACjB,MACJ,KAAKX,EAAc+C,UACfvC,EA3DC,GAAGoC,OAAMI,cAAe3C,EAAOwC,GAAGI,SAASL,EAAM,CAAEI,aA2D7CC,CAAStC,GAChB,MACJ,KAAKX,EAAckD,YACf1C,EA5DG,GAAGoC,WAClBvC,EAAOwC,GAAGM,OAAOP,IACV,GA0DYQ,CAAWzC,GAClB,MACJ,KAAKX,EAAcqD,OACf7C,EA3DD,GAAG8C,UAASC,cACvBlD,EAAOwC,GAAGW,OAAOF,EAASC,IACnB,GAyDYC,CAAO7C,GACd,MACJ,KAAKX,EAAcyD,WACfjD,EAzDE,GAAGoC,WACjBvC,EAAOwC,GAAGa,MAAMd,IACT,GAuDYe,CAAUhD,GACjB,MACJ,KAAKX,EAAc4D,SACfpD,EAxDA,GAAGoC,WACf,MAAMiB,EAAQxD,EAAOwC,GAAGiB,QAAQlB,GAC1BmB,EAAQ,GACd,IAAK,MAAMC,KAAQH,EAAO,CACtB,MAAMI,EAAO5D,EAAOwC,GAAGoB,KAAK,GAAGrB,KAAQoB,KACjCE,EAAQ7D,EAAOwC,GAA
GqB,MAAMD,EAAKE,MACnCJ,EAAMK,KAAK,CAAEJ,OAAME,SACvB,CACA,OAAOH,CAAK,EAgDOM,CAAQ1D,GACf,MACJ,KAAKX,EAAcsE,WACf9D,EAhDE,GAAGoC,WACjBvC,EAAOwC,GAAG0B,MAAM3B,IACT,GA8CY4B,CAAU7D,GACjB,MACJ,KAAKX,EAAcyE,MACfjE,EA/CF,GAAGkE,SAAQC,UAASC,iBAC9B,MAAMC,EAAMH,EACNI,EAAKzE,EAAOwC,GAAGkC,YAAYF,GACjC,QAAKC,IAELzE,EAAOwC,GAAGmC,MAAMF,EAAIH,EAASC,IACtB,EAAI,EAyCQI,CAAMrE,GACb,MACJ,KAAKX,EAAciF,QACfzE,EA1CA,GAAGoE,iBACfvE,EAAOwC,GAAGqC,QAAQN,IACX,GAwCYM,CAAQvE,GACf,MACJ,QACI,MAAMV,EAElB,CACA,MAAOkF,GAMH,YALArF,KAAKiC,YAAY,CACbtB,KACAC,KAAMV,EAAcoF,MACpB5E,KAAM2E,EAAEE,YAGhB,CACI7E,aAAgB8E,YAChB1E,EAAMwD,KAAK5D,EAAK+E,QAEpBzF,KAAKiC,YAAY,CAAEtB,KAAIC,OAAMF,QAAQI,EAAM,E","sources":["webpack://FFmpegWASM/webpack/universalModuleDefinition","webpack://FFmpegWASM/./dist/esm/const.js","webpack://FFmpegWASM/./dist/esm/errors.js","webpack://FFmpegWASM/./dist/esm/worker.js"],"sourcesContent":["(function webpackUniversalModuleDefinition(root, factory) {\n\tif(typeof exports === 'object' && typeof module === 'object')\n\t\tmodule.exports = factory();\n\telse if(typeof define === 'function' && define.amd)\n\t\tdefine([], factory);\n\telse if(typeof exports === 'object')\n\t\texports[\"FFmpegWASM\"] = factory();\n\telse\n\t\troot[\"FFmpegWASM\"] = factory();\n})(self, () => {\nreturn ","export const MIME_TYPE_JAVASCRIPT = \"text/javascript\";\nexport const MIME_TYPE_WASM = \"application/wasm\";\nexport const CORE_VERSION = \"0.12.6\";\nexport const CORE_URL = `https://unpkg.com/@ffmpeg/core@${CORE_VERSION}/dist/umd/ffmpeg-core.js`;\nexport var FFMessageType;\n(function (FFMessageType) {\n FFMessageType[\"LOAD\"] = \"LOAD\";\n FFMessageType[\"EXEC\"] = \"EXEC\";\n FFMessageType[\"WRITE_FILE\"] = \"WRITE_FILE\";\n FFMessageType[\"READ_FILE\"] = \"READ_FILE\";\n FFMessageType[\"DELETE_FILE\"] = \"DELETE_FILE\";\n FFMessageType[\"RENAME\"] = \"RENAME\";\n FFMessageType[\"CREATE_DIR\"] = \"CREATE_DIR\";\n FFMessageType[\"LIST_DIR\"] = \"LIST_DIR\";\n FFMessageType[\"DELETE_DIR\"] = \"DELETE_DIR\";\n FFMessageType[\"ERROR\"] = \"ERROR\";\n FFMessageType[\"DOWNLOAD\"] = \"DOWNLOAD\";\n FFMessageType[\"PROGRESS\"] = \"PROGRESS\";\n FFMessageType[\"LOG\"] = \"LOG\";\n FFMessageType[\"MOUNT\"] = \"MOUNT\";\n FFMessageType[\"UNMOUNT\"] = \"UNMOUNT\";\n})(FFMessageType || (FFMessageType = {}));\n","export const ERROR_UNKNOWN_MESSAGE_TYPE = new Error(\"unknown message type\");\nexport const ERROR_NOT_LOADED = new Error(\"ffmpeg is not loaded, call `await ffmpeg.load()` first\");\nexport const ERROR_TERMINATED = new Error(\"called FFmpeg.terminate()\");\nexport const ERROR_IMPORT_FAILURE = new Error(\"failed to import ffmpeg-core.js\");\n","/// \n/// \n/// \nimport { CORE_URL, FFMessageType } from \"./const.js\";\nimport { ERROR_UNKNOWN_MESSAGE_TYPE, ERROR_NOT_LOADED, ERROR_IMPORT_FAILURE, } from \"./errors.js\";\nlet ffmpeg;\nconst load = async ({ coreURL: _coreURL, wasmURL: _wasmURL, workerURL: _workerURL, }) => {\n const first = !ffmpeg;\n try {\n if (!_coreURL)\n _coreURL = CORE_URL;\n // when web worker type is `classic`.\n importScripts(_coreURL);\n }\n catch {\n if (!_coreURL)\n _coreURL = CORE_URL.replace('/umd/', '/esm/');\n // when web worker type is `module`.\n self.createFFmpegCore = (await import(\n /* webpackIgnore: true */ /* @vite-ignore */ _coreURL)).default;\n if (!self.createFFmpegCore) {\n throw ERROR_IMPORT_FAILURE;\n }\n }\n const coreURL = _coreURL;\n const wasmURL = _wasmURL ? _wasmURL : _coreURL.replace(/.js$/g, \".wasm\");\n const workerURL = _workerURL\n ? 
_workerURL\n : _coreURL.replace(/.js$/g, \".worker.js\");\n ffmpeg = await self.createFFmpegCore({\n // Fix `Overload resolution failed.` when using multi-threaded ffmpeg-core.\n // Encoded wasmURL and workerURL in the URL as a hack to fix locateFile issue.\n mainScriptUrlOrBlob: `${coreURL}#${btoa(JSON.stringify({ wasmURL, workerURL }))}`,\n });\n ffmpeg.setLogger((data) => self.postMessage({ type: FFMessageType.LOG, data }));\n ffmpeg.setProgress((data) => self.postMessage({\n type: FFMessageType.PROGRESS,\n data,\n }));\n return first;\n};\nconst exec = ({ args, timeout = -1 }) => {\n ffmpeg.setTimeout(timeout);\n ffmpeg.exec(...args);\n const ret = ffmpeg.ret;\n ffmpeg.reset();\n return ret;\n};\nconst writeFile = ({ path, data }) => {\n ffmpeg.FS.writeFile(path, data);\n return true;\n};\nconst readFile = ({ path, encoding }) => ffmpeg.FS.readFile(path, { encoding });\n// TODO: check if deletion works.\nconst deleteFile = ({ path }) => {\n ffmpeg.FS.unlink(path);\n return true;\n};\nconst rename = ({ oldPath, newPath }) => {\n ffmpeg.FS.rename(oldPath, newPath);\n return true;\n};\n// TODO: check if creation works.\nconst createDir = ({ path }) => {\n ffmpeg.FS.mkdir(path);\n return true;\n};\nconst listDir = ({ path }) => {\n const names = ffmpeg.FS.readdir(path);\n const nodes = [];\n for (const name of names) {\n const stat = ffmpeg.FS.stat(`${path}/${name}`);\n const isDir = ffmpeg.FS.isDir(stat.mode);\n nodes.push({ name, isDir });\n }\n return nodes;\n};\n// TODO: check if deletion works.\nconst deleteDir = ({ path }) => {\n ffmpeg.FS.rmdir(path);\n return true;\n};\nconst mount = ({ fsType, options, mountPoint }) => {\n const str = fsType;\n const fs = ffmpeg.FS.filesystems[str];\n if (!fs)\n return false;\n ffmpeg.FS.mount(fs, options, mountPoint);\n return true;\n};\nconst unmount = ({ mountPoint }) => {\n ffmpeg.FS.unmount(mountPoint);\n return true;\n};\nself.onmessage = async ({ data: { id, type, data: _data }, }) => {\n const trans = [];\n let data;\n try {\n if (type !== FFMessageType.LOAD && !ffmpeg)\n throw ERROR_NOT_LOADED; // eslint-disable-line\n switch (type) {\n case FFMessageType.LOAD:\n data = await load(_data);\n break;\n case FFMessageType.EXEC:\n data = exec(_data);\n break;\n case FFMessageType.WRITE_FILE:\n data = writeFile(_data);\n break;\n case FFMessageType.READ_FILE:\n data = readFile(_data);\n break;\n case FFMessageType.DELETE_FILE:\n data = deleteFile(_data);\n break;\n case FFMessageType.RENAME:\n data = rename(_data);\n break;\n case FFMessageType.CREATE_DIR:\n data = createDir(_data);\n break;\n case FFMessageType.LIST_DIR:\n data = listDir(_data);\n break;\n case FFMessageType.DELETE_DIR:\n data = deleteDir(_data);\n break;\n case FFMessageType.MOUNT:\n data = mount(_data);\n break;\n case FFMessageType.UNMOUNT:\n data = unmount(_data);\n break;\n default:\n throw ERROR_UNKNOWN_MESSAGE_TYPE;\n }\n }\n catch (e) {\n self.postMessage({\n id,\n type: FFMessageType.ERROR,\n data: e.toString(),\n });\n return;\n }\n if (data instanceof Uint8Array) {\n trans.push(data.buffer);\n }\n self.postMessage({ id, type, data }, 
trans);\n};\n"],"names":["root","factory","exports","module","define","amd","self","CORE_URL","FFMessageType","ERROR_UNKNOWN_MESSAGE_TYPE","Error","ERROR_NOT_LOADED","ERROR_IMPORT_FAILURE","ffmpeg","onmessage","async","data","id","type","_data","trans","LOAD","coreURL","_coreURL","wasmURL","_wasmURL","workerURL","_workerURL","first","importScripts","replace","createFFmpegCore","import","default","mainScriptUrlOrBlob","btoa","JSON","stringify","setLogger","postMessage","LOG","setProgress","PROGRESS","load","EXEC","args","timeout","setTimeout","exec","ret","reset","WRITE_FILE","path","FS","writeFile","READ_FILE","encoding","readFile","DELETE_FILE","unlink","deleteFile","RENAME","oldPath","newPath","rename","CREATE_DIR","mkdir","createDir","LIST_DIR","names","readdir","nodes","name","stat","isDir","mode","push","listDir","DELETE_DIR","rmdir","deleteDir","MOUNT","fsType","options","mountPoint","str","fs","filesystems","mount","UNMOUNT","unmount","e","ERROR","toString","Uint8Array","buffer"],"sourceRoot":""} \ No newline at end of file diff --git a/node_modules/@ffmpeg/ffmpeg/dist/umd/ffmpeg.js b/node_modules/@ffmpeg/ffmpeg/dist/umd/ffmpeg.js new file mode 100644 index 0000000..05cdf7a --- /dev/null +++ b/node_modules/@ffmpeg/ffmpeg/dist/umd/ffmpeg.js @@ -0,0 +1,2 @@ +!function(e,t){"object"==typeof exports&&"object"==typeof module?module.exports=t():"function"==typeof define&&define.amd?define([],t):"object"==typeof exports?exports.FFmpegWASM=t():e.FFmpegWASM=t()}(self,(()=>(()=>{"use strict";var e={m:{},d:(t,s)=>{for(var r in s)e.o(s,r)&&!e.o(t,r)&&Object.defineProperty(t,r,{enumerable:!0,get:s[r]})},u:e=>e+".ffmpeg.js"};e.g=function(){if("object"==typeof globalThis)return globalThis;try{return this||new Function("return this")()}catch(e){if("object"==typeof window)return window}}(),e.o=(e,t)=>Object.prototype.hasOwnProperty.call(e,t),e.r=e=>{"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},(()=>{var t;e.g.importScripts&&(t=e.g.location+"");var s=e.g.document;if(!t&&s&&(s.currentScript&&(t=s.currentScript.src),!t)){var r=s.getElementsByTagName("script");if(r.length)for(var a=r.length-1;a>-1&&!t;)t=r[a--].src}if(!t)throw new Error("Automatic publicPath is not supported in this browser");t=t.replace(/#.*$/,"").replace(/\?.*$/,"").replace(/\/[^\/]+$/,"/"),e.p=t})(),e.b=document.baseURI||self.location.href;var t,s={};e.r(s),e.d(s,{FFmpeg:()=>i}),function(e){e.LOAD="LOAD",e.EXEC="EXEC",e.WRITE_FILE="WRITE_FILE",e.READ_FILE="READ_FILE",e.DELETE_FILE="DELETE_FILE",e.RENAME="RENAME",e.CREATE_DIR="CREATE_DIR",e.LIST_DIR="LIST_DIR",e.DELETE_DIR="DELETE_DIR",e.ERROR="ERROR",e.DOWNLOAD="DOWNLOAD",e.PROGRESS="PROGRESS",e.LOG="LOG",e.MOUNT="MOUNT",e.UNMOUNT="UNMOUNT"}(t||(t={}));const r=(()=>{let e=0;return()=>e++})(),a=(new Error("unknown message type"),new Error("ffmpeg is not loaded, call `await ffmpeg.load()` first")),o=new Error("called FFmpeg.terminate()");new Error("failed to import ffmpeg-core.js");class i{#e=null;#t={};#s={};#r=[];#a=[];loaded=!1;#o=()=>{this.#e&&(this.#e.onmessage=({data:{id:e,type:s,data:r}})=>{switch(s){case t.LOAD:this.loaded=!0,this.#t[e](r);break;case t.MOUNT:case t.UNMOUNT:case t.EXEC:case t.WRITE_FILE:case t.READ_FILE:case t.DELETE_FILE:case t.RENAME:case t.CREATE_DIR:case t.LIST_DIR:case t.DELETE_DIR:this.#t[e](r);break;case t.LOG:this.#r.forEach((e=>e(r)));break;case t.PROGRESS:this.#a.forEach((e=>e(r)));break;case t.ERROR:this.#s[e](r)}delete this.#t[e],delete 
this.#s[e]})};#i=({type:e,data:t},s=[],o)=>this.#e?new Promise(((a,i)=>{const n=r();this.#e&&this.#e.postMessage({id:n,type:e,data:t},s),this.#t[n]=a,this.#s[n]=i,o?.addEventListener("abort",(()=>{i(new DOMException(`Message # ${n} was aborted`,"AbortError"))}),{once:!0})})):Promise.reject(a);on(e,t){"log"===e?this.#r.push(t):"progress"===e&&this.#a.push(t)}off(e,t){"log"===e?this.#r=this.#r.filter((e=>e!==t)):"progress"===e&&(this.#a=this.#a.filter((e=>e!==t)))}load=({classWorkerURL:s,...r}={},{signal:a}={})=>(this.#e||(this.#e=s?new Worker(new URL(s,"file:///home/jeromewu/ffmpeg.wasm/packages/ffmpeg/dist/esm/classes.js"),{type:"module"}):new Worker(new URL(e.p+e.u(814),e.b),{type:void 0}),this.#o()),this.#i({type:t.LOAD,data:r},void 0,a));exec=(e,s=-1,{signal:r}={})=>this.#i({type:t.EXEC,data:{args:e,timeout:s}},void 0,r);terminate=()=>{const e=Object.keys(this.#s);for(const t of e)this.#s[t](o),delete this.#s[t],delete this.#t[t];this.#e&&(this.#e.terminate(),this.#e=null,this.loaded=!1)};writeFile=(e,s,{signal:r}={})=>{const a=[];return s instanceof Uint8Array&&a.push(s.buffer),this.#i({type:t.WRITE_FILE,data:{path:e,data:s}},a,r)};mount=(e,s,r)=>this.#i({type:t.MOUNT,data:{fsType:e,options:s,mountPoint:r}},[]);unmount=e=>this.#i({type:t.UNMOUNT,data:{mountPoint:e}},[]);readFile=(e,s="binary",{signal:r}={})=>this.#i({type:t.READ_FILE,data:{path:e,encoding:s}},void 0,r);deleteFile=(e,{signal:s}={})=>this.#i({type:t.DELETE_FILE,data:{path:e}},void 0,s);rename=(e,s,{signal:r}={})=>this.#i({type:t.RENAME,data:{oldPath:e,newPath:s}},void 0,r);createDir=(e,{signal:s}={})=>this.#i({type:t.CREATE_DIR,data:{path:e}},void 0,s);listDir=(e,{signal:s}={})=>this.#i({type:t.LIST_DIR,data:{path:e}},void 0,s);deleteDir=(e,{signal:s}={})=>this.#i({type:t.DELETE_DIR,data:{path:e}},void 0,s)}return s})())); +//# sourceMappingURL=ffmpeg.js.map \ No newline at end of file diff --git a/node_modules/@ffmpeg/ffmpeg/dist/umd/ffmpeg.js.map b/node_modules/@ffmpeg/ffmpeg/dist/umd/ffmpeg.js.map new file mode 100644 index 0000000..d328f05 --- /dev/null +++ b/node_modules/@ffmpeg/ffmpeg/dist/umd/ffmpeg.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"ffmpeg.js","mappings":"CAAA,SAA2CA,EAAMC,GAC1B,iBAAZC,SAA0C,iBAAXC,OACxCA,OAAOD,QAAUD,IACQ,mBAAXG,QAAyBA,OAAOC,IAC9CD,OAAO,GAAIH,GACe,iBAAZC,QACdA,QAAoB,WAAID,IAExBD,EAAiB,WAAIC,GACtB,CATD,CASGK,MAAM,I,uBCRLC,EAAsB,CAG1BA,E,GCHAA,EAAwB,CAACL,EAASM,KACjC,IAAI,IAAIC,KAAOD,EACXD,EAAoBG,EAAEF,EAAYC,KAASF,EAAoBG,EAAER,EAASO,IAC5EE,OAAOC,eAAeV,EAASO,EAAK,CAAEI,YAAY,EAAMC,IAAKN,EAAWC,IAE1E,ECLDF,EAAyBQ,GAEZA,EAAU,cCHvBR,EAAoBS,EAAI,WACvB,GAA0B,iBAAfC,WAAyB,OAAOA,WAC3C,IACC,OAAOC,MAAQ,IAAIC,SAAS,cAAb,EAChB,CAAE,MAAOC,GACR,GAAsB,iBAAXC,OAAqB,OAAOA,MACxC,CACA,CAPuB,GCAxBd,EAAoBG,EAAI,CAACY,EAAKC,IAAUZ,OAAOa,UAAUC,eAAeC,KAAKJ,EAAKC,GCClFhB,EAAoBoB,EAAKzB,IACH,oBAAX0B,QAA0BA,OAAOC,aAC1ClB,OAAOC,eAAeV,EAAS0B,OAAOC,YAAa,CAAEC,MAAO,WAE7DnB,OAAOC,eAAeV,EAAS,aAAc,CAAE4B,OAAO,GAAO,E,MCL9D,IAAIC,EACAxB,EAAoBS,EAAEgB,gBAAeD,EAAYxB,EAAoBS,EAAEiB,SAAW,IACtF,IAAIC,EAAW3B,EAAoBS,EAAEkB,SACrC,IAAKH,GAAaG,IACbA,EAASC,gBACZJ,EAAYG,EAASC,cAAcC,MAC/BL,GAAW,CACf,IAAIM,EAAUH,EAASI,qBAAqB,UAC5C,GAAGD,EAAQE,OAEV,IADA,IAAIC,EAAIH,EAAQE,OAAS,EAClBC,GAAK,IAAMT,GAAWA,EAAYM,EAAQG,KAAKJ,GAExD,CAID,IAAKL,EAAW,MAAM,IAAIU,MAAM,yDAChCV,EAAYA,EAAUW,QAAQ,OAAQ,IAAIA,QAAQ,QAAS,IAAIA,QAAQ,YAAa,KACpFnC,EAAoBoC,EAAIZ,C,KClBxBxB,EAAoBqC,EAAIV,SAASW,SAAWvC,KAAK2B,SAASa,K,ICI/CC,E,kCACX,SAAWA,GACPA,EAAoB,KAAI,OACxBA,EAAoB,KAAI,OACxBA,EAA0B,WAAI,aAC9BA,EAAyB,UAAI,YAC7BA,EAA2B,YAAI,cAC/BA,EAAsB,OAAI,SAC1BA,EAA0B,WAAI,aAC9BA,EAAwB,SAAI,WAC5BA,EAA0B,WAAI,aAC9BA,EAAqB,MAAI,QACzBA,EAAwB,SAAI,WAC5BA,EAAwB,SAAI,WAC5BA,EAAmB,IAAI,MACvBA,EAAqB,MAAI,QACzBA,EAAuB,QAAI,SAC9B,CAhBD,CAgBGA,IAAkBA,EAAgB,CAAC,IClB/B,MAAMC,EAAe,MACxB,IAAIC,EAAY,EAChB,MAAO,IAAMA,GAChB,EAH2B,GCFfC,GAD6B,IAAIT,MAAM,wBACpB,IAAIA,MAAM,2DAC7BU,EAAmB,IAAIV,MAAM,6BACN,IAAIA,MAAM,mCCQvC,MAAMW,EACT,GAAU,KAKV,GAAY,CAAC,EACb,GAAW,CAAC,EACZ,GAAqB,GACrB,GAA0B,GAC1BC,QAAS,EAIT,GAAoB,KACZnC,MAAK,IACLA,MAAK,EAAQoC,UAAY,EAAGC,MAAQC,KAAIC,OAAMF,YAC1C,OAAQE,GACJ,KAAKV,EAAcW,KACfxC,KAAKmC,QAAS,EACdnC,MAAK,EAAUsC,GAAID,GACnB,MACJ,KAAKR,EAAcY,MACnB,KAAKZ,EAAca,QACnB,KAAKb,EAAcc,KACnB,KAAKd,EAAce,WACnB,KAAKf,EAAcgB,UACnB,KAAKhB,EAAciB,YACnB,KAAKjB,EAAckB,OACnB,KAAKlB,EAAcmB,WACnB,KAAKnB,EAAcoB,SACnB,KAAKpB,EAAcqB,WACflD,MAAK,EAAUsC,GAAID,GACnB,MACJ,KAAKR,EAAcsB,IACfnD,MAAK,EAAmBoD,SAASC,GAAMA,EAAEhB,KACzC,MACJ,KAAKR,EAAcyB,SACftD,MAAK,EAAwBoD,SAASC,GAAMA,EAAEhB,KAC9C,MACJ,KAAKR,EAAc0B,MACfvD,MAAK,EAASsC,GAAID,UAGnBrC,MAAK,EAAUsC,UACftC,MAAK,EAASsC,EAAG,EAEhC,EAKJ,GAAQ,EAAGC,OAAMF,QAAQmB,EAAQ,GAAIC,IAC5BzD,MAAK,EAGH,IAAI0D,SAAQ,CAACC,EAASC,KACzB,MAAMtB,EAAKR,IACX9B,MAAK,GAAWA,MAAK,EAAQ6D,YAAY,CAAEvB,KAAIC,OAAMF,QAAQmB,GAC7DxD,MAAK,EAAUsC,GAAMqB,EACrB3D,MAAK,EAASsC,GAAMsB,EACpBH,GAAQK,iBAAiB,SAAS,KAC9BF,EAAO,IAAIG,aAAa,aAAazB,gBAAkB,cAAc,GACtE,CAAE0B,MAAM,GAAO,IATXN,QAAQE,OAAO5B,GAY9B,EAAAiC,CAAGC,EAAOC,GACQ,QAAVD,EACAlE,MAAK,EAAmBoE,KAAKD,GAEd,aAAVD,GACLlE,MAAK,EAAwBoE,KAAKD,EAE1C,CACA,GAAAE,CAAIH,EAAOC,GACO,QAAVD,EACAlE,MAAK,EAAqBA,MAAK,EAAmBsE,QAAQjB,GAAMA,IAAMc,IAEvD,aAAVD,IACLlE,MAAK,EAA0BA,MAAK,EAAwBsE,QAAQjB,GAAMA,IAAMc,IAExF,CAQAI,KAAO,EAAGC,oBAAmBC,GAAW,CAAC,GAAKhB,UAAW,CAAC,KACjDzD,MAAK,IACNA,MAAK,EAAUwE,EACX,IAAIE,OAAO,IAAIC,IAAIH,EAAgB,yEAAkB,CACjDjC,KAAM,WAIV,IAAImC,OAAO,IAAIC,IAAI,kBAAiC,CAChDpC,UAAM,IAEdvC,MAAK,KAEFA,MAAK,EAAM,CACduC,KAAMV,EAAcW,KACpBH,KAAMoC,QACPG,EAAWnB,IAsBlBoB,KAAO,CAEPC,EAMAC,GAAU,GAAMtB,UAAW,CAAC,IAAMzD,MAAK,EAAM,CACzCuC,KAAMV,EAAcc,KACpBN,KAAM,CAAEyC,OAAMC,iBACfH,EAAWnB,GAOduB,UAAY,KACR,MAAMC,EAAMxF,OAAOyF,KAAKlF,MAAK,GAE7B,IAAK,MAAMsC,KAAM2C,EACbjF,MAAK,EAASsC,GAAIL,UACXjC,MAAK,EAASsC,UACdtC,MAAK,EAAUsC,GAEtBtC,MAAK,IACLA,MAAK,EAAQgF,YACbhF,
MAAK,EAAU,KACfA,KAAKmC,QAAS,EAClB,EAeJgD,UAAY,CAACC,EAAM/C,GAAQoB,UAAW,CAAC,KACnC,MAAMD,EAAQ,GAId,OAHInB,aAAgBgD,YAChB7B,EAAMY,KAAK/B,EAAKiD,QAEbtF,MAAK,EAAM,CACduC,KAAMV,EAAce,WACpBP,KAAM,CAAE+C,OAAM/C,SACfmB,EAAOC,EAAO,EAErB8B,MAAQ,CAACC,EAAQC,EAASC,IAEf1F,MAAK,EAAM,CACduC,KAAMV,EAAcY,MACpBJ,KAAM,CAAEmD,SAAQC,UAASC,eAHf,IAMlBC,QAAWD,GAEA1F,MAAK,EAAM,CACduC,KAAMV,EAAca,QACpBL,KAAM,CAAEqD,eAHE,IAkBlBE,SAAW,CAACR,EAQZS,EAAW,UAAYpC,UAAW,CAAC,IAAMzD,MAAK,EAAM,CAChDuC,KAAMV,EAAcgB,UACpBR,KAAM,CAAE+C,OAAMS,kBACfjB,EAAWnB,GAMdqC,WAAa,CAACV,GAAQ3B,UAAW,CAAC,IAAMzD,MAAK,EAAM,CAC/CuC,KAAMV,EAAciB,YACpBT,KAAM,CAAE+C,cACTR,EAAWnB,GAMdsC,OAAS,CAACC,EAASC,GAAWxC,UAAW,CAAC,IAAMzD,MAAK,EAAM,CACvDuC,KAAMV,EAAckB,OACpBV,KAAM,CAAE2D,UAASC,iBAClBrB,EAAWnB,GAMdyC,UAAY,CAACd,GAAQ3B,UAAW,CAAC,IAAMzD,MAAK,EAAM,CAC9CuC,KAAMV,EAAcmB,WACpBX,KAAM,CAAE+C,cACTR,EAAWnB,GAMd0C,QAAU,CAACf,GAAQ3B,UAAW,CAAC,IAAMzD,MAAK,EAAM,CAC5CuC,KAAMV,EAAcoB,SACpBZ,KAAM,CAAE+C,cACTR,EAAWnB,GAMd2C,UAAY,CAAChB,GAAQ3B,UAAW,CAAC,IAAMzD,MAAK,EAAM,CAC9CuC,KAAMV,EAAcqB,WACpBb,KAAM,CAAE+C,cACTR,EAAWnB,G","sources":["webpack://FFmpegWASM/webpack/universalModuleDefinition","webpack://FFmpegWASM/webpack/bootstrap","webpack://FFmpegWASM/webpack/runtime/define property getters","webpack://FFmpegWASM/webpack/runtime/get javascript chunk filename","webpack://FFmpegWASM/webpack/runtime/global","webpack://FFmpegWASM/webpack/runtime/hasOwnProperty shorthand","webpack://FFmpegWASM/webpack/runtime/make namespace object","webpack://FFmpegWASM/webpack/runtime/publicPath","webpack://FFmpegWASM/webpack/runtime/jsonp chunk loading","webpack://FFmpegWASM/./dist/esm/const.js","webpack://FFmpegWASM/./dist/esm/utils.js","webpack://FFmpegWASM/./dist/esm/errors.js","webpack://FFmpegWASM/./dist/esm/classes.js"],"sourcesContent":["(function webpackUniversalModuleDefinition(root, factory) {\n\tif(typeof exports === 'object' && typeof module === 'object')\n\t\tmodule.exports = factory();\n\telse if(typeof define === 'function' && define.amd)\n\t\tdefine([], factory);\n\telse if(typeof exports === 'object')\n\t\texports[\"FFmpegWASM\"] = factory();\n\telse\n\t\troot[\"FFmpegWASM\"] = factory();\n})(self, () => {\nreturn ","// The require scope\nvar __webpack_require__ = {};\n\n// expose the modules object (__webpack_modules__)\n__webpack_require__.m = __webpack_modules__;\n\n","// define getter functions for harmony exports\n__webpack_require__.d = (exports, definition) => {\n\tfor(var key in definition) {\n\t\tif(__webpack_require__.o(definition, key) && !__webpack_require__.o(exports, key)) {\n\t\t\tObject.defineProperty(exports, key, { enumerable: true, get: definition[key] });\n\t\t}\n\t}\n};","// This function allow to reference async chunks\n__webpack_require__.u = (chunkId) => {\n\t// return url for filenames based on template\n\treturn \"\" + chunkId + \".ffmpeg.js\";\n};","__webpack_require__.g = (function() {\n\tif (typeof globalThis === 'object') return globalThis;\n\ttry {\n\t\treturn this || new Function('return this')();\n\t} catch (e) {\n\t\tif (typeof window === 'object') return window;\n\t}\n})();","__webpack_require__.o = (obj, prop) => (Object.prototype.hasOwnProperty.call(obj, prop))","// define __esModule on exports\n__webpack_require__.r = (exports) => {\n\tif(typeof Symbol !== 'undefined' && Symbol.toStringTag) {\n\t\tObject.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });\n\t}\n\tObject.defineProperty(exports, '__esModule', { value: true });\n};","var scriptUrl;\nif (__webpack_require__.g.importScripts) scriptUrl = 
__webpack_require__.g.location + \"\";\nvar document = __webpack_require__.g.document;\nif (!scriptUrl && document) {\n\tif (document.currentScript)\n\t\tscriptUrl = document.currentScript.src;\n\tif (!scriptUrl) {\n\t\tvar scripts = document.getElementsByTagName(\"script\");\n\t\tif(scripts.length) {\n\t\t\tvar i = scripts.length - 1;\n\t\t\twhile (i > -1 && !scriptUrl) scriptUrl = scripts[i--].src;\n\t\t}\n\t}\n}\n// When supporting browsers where an automatic publicPath is not supported you must specify an output.publicPath manually via configuration\n// or pass an empty string (\"\") and set the __webpack_public_path__ variable from your code to use your own logic.\nif (!scriptUrl) throw new Error(\"Automatic publicPath is not supported in this browser\");\nscriptUrl = scriptUrl.replace(/#.*$/, \"\").replace(/\\?.*$/, \"\").replace(/\\/[^\\/]+$/, \"/\");\n__webpack_require__.p = scriptUrl;","__webpack_require__.b = document.baseURI || self.location.href;\n\n// object to store loaded and loading chunks\n// undefined = chunk not loaded, null = chunk preloaded/prefetched\n// [resolve, reject, Promise] = chunk loading, 0 = chunk loaded\nvar installedChunks = {\n\t179: 0\n};\n\n// no chunk on demand loading\n\n// no prefetching\n\n// no preloaded\n\n// no HMR\n\n// no HMR manifest\n\n// no on chunks loaded\n\n// no jsonp function","export const MIME_TYPE_JAVASCRIPT = \"text/javascript\";\nexport const MIME_TYPE_WASM = \"application/wasm\";\nexport const CORE_VERSION = \"0.12.6\";\nexport const CORE_URL = `https://unpkg.com/@ffmpeg/core@${CORE_VERSION}/dist/umd/ffmpeg-core.js`;\nexport var FFMessageType;\n(function (FFMessageType) {\n FFMessageType[\"LOAD\"] = \"LOAD\";\n FFMessageType[\"EXEC\"] = \"EXEC\";\n FFMessageType[\"WRITE_FILE\"] = \"WRITE_FILE\";\n FFMessageType[\"READ_FILE\"] = \"READ_FILE\";\n FFMessageType[\"DELETE_FILE\"] = \"DELETE_FILE\";\n FFMessageType[\"RENAME\"] = \"RENAME\";\n FFMessageType[\"CREATE_DIR\"] = \"CREATE_DIR\";\n FFMessageType[\"LIST_DIR\"] = \"LIST_DIR\";\n FFMessageType[\"DELETE_DIR\"] = \"DELETE_DIR\";\n FFMessageType[\"ERROR\"] = \"ERROR\";\n FFMessageType[\"DOWNLOAD\"] = \"DOWNLOAD\";\n FFMessageType[\"PROGRESS\"] = \"PROGRESS\";\n FFMessageType[\"LOG\"] = \"LOG\";\n FFMessageType[\"MOUNT\"] = \"MOUNT\";\n FFMessageType[\"UNMOUNT\"] = \"UNMOUNT\";\n})(FFMessageType || (FFMessageType = {}));\n","/**\n * Generate an unique message ID.\n */\nexport const getMessageID = (() => {\n let messageID = 0;\n return () => messageID++;\n})();\n","export const ERROR_UNKNOWN_MESSAGE_TYPE = new Error(\"unknown message type\");\nexport const ERROR_NOT_LOADED = new Error(\"ffmpeg is not loaded, call `await ffmpeg.load()` first\");\nexport const ERROR_TERMINATED = new Error(\"called FFmpeg.terminate()\");\nexport const ERROR_IMPORT_FAILURE = new Error(\"failed to import ffmpeg-core.js\");\n","import { FFMessageType } from \"./const.js\";\nimport { getMessageID } from \"./utils.js\";\nimport { ERROR_TERMINATED, ERROR_NOT_LOADED } from \"./errors.js\";\n/**\n * Provides APIs to interact with ffmpeg web worker.\n *\n * @example\n * ```ts\n * const ffmpeg = new FFmpeg();\n * ```\n */\nexport class FFmpeg {\n #worker = null;\n /**\n * #resolves and #rejects tracks Promise resolves and rejects to\n * be called when we receive message from web worker.\n */\n #resolves = {};\n #rejects = {};\n #logEventCallbacks = [];\n #progressEventCallbacks = [];\n loaded = false;\n /**\n * register worker message event handlers.\n */\n #registerHandlers = () => {\n if (this.#worker) {\n 
this.#worker.onmessage = ({ data: { id, type, data }, }) => {\n switch (type) {\n case FFMessageType.LOAD:\n this.loaded = true;\n this.#resolves[id](data);\n break;\n case FFMessageType.MOUNT:\n case FFMessageType.UNMOUNT:\n case FFMessageType.EXEC:\n case FFMessageType.WRITE_FILE:\n case FFMessageType.READ_FILE:\n case FFMessageType.DELETE_FILE:\n case FFMessageType.RENAME:\n case FFMessageType.CREATE_DIR:\n case FFMessageType.LIST_DIR:\n case FFMessageType.DELETE_DIR:\n this.#resolves[id](data);\n break;\n case FFMessageType.LOG:\n this.#logEventCallbacks.forEach((f) => f(data));\n break;\n case FFMessageType.PROGRESS:\n this.#progressEventCallbacks.forEach((f) => f(data));\n break;\n case FFMessageType.ERROR:\n this.#rejects[id](data);\n break;\n }\n delete this.#resolves[id];\n delete this.#rejects[id];\n };\n }\n };\n /**\n * Generic function to send messages to web worker.\n */\n #send = ({ type, data }, trans = [], signal) => {\n if (!this.#worker) {\n return Promise.reject(ERROR_NOT_LOADED);\n }\n return new Promise((resolve, reject) => {\n const id = getMessageID();\n this.#worker && this.#worker.postMessage({ id, type, data }, trans);\n this.#resolves[id] = resolve;\n this.#rejects[id] = reject;\n signal?.addEventListener(\"abort\", () => {\n reject(new DOMException(`Message # ${id} was aborted`, \"AbortError\"));\n }, { once: true });\n });\n };\n on(event, callback) {\n if (event === \"log\") {\n this.#logEventCallbacks.push(callback);\n }\n else if (event === \"progress\") {\n this.#progressEventCallbacks.push(callback);\n }\n }\n off(event, callback) {\n if (event === \"log\") {\n this.#logEventCallbacks = this.#logEventCallbacks.filter((f) => f !== callback);\n }\n else if (event === \"progress\") {\n this.#progressEventCallbacks = this.#progressEventCallbacks.filter((f) => f !== callback);\n }\n }\n /**\n * Loads ffmpeg-core inside web worker. 
It is required to call this method first\n * as it initializes WebAssembly and other essential variables.\n *\n * @category FFmpeg\n * @returns `true` if ffmpeg core is loaded for the first time.\n */\n load = ({ classWorkerURL, ...config } = {}, { signal } = {}) => {\n if (!this.#worker) {\n this.#worker = classWorkerURL ?\n new Worker(new URL(classWorkerURL, import.meta.url), {\n type: \"module\",\n }) :\n // We need to duplicated the code here to enable webpack\n // to bundle worekr.js here.\n new Worker(new URL(\"./worker.js\", import.meta.url), {\n type: \"module\",\n });\n this.#registerHandlers();\n }\n return this.#send({\n type: FFMessageType.LOAD,\n data: config,\n }, undefined, signal);\n };\n /**\n * Execute ffmpeg command.\n *\n * @remarks\n * To avoid common I/O issues, [\"-nostdin\", \"-y\"] are prepended to the args\n * by default.\n *\n * @example\n * ```ts\n * const ffmpeg = new FFmpeg();\n * await ffmpeg.load();\n * await ffmpeg.writeFile(\"video.avi\", ...);\n * // ffmpeg -i video.avi video.mp4\n * await ffmpeg.exec([\"-i\", \"video.avi\", \"video.mp4\"]);\n * const data = ffmpeg.readFile(\"video.mp4\");\n * ```\n *\n * @returns `0` if no error, `!= 0` if timeout (1) or error.\n * @category FFmpeg\n */\n exec = (\n /** ffmpeg command line args */\n args, \n /**\n * milliseconds to wait before stopping the command execution.\n *\n * @defaultValue -1\n */\n timeout = -1, { signal } = {}) => this.#send({\n type: FFMessageType.EXEC,\n data: { args, timeout },\n }, undefined, signal);\n /**\n * Terminate all ongoing API calls and terminate web worker.\n * `FFmpeg.load()` must be called again before calling any other APIs.\n *\n * @category FFmpeg\n */\n terminate = () => {\n const ids = Object.keys(this.#rejects);\n // rejects all incomplete Promises.\n for (const id of ids) {\n this.#rejects[id](ERROR_TERMINATED);\n delete this.#rejects[id];\n delete this.#resolves[id];\n }\n if (this.#worker) {\n this.#worker.terminate();\n this.#worker = null;\n this.loaded = false;\n }\n };\n /**\n * Write data to ffmpeg.wasm.\n *\n * @example\n * ```ts\n * const ffmpeg = new FFmpeg();\n * await ffmpeg.load();\n * await ffmpeg.writeFile(\"video.avi\", await fetchFile(\"../video.avi\"));\n * await ffmpeg.writeFile(\"text.txt\", \"hello world\");\n * ```\n *\n * @category File System\n */\n writeFile = (path, data, { signal } = {}) => {\n const trans = [];\n if (data instanceof Uint8Array) {\n trans.push(data.buffer);\n }\n return this.#send({\n type: FFMessageType.WRITE_FILE,\n data: { path, data },\n }, trans, signal);\n };\n mount = (fsType, options, mountPoint) => {\n const trans = [];\n return this.#send({\n type: FFMessageType.MOUNT,\n data: { fsType, options, mountPoint },\n }, trans);\n };\n unmount = (mountPoint) => {\n const trans = [];\n return this.#send({\n type: FFMessageType.UNMOUNT,\n data: { mountPoint },\n }, trans);\n };\n /**\n * Read data from ffmpeg.wasm.\n *\n * @example\n * ```ts\n * const ffmpeg = new FFmpeg();\n * await ffmpeg.load();\n * const data = await ffmpeg.readFile(\"video.mp4\");\n * ```\n *\n * @category File System\n */\n readFile = (path, \n /**\n * File content encoding, supports two encodings:\n * - utf8: read file as text file, return data in string type.\n * - binary: read file as binary file, return data in Uint8Array type.\n *\n * @defaultValue binary\n */\n encoding = \"binary\", { signal } = {}) => this.#send({\n type: FFMessageType.READ_FILE,\n data: { path, encoding },\n }, undefined, signal);\n /**\n * Delete a file.\n *\n * @category File 
System\n */\n deleteFile = (path, { signal } = {}) => this.#send({\n type: FFMessageType.DELETE_FILE,\n data: { path },\n }, undefined, signal);\n /**\n * Rename a file or directory.\n *\n * @category File System\n */\n rename = (oldPath, newPath, { signal } = {}) => this.#send({\n type: FFMessageType.RENAME,\n data: { oldPath, newPath },\n }, undefined, signal);\n /**\n * Create a directory.\n *\n * @category File System\n */\n createDir = (path, { signal } = {}) => this.#send({\n type: FFMessageType.CREATE_DIR,\n data: { path },\n }, undefined, signal);\n /**\n * List directory contents.\n *\n * @category File System\n */\n listDir = (path, { signal } = {}) => this.#send({\n type: FFMessageType.LIST_DIR,\n data: { path },\n }, undefined, signal);\n /**\n * Delete an empty directory.\n *\n * @category File System\n */\n deleteDir = (path, { signal } = {}) => this.#send({\n type: FFMessageType.DELETE_DIR,\n data: { path },\n }, undefined, signal);\n}\n"],"names":["root","factory","exports","module","define","amd","self","__webpack_require__","definition","key","o","Object","defineProperty","enumerable","get","chunkId","g","globalThis","this","Function","e","window","obj","prop","prototype","hasOwnProperty","call","r","Symbol","toStringTag","value","scriptUrl","importScripts","location","document","currentScript","src","scripts","getElementsByTagName","length","i","Error","replace","p","b","baseURI","href","FFMessageType","getMessageID","messageID","ERROR_NOT_LOADED","ERROR_TERMINATED","FFmpeg","loaded","onmessage","data","id","type","LOAD","MOUNT","UNMOUNT","EXEC","WRITE_FILE","READ_FILE","DELETE_FILE","RENAME","CREATE_DIR","LIST_DIR","DELETE_DIR","LOG","forEach","f","PROGRESS","ERROR","trans","signal","Promise","resolve","reject","postMessage","addEventListener","DOMException","once","on","event","callback","push","off","filter","load","classWorkerURL","config","Worker","URL","undefined","exec","args","timeout","terminate","ids","keys","writeFile","path","Uint8Array","buffer","mount","fsType","options","mountPoint","unmount","readFile","encoding","deleteFile","rename","oldPath","newPath","createDir","listDir","deleteDir"],"sourceRoot":""} \ No newline at end of file diff --git a/node_modules/@ffmpeg/ffmpeg/package.json b/node_modules/@ffmpeg/ffmpeg/package.json new file mode 100644 index 0000000..8dbc8c0 --- /dev/null +++ b/node_modules/@ffmpeg/ffmpeg/package.json @@ -0,0 +1,64 @@ +{ + "name": "@ffmpeg/ffmpeg", + "version": "0.12.10", + "description": "FFmpeg WebAssembly version for browser", + "main": "./dist/umd/ffmpeg.js", + "types": "./dist/esm/index.d.ts", + "exports": { + ".": { + "types": "./dist/esm/index.d.ts", + "node": "./dist/esm/empty.mjs", + "default": { + "import": "./dist/esm/index.js", + "require": "./dist/umd/ffmpeg.js" + } + } + }, + "scripts": { + "dev": "webpack -w --mode development", + "lint": "eslint src", + "clean": "rimraf dist", + "build:esm": "tsc -p tsconfig.esm.json", + "build:umd": "webpack", + "build": "npm run clean && npm run build:esm && npm run build:umd", + "prepublishOnly": "npm run build" + }, + "files": [ + "dist", + "types/ffmpeg.d.ts" + ], + "repository": { + "type": "git", + "url": "git+https://github.com/ffmpegwasm/ffmpeg.wasm.git" + }, + "keywords": [ + "ffmpeg", + "WebAssembly", + "video", + "audio", + "transcode" + ], + "author": "Jerome Wu ", + "license": "MIT", + "bugs": { + "url": "https://github.com/ffmpegwasm/ffmpeg.wasm/issues" + }, + "engines": { + "node": ">=18.x" + }, + "homepage": "https://github.com/ffmpegwasm/ffmpeg.wasm#readme", 
+ "publishConfig": { + "access": "public" + }, + "devDependencies": { + "@typescript-eslint/eslint-plugin": "^6.1.0", + "@typescript-eslint/parser": "^6.1.0", + "eslint": "^8.45.0", + "rimraf": "^5.0.1", + "typescript": "^5.1.6", + "webpack-cli": "^5.1.4" + }, + "dependencies": { + "@ffmpeg/types": "^0.12.2" + } +} diff --git a/node_modules/@ffmpeg/types/package.json b/node_modules/@ffmpeg/types/package.json new file mode 100644 index 0000000..65397a3 --- /dev/null +++ b/node_modules/@ffmpeg/types/package.json @@ -0,0 +1,32 @@ +{ + "name": "@ffmpeg/types", + "version": "0.12.2", + "description": "ffmpeg.wasm types", + "types": "types", + "files": [ + "types" + ], + "repository": { + "type": "git", + "url": "git+https://github.com/ffmpegwasm/ffmpeg.wasm.git" + }, + "keywords": [ + "ffmpeg", + "WebAssembly", + "video", + "audio", + "transcode" + ], + "author": "Jerome Wu ", + "license": "MIT", + "bugs": { + "url": "https://github.com/ffmpegwasm/ffmpeg.wasm/issues" + }, + "engines": { + "node": ">=16.x" + }, + "homepage": "https://github.com/ffmpegwasm/ffmpeg.wasm#readme", + "publishConfig": { + "access": "public" + } +} diff --git a/node_modules/@ffmpeg/types/types/index.d.ts b/node_modules/@ffmpeg/types/types/index.d.ts new file mode 100644 index 0000000..d3cd4fb --- /dev/null +++ b/node_modules/@ffmpeg/types/types/index.d.ts @@ -0,0 +1,131 @@ +// TODO: Add lint and test. + +export type Pointer = number; + +export type StringPointer = Pointer; +export type StringArrayPointer = Pointer; +export type DateString = string; + +/** + * Options for readFile. + * + * @see [Emscripten File System API](https://emscripten.org/docs/api_reference/Filesystem-API.html#FS.readFile) + * @category File System + */ +export interface ReadFileOptions { + /** encoding of the file, must be `binary` or `utf8` */ + encdoing: string; +} + +/** + * Describes attributes of a node. (a.k.a file, directory) + * + * @see [Emscripten File System API](https://emscripten.org/docs/api_reference/Filesystem-API.html#FS.stat) + * @category File System + */ +export interface Stat { + dev: number; + ino: number; + mode: number; + nlink: number; + uid: number; + gid: number; + rdev: number; + size: number; + atime: DateString; + mtime: DateString; + ctime: DateString; + blksize: number; + blocks: number; +} + +export interface FSFilesystemWORKERFS { + +} + +export interface FSFilesystemMEMFS { + +} + +export interface FSFilesystems { + WORKERFS: FSFilesystemWORKERFS; + MEMFS: FSFilesystemMEMFS; +} + +export type FSFilesystem = +| FSFilesystemWORKERFS +| FSFilesystemMEMFS; + +/** + * Functions to interact with Emscripten FS library. 
+ * + * @see [Emscripten File System API](https://emscripten.org/docs/api_reference/Filesystem-API.html) + * @category File System + */ +export interface FS { + mkdir: (path: string) => void; + rmdir: (path: string) => void; + rename: (oldPath: string, newPath: string) => void; + writeFile: (path: string, data: Uint8Array | string) => void; + readFile: (path: string, opts: OptionReadFile) => Uint8Array | string; + readdir: (path: string) => string[]; + unlink: (path: string) => void; + stat: (path: string) => Stat; + /** mode is a numeric notation of permission, @see [Numeric Notation](https://en.wikipedia.org/wiki/File-system_permissions#Numeric_notation) */ + isFile: (mode: number) => boolean; + /** mode is a numeric notation of permission, @see [Numeric Notation](https://en.wikipedia.org/wiki/File-system_permissions#Numeric_notation) */ + isDir: (mode: number) => boolean; + mount: (fileSystemType: FSFilesystem, data: WorkerFSMountConfig, path: string) => void; + unmount: (path: string) => void; + filesystems: FSFilesystems; +} + +/** + * Arguments passed to setLogger callback function. + */ +export interface Log { + /** file descriptor of the log, must be `stdout` or `stderr` */ + type: string; + message: string; +} + +/** + * Arguments passed to setProgress callback function. + */ +export interface Progress { + /** progress of the operation, interval = [0, 1] */ + progress: number; + /** time of transcoded media in microseconds, ex: if a video is 10 seconds long, when time is 1000000 means 1 second of the video is transcoded already. */ + time: number; +} + +/** + * FFmpeg core module, an object to interact with ffmpeg. + */ +export interface FFmpegCoreModule { + /** default arguments prepend when running exec() */ + DEFAULT_ARGS: string[]; + FS: FS; + NULL: Pointer; + SIZE_I32: number; + + /** return code of the ffmpeg exec, error when ret != 0 */ + ret: number; + timeout: number; + mainScriptUrlOrBlob: string; + + exec: (...args: string[]) => number; + reset: () => void; + setLogger: (logger: (log: Log) => void) => void; + setTimeout: (timeout: number) => void; + setProgress: (handler: (progress: Progress) => void) => void; + + locateFile: (path: string, prefix: string) => string; +} + +/** + * Factory of FFmpegCoreModule. 
+ */ +export type FFmpegCoreModuleFactory = ( + moduleOverrides?: Partial +) => Promise; diff --git a/node_modules/@ffmpeg/util/dist/cjs/const.d.ts b/node_modules/@ffmpeg/util/dist/cjs/const.d.ts new file mode 100644 index 0000000..e4a71b1 --- /dev/null +++ b/node_modules/@ffmpeg/util/dist/cjs/const.d.ts @@ -0,0 +1 @@ +export declare const HeaderContentLength = "Content-Length"; diff --git a/node_modules/@ffmpeg/util/dist/cjs/const.js b/node_modules/@ffmpeg/util/dist/cjs/const.js new file mode 100644 index 0000000..9acb10e --- /dev/null +++ b/node_modules/@ffmpeg/util/dist/cjs/const.js @@ -0,0 +1,4 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.HeaderContentLength = void 0; +exports.HeaderContentLength = "Content-Length"; diff --git a/node_modules/@ffmpeg/util/dist/cjs/errors.d.ts b/node_modules/@ffmpeg/util/dist/cjs/errors.d.ts new file mode 100644 index 0000000..1cc685f --- /dev/null +++ b/node_modules/@ffmpeg/util/dist/cjs/errors.d.ts @@ -0,0 +1,2 @@ +export declare const ERROR_RESPONSE_BODY_READER: Error; +export declare const ERROR_INCOMPLETED_DOWNLOAD: Error; diff --git a/node_modules/@ffmpeg/util/dist/cjs/errors.js b/node_modules/@ffmpeg/util/dist/cjs/errors.js new file mode 100644 index 0000000..e4a5fff --- /dev/null +++ b/node_modules/@ffmpeg/util/dist/cjs/errors.js @@ -0,0 +1,5 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ERROR_INCOMPLETED_DOWNLOAD = exports.ERROR_RESPONSE_BODY_READER = void 0; +exports.ERROR_RESPONSE_BODY_READER = new Error("failed to get response body reader"); +exports.ERROR_INCOMPLETED_DOWNLOAD = new Error("failed to complete download"); diff --git a/node_modules/@ffmpeg/util/dist/cjs/index.d.ts b/node_modules/@ffmpeg/util/dist/cjs/index.d.ts new file mode 100644 index 0000000..fb0ed59 --- /dev/null +++ b/node_modules/@ffmpeg/util/dist/cjs/index.d.ts @@ -0,0 +1,50 @@ +import { ProgressCallback } from "./types.js"; +/** + * An util function to fetch data from url string, base64, URL, File or Blob format. + * + * Examples: + * ```ts + * // URL + * await fetchFile("http://localhost:3000/video.mp4"); + * // base64 + * await fetchFile("data:;base64,wL2dvYWwgbW9yZ..."); + * // URL + * await fetchFile(new URL("video.mp4", import.meta.url)); + * // File + * fileInput.addEventListener('change', (e) => { + * await fetchFile(e.target.files[0]); + * }); + * // Blob + * const blob = new Blob(...); + * await fetchFile(blob); + * ``` + */ +export declare const fetchFile: (file?: string | File | Blob) => Promise; +/** + * importScript dynamically import a script, useful when you + * want to use different versions of ffmpeg.wasm based on environment. + * + * Example: + * + * ```ts + * await importScript("http://localhost:3000/ffmpeg.js"); + * ``` + */ +export declare const importScript: (url: string) => Promise; +/** + * Download content of a URL with progress. + * + * Progress only works when Content-Length is provided by the server. + * + */ +export declare const downloadWithProgress: (url: string | URL, cb?: ProgressCallback) => Promise; +/** + * toBlobURL fetches data from an URL and return a blob URL. 
+ * + * Example: + * + * ```ts + * await toBlobURL("http://localhost:3000/ffmpeg.js", "text/javascript"); + * ``` + */ +export declare const toBlobURL: (url: string, mimeType: string, progress?: boolean, cb?: ProgressCallback) => Promise; diff --git a/node_modules/@ffmpeg/util/dist/cjs/index.js b/node_modules/@ffmpeg/util/dist/cjs/index.js new file mode 100644 index 0000000..59f01cf --- /dev/null +++ b/node_modules/@ffmpeg/util/dist/cjs/index.js @@ -0,0 +1,173 @@ +"use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.toBlobURL = exports.downloadWithProgress = exports.importScript = exports.fetchFile = void 0; +const errors_js_1 = require("./errors.js"); +const const_js_1 = require("./const.js"); +const readFromBlobOrFile = (blob) => new Promise((resolve, reject) => { + const fileReader = new FileReader(); + fileReader.onload = () => { + const { result } = fileReader; + if (result instanceof ArrayBuffer) { + resolve(new Uint8Array(result)); + } + else { + resolve(new Uint8Array()); + } + }; + fileReader.onerror = (event) => { + var _a, _b; + reject(Error(`File could not be read! Code=${((_b = (_a = event === null || event === void 0 ? void 0 : event.target) === null || _a === void 0 ? void 0 : _a.error) === null || _b === void 0 ? void 0 : _b.code) || -1}`)); + }; + fileReader.readAsArrayBuffer(blob); +}); +/** + * An util function to fetch data from url string, base64, URL, File or Blob format. + * + * Examples: + * ```ts + * // URL + * await fetchFile("http://localhost:3000/video.mp4"); + * // base64 + * await fetchFile("data:;base64,wL2dvYWwgbW9yZ..."); + * // URL + * await fetchFile(new URL("video.mp4", import.meta.url)); + * // File + * fileInput.addEventListener('change', (e) => { + * await fetchFile(e.target.files[0]); + * }); + * // Blob + * const blob = new Blob(...); + * await fetchFile(blob); + * ``` + */ +const fetchFile = (file) => __awaiter(void 0, void 0, void 0, function* () { + let data; + if (typeof file === "string") { + /* From base64 format */ + if (/data:_data\/([a-zA-Z]*);base64,([^"]*)/.test(file)) { + data = atob(file.split(",")[1]) + .split("") + .map((c) => c.charCodeAt(0)); + /* From remote server/URL */ + } + else { + data = yield (yield fetch(file)).arrayBuffer(); + } + } + else if (file instanceof URL) { + data = yield (yield fetch(file)).arrayBuffer(); + } + else if (file instanceof File || file instanceof Blob) { + data = yield readFromBlobOrFile(file); + } + else { + return new Uint8Array(); + } + return new Uint8Array(data); +}); +exports.fetchFile = fetchFile; +/** + * importScript dynamically import a script, useful when you + * want to use different versions of ffmpeg.wasm based on environment. 
+ * + * Example: + * + * ```ts + * await importScript("http://localhost:3000/ffmpeg.js"); + * ``` + */ +const importScript = (url) => __awaiter(void 0, void 0, void 0, function* () { + return new Promise((resolve) => { + const script = document.createElement("script"); + const eventHandler = () => { + script.removeEventListener("load", eventHandler); + resolve(); + }; + script.src = url; + script.type = "text/javascript"; + script.addEventListener("load", eventHandler); + document.getElementsByTagName("head")[0].appendChild(script); + }); +}); +exports.importScript = importScript; +/** + * Download content of a URL with progress. + * + * Progress only works when Content-Length is provided by the server. + * + */ +const downloadWithProgress = (url, cb) => __awaiter(void 0, void 0, void 0, function* () { + var _a; + const resp = yield fetch(url); + let buf; + try { + // Set total to -1 to indicate that there is not Content-Type Header. + const total = parseInt(resp.headers.get(const_js_1.HeaderContentLength) || "-1"); + const reader = (_a = resp.body) === null || _a === void 0 ? void 0 : _a.getReader(); + if (!reader) + throw errors_js_1.ERROR_RESPONSE_BODY_READER; + const chunks = []; + let received = 0; + for (;;) { + const { done, value } = yield reader.read(); + const delta = value ? value.length : 0; + if (done) { + if (total != -1 && total !== received) + throw errors_js_1.ERROR_INCOMPLETED_DOWNLOAD; + cb && cb({ url, total, received, delta, done }); + break; + } + chunks.push(value); + received += delta; + cb && cb({ url, total, received, delta, done }); + } + const data = new Uint8Array(received); + let position = 0; + for (const chunk of chunks) { + data.set(chunk, position); + position += chunk.length; + } + buf = data.buffer; + } + catch (e) { + console.log(`failed to send download progress event: `, e); + // Fetch arrayBuffer directly when it is not possible to get progress. + buf = yield resp.arrayBuffer(); + cb && + cb({ + url, + total: buf.byteLength, + received: buf.byteLength, + delta: 0, + done: true, + }); + } + return buf; +}); +exports.downloadWithProgress = downloadWithProgress; +/** + * toBlobURL fetches data from an URL and return a blob URL. + * + * Example: + * + * ```ts + * await toBlobURL("http://localhost:3000/ffmpeg.js", "text/javascript"); + * ``` + */ +const toBlobURL = (url, mimeType, progress = false, cb) => __awaiter(void 0, void 0, void 0, function* () { + const buf = progress + ? 
yield (0, exports.downloadWithProgress)(url, cb) + : yield (yield fetch(url)).arrayBuffer(); + const blob = new Blob([buf], { type: mimeType }); + return URL.createObjectURL(blob); +}); +exports.toBlobURL = toBlobURL; diff --git a/node_modules/@ffmpeg/util/dist/cjs/types.d.ts b/node_modules/@ffmpeg/util/dist/cjs/types.d.ts new file mode 100644 index 0000000..4d420ef --- /dev/null +++ b/node_modules/@ffmpeg/util/dist/cjs/types.d.ts @@ -0,0 +1,8 @@ +export interface DownloadProgressEvent { + url: string | URL; + total: number; + received: number; + delta: number; + done: boolean; +} +export type ProgressCallback = (event: DownloadProgressEvent) => void; diff --git a/node_modules/@ffmpeg/util/dist/cjs/types.js b/node_modules/@ffmpeg/util/dist/cjs/types.js new file mode 100644 index 0000000..c8ad2e5 --- /dev/null +++ b/node_modules/@ffmpeg/util/dist/cjs/types.js @@ -0,0 +1,2 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/@ffmpeg/util/dist/esm/const.d.ts b/node_modules/@ffmpeg/util/dist/esm/const.d.ts new file mode 100644 index 0000000..e4a71b1 --- /dev/null +++ b/node_modules/@ffmpeg/util/dist/esm/const.d.ts @@ -0,0 +1 @@ +export declare const HeaderContentLength = "Content-Length"; diff --git a/node_modules/@ffmpeg/util/dist/esm/const.js b/node_modules/@ffmpeg/util/dist/esm/const.js new file mode 100644 index 0000000..5d0cd85 --- /dev/null +++ b/node_modules/@ffmpeg/util/dist/esm/const.js @@ -0,0 +1 @@ +export const HeaderContentLength = "Content-Length"; diff --git a/node_modules/@ffmpeg/util/dist/esm/errors.d.ts b/node_modules/@ffmpeg/util/dist/esm/errors.d.ts new file mode 100644 index 0000000..1cc685f --- /dev/null +++ b/node_modules/@ffmpeg/util/dist/esm/errors.d.ts @@ -0,0 +1,2 @@ +export declare const ERROR_RESPONSE_BODY_READER: Error; +export declare const ERROR_INCOMPLETED_DOWNLOAD: Error; diff --git a/node_modules/@ffmpeg/util/dist/esm/errors.js b/node_modules/@ffmpeg/util/dist/esm/errors.js new file mode 100644 index 0000000..57250d8 --- /dev/null +++ b/node_modules/@ffmpeg/util/dist/esm/errors.js @@ -0,0 +1,2 @@ +export const ERROR_RESPONSE_BODY_READER = new Error("failed to get response body reader"); +export const ERROR_INCOMPLETED_DOWNLOAD = new Error("failed to complete download"); diff --git a/node_modules/@ffmpeg/util/dist/esm/index.d.ts b/node_modules/@ffmpeg/util/dist/esm/index.d.ts new file mode 100644 index 0000000..fb0ed59 --- /dev/null +++ b/node_modules/@ffmpeg/util/dist/esm/index.d.ts @@ -0,0 +1,50 @@ +import { ProgressCallback } from "./types.js"; +/** + * An util function to fetch data from url string, base64, URL, File or Blob format. + * + * Examples: + * ```ts + * // URL + * await fetchFile("http://localhost:3000/video.mp4"); + * // base64 + * await fetchFile("data:;base64,wL2dvYWwgbW9yZ..."); + * // URL + * await fetchFile(new URL("video.mp4", import.meta.url)); + * // File + * fileInput.addEventListener('change', (e) => { + * await fetchFile(e.target.files[0]); + * }); + * // Blob + * const blob = new Blob(...); + * await fetchFile(blob); + * ``` + */ +export declare const fetchFile: (file?: string | File | Blob) => Promise; +/** + * importScript dynamically import a script, useful when you + * want to use different versions of ffmpeg.wasm based on environment. + * + * Example: + * + * ```ts + * await importScript("http://localhost:3000/ffmpeg.js"); + * ``` + */ +export declare const importScript: (url: string) => Promise; +/** + * Download content of a URL with progress. 
+ * + * Progress only works when Content-Length is provided by the server. + * + */ +export declare const downloadWithProgress: (url: string | URL, cb?: ProgressCallback) => Promise; +/** + * toBlobURL fetches data from an URL and return a blob URL. + * + * Example: + * + * ```ts + * await toBlobURL("http://localhost:3000/ffmpeg.js", "text/javascript"); + * ``` + */ +export declare const toBlobURL: (url: string, mimeType: string, progress?: boolean, cb?: ProgressCallback) => Promise; diff --git a/node_modules/@ffmpeg/util/dist/esm/index.js b/node_modules/@ffmpeg/util/dist/esm/index.js new file mode 100644 index 0000000..d96a383 --- /dev/null +++ b/node_modules/@ffmpeg/util/dist/esm/index.js @@ -0,0 +1,153 @@ +import { ERROR_RESPONSE_BODY_READER, ERROR_INCOMPLETED_DOWNLOAD, } from "./errors.js"; +import { HeaderContentLength } from "./const.js"; +const readFromBlobOrFile = (blob) => new Promise((resolve, reject) => { + const fileReader = new FileReader(); + fileReader.onload = () => { + const { result } = fileReader; + if (result instanceof ArrayBuffer) { + resolve(new Uint8Array(result)); + } + else { + resolve(new Uint8Array()); + } + }; + fileReader.onerror = (event) => { + reject(Error(`File could not be read! Code=${event?.target?.error?.code || -1}`)); + }; + fileReader.readAsArrayBuffer(blob); +}); +/** + * An util function to fetch data from url string, base64, URL, File or Blob format. + * + * Examples: + * ```ts + * // URL + * await fetchFile("http://localhost:3000/video.mp4"); + * // base64 + * await fetchFile("data:;base64,wL2dvYWwgbW9yZ..."); + * // URL + * await fetchFile(new URL("video.mp4", import.meta.url)); + * // File + * fileInput.addEventListener('change', (e) => { + * await fetchFile(e.target.files[0]); + * }); + * // Blob + * const blob = new Blob(...); + * await fetchFile(blob); + * ``` + */ +export const fetchFile = async (file) => { + let data; + if (typeof file === "string") { + /* From base64 format */ + if (/data:_data\/([a-zA-Z]*);base64,([^"]*)/.test(file)) { + data = atob(file.split(",")[1]) + .split("") + .map((c) => c.charCodeAt(0)); + /* From remote server/URL */ + } + else { + data = await (await fetch(file)).arrayBuffer(); + } + } + else if (file instanceof URL) { + data = await (await fetch(file)).arrayBuffer(); + } + else if (file instanceof File || file instanceof Blob) { + data = await readFromBlobOrFile(file); + } + else { + return new Uint8Array(); + } + return new Uint8Array(data); +}; +/** + * importScript dynamically import a script, useful when you + * want to use different versions of ffmpeg.wasm based on environment. + * + * Example: + * + * ```ts + * await importScript("http://localhost:3000/ffmpeg.js"); + * ``` + */ +export const importScript = async (url) => new Promise((resolve) => { + const script = document.createElement("script"); + const eventHandler = () => { + script.removeEventListener("load", eventHandler); + resolve(); + }; + script.src = url; + script.type = "text/javascript"; + script.addEventListener("load", eventHandler); + document.getElementsByTagName("head")[0].appendChild(script); +}); +/** + * Download content of a URL with progress. + * + * Progress only works when Content-Length is provided by the server. + * + */ +export const downloadWithProgress = async (url, cb) => { + const resp = await fetch(url); + let buf; + try { + // Set total to -1 to indicate that there is not Content-Type Header. 
+ const total = parseInt(resp.headers.get(HeaderContentLength) || "-1"); + const reader = resp.body?.getReader(); + if (!reader) + throw ERROR_RESPONSE_BODY_READER; + const chunks = []; + let received = 0; + for (;;) { + const { done, value } = await reader.read(); + const delta = value ? value.length : 0; + if (done) { + if (total != -1 && total !== received) + throw ERROR_INCOMPLETED_DOWNLOAD; + cb && cb({ url, total, received, delta, done }); + break; + } + chunks.push(value); + received += delta; + cb && cb({ url, total, received, delta, done }); + } + const data = new Uint8Array(received); + let position = 0; + for (const chunk of chunks) { + data.set(chunk, position); + position += chunk.length; + } + buf = data.buffer; + } + catch (e) { + console.log(`failed to send download progress event: `, e); + // Fetch arrayBuffer directly when it is not possible to get progress. + buf = await resp.arrayBuffer(); + cb && + cb({ + url, + total: buf.byteLength, + received: buf.byteLength, + delta: 0, + done: true, + }); + } + return buf; +}; +/** + * toBlobURL fetches data from an URL and return a blob URL. + * + * Example: + * + * ```ts + * await toBlobURL("http://localhost:3000/ffmpeg.js", "text/javascript"); + * ``` + */ +export const toBlobURL = async (url, mimeType, progress = false, cb) => { + const buf = progress + ? await downloadWithProgress(url, cb) + : await (await fetch(url)).arrayBuffer(); + const blob = new Blob([buf], { type: mimeType }); + return URL.createObjectURL(blob); +}; diff --git a/node_modules/@ffmpeg/util/dist/esm/types.d.ts b/node_modules/@ffmpeg/util/dist/esm/types.d.ts new file mode 100644 index 0000000..4d420ef --- /dev/null +++ b/node_modules/@ffmpeg/util/dist/esm/types.d.ts @@ -0,0 +1,8 @@ +export interface DownloadProgressEvent { + url: string | URL; + total: number; + received: number; + delta: number; + done: boolean; +} +export type ProgressCallback = (event: DownloadProgressEvent) => void; diff --git a/node_modules/@ffmpeg/util/dist/esm/types.js b/node_modules/@ffmpeg/util/dist/esm/types.js new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/node_modules/@ffmpeg/util/dist/esm/types.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@ffmpeg/util/dist/umd/index.js b/node_modules/@ffmpeg/util/dist/umd/index.js new file mode 100644 index 0000000..e3fae0b --- /dev/null +++ b/node_modules/@ffmpeg/util/dist/umd/index.js @@ -0,0 +1 @@ +!function(e,t){"object"==typeof exports&&"object"==typeof module?module.exports=t():"function"==typeof define&&define.amd?define([],t):"object"==typeof exports?exports.FFmpegUtil=t():e.FFmpegUtil=t()}(self,(()=>(()=>{"use strict";var e={591:(e,t)=>{Object.defineProperty(t,"__esModule",{value:!0}),t.HeaderContentLength=void 0,t.HeaderContentLength="Content-Length"},431:(e,t)=>{Object.defineProperty(t,"__esModule",{value:!0}),t.ERROR_INCOMPLETED_DOWNLOAD=t.ERROR_RESPONSE_BODY_READER=void 0,t.ERROR_RESPONSE_BODY_READER=new Error("failed to get response body reader"),t.ERROR_INCOMPLETED_DOWNLOAD=new Error("failed to complete download")},915:function(e,t,o){var r=this&&this.__awaiter||function(e,t,o,r){return new(o||(o=Promise))((function(n,i){function d(e){try{l(r.next(e))}catch(e){i(e)}}function a(e){try{l(r.throw(e))}catch(e){i(e)}}function l(e){var t;e.done?n(e.value):(t=e.value,t instanceof o?t:new o((function(e){e(t)}))).then(d,a)}l((r=r.apply(e,t||[])).next())}))};Object.defineProperty(t,"__esModule",{value:!0}),t.toBlobURL=t.downloadWithProgress=t.importScript=t.fetchFile=void 0;const 
n=o(431),i=o(591);t.fetchFile=e=>r(void 0,void 0,void 0,(function*(){let t;if("string"==typeof e)t=/data:_data\/([a-zA-Z]*);base64,([^"]*)/.test(e)?atob(e.split(",")[1]).split("").map((e=>e.charCodeAt(0))):yield(yield fetch(e)).arrayBuffer();else if(e instanceof URL)t=yield(yield fetch(e)).arrayBuffer();else{if(!(e instanceof File||e instanceof Blob))return new Uint8Array;t=yield(o=e,new Promise(((e,t)=>{const r=new FileReader;r.onload=()=>{const{result:t}=r;t instanceof ArrayBuffer?e(new Uint8Array(t)):e(new Uint8Array)},r.onerror=e=>{var o,r;t(Error(`File could not be read! Code=${(null===(r=null===(o=null==e?void 0:e.target)||void 0===o?void 0:o.error)||void 0===r?void 0:r.code)||-1}`))},r.readAsArrayBuffer(o)})))}var o;return new Uint8Array(t)})),t.importScript=e=>r(void 0,void 0,void 0,(function*(){return new Promise((t=>{const o=document.createElement("script"),r=()=>{o.removeEventListener("load",r),t()};o.src=e,o.type="text/javascript",o.addEventListener("load",r),document.getElementsByTagName("head")[0].appendChild(o)}))})),t.downloadWithProgress=(e,t)=>r(void 0,void 0,void 0,(function*(){var o;const r=yield fetch(e);let d;try{const a=parseInt(r.headers.get(i.HeaderContentLength)||"-1"),l=null===(o=r.body)||void 0===o?void 0:o.getReader();if(!l)throw n.ERROR_RESPONSE_BODY_READER;const c=[];let s=0;for(;;){const{done:o,value:r}=yield l.read(),i=r?r.length:0;if(o){if(-1!=a&&a!==s)throw n.ERROR_INCOMPLETED_DOWNLOAD;t&&t({url:e,total:a,received:s,delta:i,done:o});break}c.push(r),s+=i,t&&t({url:e,total:a,received:s,delta:i,done:o})}const f=new Uint8Array(s);let u=0;for(const e of c)f.set(e,u),u+=e.length;d=f.buffer}catch(o){console.log("failed to send download progress event: ",o),d=yield r.arrayBuffer(),t&&t({url:e,total:d.byteLength,received:d.byteLength,delta:0,done:!0})}return d})),t.toBlobURL=(e,o,n=!1,i)=>r(void 0,void 0,void 0,(function*(){const r=n?yield(0,t.downloadWithProgress)(e,i):yield(yield fetch(e)).arrayBuffer(),d=new Blob([r],{type:o});return URL.createObjectURL(d)}))}},t={};return function o(r){var n=t[r];if(void 0!==n)return n.exports;var i=t[r]={exports:{}};return e[r].call(i.exports,i,i.exports,o),i.exports}(915)})())); \ No newline at end of file diff --git a/node_modules/@ffmpeg/util/package.json b/node_modules/@ffmpeg/util/package.json new file mode 100644 index 0000000..3afedb4 --- /dev/null +++ b/node_modules/@ffmpeg/util/package.json @@ -0,0 +1,56 @@ +{ + "name": "@ffmpeg/util", + "version": "0.12.1", + "description": "browser utils for @ffmpeg/*", + "main": "./dist/cjs/index.js", + "types": "./dist/cjs/index.d.ts", + "exports": { + ".": { + "types": "./dist/cjs/index.d.ts", + "import": "./dist/esm/index.js", + "require": "./dist/cjs/index.js" + } + }, + "scripts": { + "dev": "tsc -p tsconfig-esm.json --watch", + "lint": "eslint src", + "clean": "rimraf dist", + "build:esm": "tsc -p tsconfig.esm.json", + "build:umd": "tsc -p tsconfig.cjs.json && webpack", + "build": "npm run clean && npm run build:esm && npm run build:umd", + "prepublishOnly": "npm run build" + }, + "files": [ + "dist" + ], + "repository": { + "type": "git", + "url": "git+https://github.com/ffmpegwasm/ffmpeg.wasm.git" + }, + "keywords": [ + "ffmpeg", + "video", + "audio", + "transcode" + ], + "author": "Jerome Wu ", + "license": "MIT", + "bugs": { + "url": "https://github.com/ffmpegwasm/ffmpeg.wasm/issues" + }, + "engines": { + "node": ">=18.x" + }, + "homepage": "https://github.com/ffmpegwasm/ffmpeg.wasm#readme", + "publishConfig": { + "access": "public" + }, + "devDependencies": { + 
"@typescript-eslint/eslint-plugin": "^6.1.0", + "@typescript-eslint/parser": "^6.1.0", + "eslint": "^8.45.0", + "rimraf": "^5.0.1", + "typescript": "^5.1.6", + "webpack-cli": "^5.1.4" + } +} diff --git a/node_modules/@rollup/rollup-darwin-arm64/README.md b/node_modules/@rollup/rollup-darwin-arm64/README.md new file mode 100644 index 0000000..c29619c --- /dev/null +++ b/node_modules/@rollup/rollup-darwin-arm64/README.md @@ -0,0 +1,3 @@ +# `@rollup/rollup-darwin-arm64` + +This is the **aarch64-apple-darwin** binary for `rollup` diff --git a/node_modules/@rollup/rollup-darwin-arm64/package.json b/node_modules/@rollup/rollup-darwin-arm64/package.json new file mode 100644 index 0000000..9f52e7e --- /dev/null +++ b/node_modules/@rollup/rollup-darwin-arm64/package.json @@ -0,0 +1,19 @@ +{ + "name": "@rollup/rollup-darwin-arm64", + "version": "4.24.0", + "os": [ + "darwin" + ], + "cpu": [ + "arm64" + ], + "files": [ + "rollup.darwin-arm64.node" + ], + "description": "Native bindings for Rollup", + "author": "Lukas Taegert-Atkinson", + "homepage": "https://rollupjs.org/", + "license": "MIT", + "repository": "rollup/rollup", + "main": "./rollup.darwin-arm64.node" +} \ No newline at end of file diff --git a/node_modules/@rollup/rollup-darwin-arm64/rollup.darwin-arm64.node b/node_modules/@rollup/rollup-darwin-arm64/rollup.darwin-arm64.node new file mode 100644 index 0000000..dc16dfa Binary files /dev/null and b/node_modules/@rollup/rollup-darwin-arm64/rollup.darwin-arm64.node differ diff --git a/node_modules/@types/estree/LICENSE b/node_modules/@types/estree/LICENSE new file mode 100644 index 0000000..9e841e7 --- /dev/null +++ b/node_modules/@types/estree/LICENSE @@ -0,0 +1,21 @@ + MIT License + + Copyright (c) Microsoft Corporation. + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE diff --git a/node_modules/@types/estree/README.md b/node_modules/@types/estree/README.md new file mode 100644 index 0000000..5a6f4c8 --- /dev/null +++ b/node_modules/@types/estree/README.md @@ -0,0 +1,15 @@ +# Installation +> `npm install --save @types/estree` + +# Summary +This package contains type definitions for estree (https://github.com/estree/estree). + +# Details +Files were exported from https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/estree. + +### Additional Details + * Last updated: Wed, 18 Sep 2024 09:37:00 GMT + * Dependencies: none + +# Credits +These definitions were written by [RReverser](https://github.com/RReverser). 
diff --git a/node_modules/@types/estree/flow.d.ts b/node_modules/@types/estree/flow.d.ts new file mode 100644 index 0000000..9d001a9 --- /dev/null +++ b/node_modules/@types/estree/flow.d.ts @@ -0,0 +1,167 @@ +declare namespace ESTree { + interface FlowTypeAnnotation extends Node {} + + interface FlowBaseTypeAnnotation extends FlowTypeAnnotation {} + + interface FlowLiteralTypeAnnotation extends FlowTypeAnnotation, Literal {} + + interface FlowDeclaration extends Declaration {} + + interface AnyTypeAnnotation extends FlowBaseTypeAnnotation {} + + interface ArrayTypeAnnotation extends FlowTypeAnnotation { + elementType: FlowTypeAnnotation; + } + + interface BooleanLiteralTypeAnnotation extends FlowLiteralTypeAnnotation {} + + interface BooleanTypeAnnotation extends FlowBaseTypeAnnotation {} + + interface ClassImplements extends Node { + id: Identifier; + typeParameters?: TypeParameterInstantiation | null; + } + + interface ClassProperty { + key: Expression; + value?: Expression | null; + typeAnnotation?: TypeAnnotation | null; + computed: boolean; + static: boolean; + } + + interface DeclareClass extends FlowDeclaration { + id: Identifier; + typeParameters?: TypeParameterDeclaration | null; + body: ObjectTypeAnnotation; + extends: InterfaceExtends[]; + } + + interface DeclareFunction extends FlowDeclaration { + id: Identifier; + } + + interface DeclareModule extends FlowDeclaration { + id: Literal | Identifier; + body: BlockStatement; + } + + interface DeclareVariable extends FlowDeclaration { + id: Identifier; + } + + interface FunctionTypeAnnotation extends FlowTypeAnnotation { + params: FunctionTypeParam[]; + returnType: FlowTypeAnnotation; + rest?: FunctionTypeParam | null; + typeParameters?: TypeParameterDeclaration | null; + } + + interface FunctionTypeParam { + name: Identifier; + typeAnnotation: FlowTypeAnnotation; + optional: boolean; + } + + interface GenericTypeAnnotation extends FlowTypeAnnotation { + id: Identifier | QualifiedTypeIdentifier; + typeParameters?: TypeParameterInstantiation | null; + } + + interface InterfaceExtends extends Node { + id: Identifier | QualifiedTypeIdentifier; + typeParameters?: TypeParameterInstantiation | null; + } + + interface InterfaceDeclaration extends FlowDeclaration { + id: Identifier; + typeParameters?: TypeParameterDeclaration | null; + extends: InterfaceExtends[]; + body: ObjectTypeAnnotation; + } + + interface IntersectionTypeAnnotation extends FlowTypeAnnotation { + types: FlowTypeAnnotation[]; + } + + interface MixedTypeAnnotation extends FlowBaseTypeAnnotation {} + + interface NullableTypeAnnotation extends FlowTypeAnnotation { + typeAnnotation: TypeAnnotation; + } + + interface NumberLiteralTypeAnnotation extends FlowLiteralTypeAnnotation {} + + interface NumberTypeAnnotation extends FlowBaseTypeAnnotation {} + + interface StringLiteralTypeAnnotation extends FlowLiteralTypeAnnotation {} + + interface StringTypeAnnotation extends FlowBaseTypeAnnotation {} + + interface TupleTypeAnnotation extends FlowTypeAnnotation { + types: FlowTypeAnnotation[]; + } + + interface TypeofTypeAnnotation extends FlowTypeAnnotation { + argument: FlowTypeAnnotation; + } + + interface TypeAlias extends FlowDeclaration { + id: Identifier; + typeParameters?: TypeParameterDeclaration | null; + right: FlowTypeAnnotation; + } + + interface TypeAnnotation extends Node { + typeAnnotation: FlowTypeAnnotation; + } + + interface TypeCastExpression extends Expression { + expression: Expression; + typeAnnotation: TypeAnnotation; + } + + interface 
TypeParameterDeclaration extends Node { + params: Identifier[]; + } + + interface TypeParameterInstantiation extends Node { + params: FlowTypeAnnotation[]; + } + + interface ObjectTypeAnnotation extends FlowTypeAnnotation { + properties: ObjectTypeProperty[]; + indexers: ObjectTypeIndexer[]; + callProperties: ObjectTypeCallProperty[]; + } + + interface ObjectTypeCallProperty extends Node { + value: FunctionTypeAnnotation; + static: boolean; + } + + interface ObjectTypeIndexer extends Node { + id: Identifier; + key: FlowTypeAnnotation; + value: FlowTypeAnnotation; + static: boolean; + } + + interface ObjectTypeProperty extends Node { + key: Expression; + value: FlowTypeAnnotation; + optional: boolean; + static: boolean; + } + + interface QualifiedTypeIdentifier extends Node { + qualification: Identifier | QualifiedTypeIdentifier; + id: Identifier; + } + + interface UnionTypeAnnotation extends FlowTypeAnnotation { + types: FlowTypeAnnotation[]; + } + + interface VoidTypeAnnotation extends FlowBaseTypeAnnotation {} +} diff --git a/node_modules/@types/estree/index.d.ts b/node_modules/@types/estree/index.d.ts new file mode 100644 index 0000000..81a351f --- /dev/null +++ b/node_modules/@types/estree/index.d.ts @@ -0,0 +1,684 @@ +// This definition file follows a somewhat unusual format. ESTree allows +// runtime type checks based on the `type` parameter. In order to explain this +// to typescript we want to use discriminated union types: +// https://github.com/Microsoft/TypeScript/pull/9163 +// +// For ESTree this is a bit tricky because the high level interfaces like +// Node or Function are pulling double duty. We want to pass common fields down +// to the interfaces that extend them (like Identifier or +// ArrowFunctionExpression), but you can't extend a type union or enforce +// common fields on them. So we've split the high level interfaces into two +// types, a base type which passes down inherited fields, and a type union of +// all types which extend the base type. Only the type union is exported, and +// the union is how other types refer to the collection of inheriting types. +// +// This makes the definitions file here somewhat more difficult to maintain, +// but it has the notable advantage of making ESTree much easier to use as +// an end user. + +export interface BaseNodeWithoutComments { + // Every leaf interface that extends BaseNode must specify a type property. + // The type property should be a string literal. 
For example, Identifier + // has: `type: "Identifier"` + type: string; + loc?: SourceLocation | null | undefined; + range?: [number, number] | undefined; +} + +export interface BaseNode extends BaseNodeWithoutComments { + leadingComments?: Comment[] | undefined; + trailingComments?: Comment[] | undefined; +} + +export interface NodeMap { + AssignmentProperty: AssignmentProperty; + CatchClause: CatchClause; + Class: Class; + ClassBody: ClassBody; + Expression: Expression; + Function: Function; + Identifier: Identifier; + Literal: Literal; + MethodDefinition: MethodDefinition; + ModuleDeclaration: ModuleDeclaration; + ModuleSpecifier: ModuleSpecifier; + Pattern: Pattern; + PrivateIdentifier: PrivateIdentifier; + Program: Program; + Property: Property; + PropertyDefinition: PropertyDefinition; + SpreadElement: SpreadElement; + Statement: Statement; + Super: Super; + SwitchCase: SwitchCase; + TemplateElement: TemplateElement; + VariableDeclarator: VariableDeclarator; +} + +export type Node = NodeMap[keyof NodeMap]; + +export interface Comment extends BaseNodeWithoutComments { + type: "Line" | "Block"; + value: string; +} + +export interface SourceLocation { + source?: string | null | undefined; + start: Position; + end: Position; +} + +export interface Position { + /** >= 1 */ + line: number; + /** >= 0 */ + column: number; +} + +export interface Program extends BaseNode { + type: "Program"; + sourceType: "script" | "module"; + body: Array; + comments?: Comment[] | undefined; +} + +export interface Directive extends BaseNode { + type: "ExpressionStatement"; + expression: Literal; + directive: string; +} + +export interface BaseFunction extends BaseNode { + params: Pattern[]; + generator?: boolean | undefined; + async?: boolean | undefined; + // The body is either BlockStatement or Expression because arrow functions + // can have a body that's either. FunctionDeclarations and + // FunctionExpressions have only BlockStatement bodies. 
+ body: BlockStatement | Expression; +} + +export type Function = FunctionDeclaration | FunctionExpression | ArrowFunctionExpression; + +export type Statement = + | ExpressionStatement + | BlockStatement + | StaticBlock + | EmptyStatement + | DebuggerStatement + | WithStatement + | ReturnStatement + | LabeledStatement + | BreakStatement + | ContinueStatement + | IfStatement + | SwitchStatement + | ThrowStatement + | TryStatement + | WhileStatement + | DoWhileStatement + | ForStatement + | ForInStatement + | ForOfStatement + | Declaration; + +export interface BaseStatement extends BaseNode {} + +export interface EmptyStatement extends BaseStatement { + type: "EmptyStatement"; +} + +export interface BlockStatement extends BaseStatement { + type: "BlockStatement"; + body: Statement[]; + innerComments?: Comment[] | undefined; +} + +export interface StaticBlock extends Omit { + type: "StaticBlock"; +} + +export interface ExpressionStatement extends BaseStatement { + type: "ExpressionStatement"; + expression: Expression; +} + +export interface IfStatement extends BaseStatement { + type: "IfStatement"; + test: Expression; + consequent: Statement; + alternate?: Statement | null | undefined; +} + +export interface LabeledStatement extends BaseStatement { + type: "LabeledStatement"; + label: Identifier; + body: Statement; +} + +export interface BreakStatement extends BaseStatement { + type: "BreakStatement"; + label?: Identifier | null | undefined; +} + +export interface ContinueStatement extends BaseStatement { + type: "ContinueStatement"; + label?: Identifier | null | undefined; +} + +export interface WithStatement extends BaseStatement { + type: "WithStatement"; + object: Expression; + body: Statement; +} + +export interface SwitchStatement extends BaseStatement { + type: "SwitchStatement"; + discriminant: Expression; + cases: SwitchCase[]; +} + +export interface ReturnStatement extends BaseStatement { + type: "ReturnStatement"; + argument?: Expression | null | undefined; +} + +export interface ThrowStatement extends BaseStatement { + type: "ThrowStatement"; + argument: Expression; +} + +export interface TryStatement extends BaseStatement { + type: "TryStatement"; + block: BlockStatement; + handler?: CatchClause | null | undefined; + finalizer?: BlockStatement | null | undefined; +} + +export interface WhileStatement extends BaseStatement { + type: "WhileStatement"; + test: Expression; + body: Statement; +} + +export interface DoWhileStatement extends BaseStatement { + type: "DoWhileStatement"; + body: Statement; + test: Expression; +} + +export interface ForStatement extends BaseStatement { + type: "ForStatement"; + init?: VariableDeclaration | Expression | null | undefined; + test?: Expression | null | undefined; + update?: Expression | null | undefined; + body: Statement; +} + +export interface BaseForXStatement extends BaseStatement { + left: VariableDeclaration | Pattern; + right: Expression; + body: Statement; +} + +export interface ForInStatement extends BaseForXStatement { + type: "ForInStatement"; +} + +export interface DebuggerStatement extends BaseStatement { + type: "DebuggerStatement"; +} + +export type Declaration = FunctionDeclaration | VariableDeclaration | ClassDeclaration; + +export interface BaseDeclaration extends BaseStatement {} + +export interface MaybeNamedFunctionDeclaration extends BaseFunction, BaseDeclaration { + type: "FunctionDeclaration"; + /** It is null when a function declaration is a part of the `export default function` statement */ + id: Identifier | null; + 
body: BlockStatement; +} + +export interface FunctionDeclaration extends MaybeNamedFunctionDeclaration { + id: Identifier; +} + +export interface VariableDeclaration extends BaseDeclaration { + type: "VariableDeclaration"; + declarations: VariableDeclarator[]; + kind: "var" | "let" | "const"; +} + +export interface VariableDeclarator extends BaseNode { + type: "VariableDeclarator"; + id: Pattern; + init?: Expression | null | undefined; +} + +export interface ExpressionMap { + ArrayExpression: ArrayExpression; + ArrowFunctionExpression: ArrowFunctionExpression; + AssignmentExpression: AssignmentExpression; + AwaitExpression: AwaitExpression; + BinaryExpression: BinaryExpression; + CallExpression: CallExpression; + ChainExpression: ChainExpression; + ClassExpression: ClassExpression; + ConditionalExpression: ConditionalExpression; + FunctionExpression: FunctionExpression; + Identifier: Identifier; + ImportExpression: ImportExpression; + Literal: Literal; + LogicalExpression: LogicalExpression; + MemberExpression: MemberExpression; + MetaProperty: MetaProperty; + NewExpression: NewExpression; + ObjectExpression: ObjectExpression; + SequenceExpression: SequenceExpression; + TaggedTemplateExpression: TaggedTemplateExpression; + TemplateLiteral: TemplateLiteral; + ThisExpression: ThisExpression; + UnaryExpression: UnaryExpression; + UpdateExpression: UpdateExpression; + YieldExpression: YieldExpression; +} + +export type Expression = ExpressionMap[keyof ExpressionMap]; + +export interface BaseExpression extends BaseNode {} + +export type ChainElement = SimpleCallExpression | MemberExpression; + +export interface ChainExpression extends BaseExpression { + type: "ChainExpression"; + expression: ChainElement; +} + +export interface ThisExpression extends BaseExpression { + type: "ThisExpression"; +} + +export interface ArrayExpression extends BaseExpression { + type: "ArrayExpression"; + elements: Array; +} + +export interface ObjectExpression extends BaseExpression { + type: "ObjectExpression"; + properties: Array; +} + +export interface PrivateIdentifier extends BaseNode { + type: "PrivateIdentifier"; + name: string; +} + +export interface Property extends BaseNode { + type: "Property"; + key: Expression | PrivateIdentifier; + value: Expression | Pattern; // Could be an AssignmentProperty + kind: "init" | "get" | "set"; + method: boolean; + shorthand: boolean; + computed: boolean; +} + +export interface PropertyDefinition extends BaseNode { + type: "PropertyDefinition"; + key: Expression | PrivateIdentifier; + value?: Expression | null | undefined; + computed: boolean; + static: boolean; +} + +export interface FunctionExpression extends BaseFunction, BaseExpression { + id?: Identifier | null | undefined; + type: "FunctionExpression"; + body: BlockStatement; +} + +export interface SequenceExpression extends BaseExpression { + type: "SequenceExpression"; + expressions: Expression[]; +} + +export interface UnaryExpression extends BaseExpression { + type: "UnaryExpression"; + operator: UnaryOperator; + prefix: true; + argument: Expression; +} + +export interface BinaryExpression extends BaseExpression { + type: "BinaryExpression"; + operator: BinaryOperator; + left: Expression | PrivateIdentifier; + right: Expression; +} + +export interface AssignmentExpression extends BaseExpression { + type: "AssignmentExpression"; + operator: AssignmentOperator; + left: Pattern | MemberExpression; + right: Expression; +} + +export interface UpdateExpression extends BaseExpression { + type: "UpdateExpression"; + 
operator: UpdateOperator; + argument: Expression; + prefix: boolean; +} + +export interface LogicalExpression extends BaseExpression { + type: "LogicalExpression"; + operator: LogicalOperator; + left: Expression; + right: Expression; +} + +export interface ConditionalExpression extends BaseExpression { + type: "ConditionalExpression"; + test: Expression; + alternate: Expression; + consequent: Expression; +} + +export interface BaseCallExpression extends BaseExpression { + callee: Expression | Super; + arguments: Array; +} +export type CallExpression = SimpleCallExpression | NewExpression; + +export interface SimpleCallExpression extends BaseCallExpression { + type: "CallExpression"; + optional: boolean; +} + +export interface NewExpression extends BaseCallExpression { + type: "NewExpression"; +} + +export interface MemberExpression extends BaseExpression, BasePattern { + type: "MemberExpression"; + object: Expression | Super; + property: Expression | PrivateIdentifier; + computed: boolean; + optional: boolean; +} + +export type Pattern = Identifier | ObjectPattern | ArrayPattern | RestElement | AssignmentPattern | MemberExpression; + +export interface BasePattern extends BaseNode {} + +export interface SwitchCase extends BaseNode { + type: "SwitchCase"; + test?: Expression | null | undefined; + consequent: Statement[]; +} + +export interface CatchClause extends BaseNode { + type: "CatchClause"; + param: Pattern | null; + body: BlockStatement; +} + +export interface Identifier extends BaseNode, BaseExpression, BasePattern { + type: "Identifier"; + name: string; +} + +export type Literal = SimpleLiteral | RegExpLiteral | BigIntLiteral; + +export interface SimpleLiteral extends BaseNode, BaseExpression { + type: "Literal"; + value: string | boolean | number | null; + raw?: string | undefined; +} + +export interface RegExpLiteral extends BaseNode, BaseExpression { + type: "Literal"; + value?: RegExp | null | undefined; + regex: { + pattern: string; + flags: string; + }; + raw?: string | undefined; +} + +export interface BigIntLiteral extends BaseNode, BaseExpression { + type: "Literal"; + value?: bigint | null | undefined; + bigint: string; + raw?: string | undefined; +} + +export type UnaryOperator = "-" | "+" | "!" 
| "~" | "typeof" | "void" | "delete"; + +export type BinaryOperator = + | "==" + | "!=" + | "===" + | "!==" + | "<" + | "<=" + | ">" + | ">=" + | "<<" + | ">>" + | ">>>" + | "+" + | "-" + | "*" + | "/" + | "%" + | "**" + | "|" + | "^" + | "&" + | "in" + | "instanceof"; + +export type LogicalOperator = "||" | "&&" | "??"; + +export type AssignmentOperator = + | "=" + | "+=" + | "-=" + | "*=" + | "/=" + | "%=" + | "**=" + | "<<=" + | ">>=" + | ">>>=" + | "|=" + | "^=" + | "&=" + | "||=" + | "&&=" + | "??="; + +export type UpdateOperator = "++" | "--"; + +export interface ForOfStatement extends BaseForXStatement { + type: "ForOfStatement"; + await: boolean; +} + +export interface Super extends BaseNode { + type: "Super"; +} + +export interface SpreadElement extends BaseNode { + type: "SpreadElement"; + argument: Expression; +} + +export interface ArrowFunctionExpression extends BaseExpression, BaseFunction { + type: "ArrowFunctionExpression"; + expression: boolean; + body: BlockStatement | Expression; +} + +export interface YieldExpression extends BaseExpression { + type: "YieldExpression"; + argument?: Expression | null | undefined; + delegate: boolean; +} + +export interface TemplateLiteral extends BaseExpression { + type: "TemplateLiteral"; + quasis: TemplateElement[]; + expressions: Expression[]; +} + +export interface TaggedTemplateExpression extends BaseExpression { + type: "TaggedTemplateExpression"; + tag: Expression; + quasi: TemplateLiteral; +} + +export interface TemplateElement extends BaseNode { + type: "TemplateElement"; + tail: boolean; + value: { + /** It is null when the template literal is tagged and the text has an invalid escape (e.g. - tag`\unicode and \u{55}`) */ + cooked?: string | null | undefined; + raw: string; + }; +} + +export interface AssignmentProperty extends Property { + value: Pattern; + kind: "init"; + method: boolean; // false +} + +export interface ObjectPattern extends BasePattern { + type: "ObjectPattern"; + properties: Array; +} + +export interface ArrayPattern extends BasePattern { + type: "ArrayPattern"; + elements: Array; +} + +export interface RestElement extends BasePattern { + type: "RestElement"; + argument: Pattern; +} + +export interface AssignmentPattern extends BasePattern { + type: "AssignmentPattern"; + left: Pattern; + right: Expression; +} + +export type Class = ClassDeclaration | ClassExpression; +export interface BaseClass extends BaseNode { + superClass?: Expression | null | undefined; + body: ClassBody; +} + +export interface ClassBody extends BaseNode { + type: "ClassBody"; + body: Array; +} + +export interface MethodDefinition extends BaseNode { + type: "MethodDefinition"; + key: Expression | PrivateIdentifier; + value: FunctionExpression; + kind: "constructor" | "method" | "get" | "set"; + computed: boolean; + static: boolean; +} + +export interface MaybeNamedClassDeclaration extends BaseClass, BaseDeclaration { + type: "ClassDeclaration"; + /** It is null when a class declaration is a part of the `export default class` statement */ + id: Identifier | null; +} + +export interface ClassDeclaration extends MaybeNamedClassDeclaration { + id: Identifier; +} + +export interface ClassExpression extends BaseClass, BaseExpression { + type: "ClassExpression"; + id?: Identifier | null | undefined; +} + +export interface MetaProperty extends BaseExpression { + type: "MetaProperty"; + meta: Identifier; + property: Identifier; +} + +export type ModuleDeclaration = + | ImportDeclaration + | ExportNamedDeclaration + | ExportDefaultDeclaration + | 
ExportAllDeclaration; +export interface BaseModuleDeclaration extends BaseNode {} + +export type ModuleSpecifier = ImportSpecifier | ImportDefaultSpecifier | ImportNamespaceSpecifier | ExportSpecifier; +export interface BaseModuleSpecifier extends BaseNode { + local: Identifier; +} + +export interface ImportDeclaration extends BaseModuleDeclaration { + type: "ImportDeclaration"; + specifiers: Array; + source: Literal; +} + +export interface ImportSpecifier extends BaseModuleSpecifier { + type: "ImportSpecifier"; + imported: Identifier | Literal; +} + +export interface ImportExpression extends BaseExpression { + type: "ImportExpression"; + source: Expression; +} + +export interface ImportDefaultSpecifier extends BaseModuleSpecifier { + type: "ImportDefaultSpecifier"; +} + +export interface ImportNamespaceSpecifier extends BaseModuleSpecifier { + type: "ImportNamespaceSpecifier"; +} + +export interface ExportNamedDeclaration extends BaseModuleDeclaration { + type: "ExportNamedDeclaration"; + declaration?: Declaration | null | undefined; + specifiers: ExportSpecifier[]; + source?: Literal | null | undefined; +} + +export interface ExportSpecifier extends Omit { + type: "ExportSpecifier"; + local: Identifier | Literal; + exported: Identifier | Literal; +} + +export interface ExportDefaultDeclaration extends BaseModuleDeclaration { + type: "ExportDefaultDeclaration"; + declaration: MaybeNamedFunctionDeclaration | MaybeNamedClassDeclaration | Expression; +} + +export interface ExportAllDeclaration extends BaseModuleDeclaration { + type: "ExportAllDeclaration"; + exported: Identifier | Literal | null; + source: Literal; +} + +export interface AwaitExpression extends BaseExpression { + type: "AwaitExpression"; + argument: Expression; +} diff --git a/node_modules/@types/estree/package.json b/node_modules/@types/estree/package.json new file mode 100644 index 0000000..f410761 --- /dev/null +++ b/node_modules/@types/estree/package.json @@ -0,0 +1,26 @@ +{ + "name": "@types/estree", + "version": "1.0.6", + "description": "TypeScript definitions for estree", + "homepage": "https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/estree", + "license": "MIT", + "contributors": [ + { + "name": "RReverser", + "githubUsername": "RReverser", + "url": "https://github.com/RReverser" + } + ], + "main": "", + "types": "index.d.ts", + "repository": { + "type": "git", + "url": "https://github.com/DefinitelyTyped/DefinitelyTyped.git", + "directory": "types/estree" + }, + "scripts": {}, + "dependencies": {}, + "typesPublisherContentHash": "0310b41994a6f8d7530af6c53d47d8b227f32925e43718507fdb1178e05006b1", + "typeScriptVersion": "4.8", + "nonNpm": true +} \ No newline at end of file diff --git a/node_modules/esbuild/LICENSE.md b/node_modules/esbuild/LICENSE.md new file mode 100644 index 0000000..2027e8d --- /dev/null +++ b/node_modules/esbuild/LICENSE.md @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2020 Evan Wallace + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/esbuild/README.md b/node_modules/esbuild/README.md new file mode 100644 index 0000000..93863d1 --- /dev/null +++ b/node_modules/esbuild/README.md @@ -0,0 +1,3 @@ +# esbuild + +This is a JavaScript bundler and minifier. See https://github.com/evanw/esbuild and the [JavaScript API documentation](https://esbuild.github.io/api/) for details. diff --git a/node_modules/esbuild/bin/esbuild b/node_modules/esbuild/bin/esbuild new file mode 100755 index 0000000..971ac09 --- /dev/null +++ b/node_modules/esbuild/bin/esbuild @@ -0,0 +1,220 @@ +#!/usr/bin/env node +"use strict"; +var __create = Object.create; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __getProtoOf = Object.getPrototypeOf; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( + // If the importer is in node compatibility mode or this is not an ESM + // file that has been converted to a CommonJS file using a Babel- + // compatible transform (i.e. "__esModule" has not been set), then set + // "default" to the CommonJS "module.exports" for node compatibility. + isNodeMode || !mod || !mod.__esModule ? 
__defProp(target, "default", { value: mod, enumerable: true }) : target, + mod +)); + +// lib/npm/node-platform.ts +var fs = require("fs"); +var os = require("os"); +var path = require("path"); +var ESBUILD_BINARY_PATH = process.env.ESBUILD_BINARY_PATH || ESBUILD_BINARY_PATH; +var isValidBinaryPath = (x) => !!x && x !== "/usr/bin/esbuild"; +var packageDarwin_arm64 = "@esbuild/darwin-arm64"; +var packageDarwin_x64 = "@esbuild/darwin-x64"; +var knownWindowsPackages = { + "win32 arm64 LE": "@esbuild/win32-arm64", + "win32 ia32 LE": "@esbuild/win32-ia32", + "win32 x64 LE": "@esbuild/win32-x64" +}; +var knownUnixlikePackages = { + "aix ppc64 BE": "@esbuild/aix-ppc64", + "android arm64 LE": "@esbuild/android-arm64", + "darwin arm64 LE": "@esbuild/darwin-arm64", + "darwin x64 LE": "@esbuild/darwin-x64", + "freebsd arm64 LE": "@esbuild/freebsd-arm64", + "freebsd x64 LE": "@esbuild/freebsd-x64", + "linux arm LE": "@esbuild/linux-arm", + "linux arm64 LE": "@esbuild/linux-arm64", + "linux ia32 LE": "@esbuild/linux-ia32", + "linux mips64el LE": "@esbuild/linux-mips64el", + "linux ppc64 LE": "@esbuild/linux-ppc64", + "linux riscv64 LE": "@esbuild/linux-riscv64", + "linux s390x BE": "@esbuild/linux-s390x", + "linux x64 LE": "@esbuild/linux-x64", + "linux loong64 LE": "@esbuild/linux-loong64", + "netbsd x64 LE": "@esbuild/netbsd-x64", + "openbsd x64 LE": "@esbuild/openbsd-x64", + "sunos x64 LE": "@esbuild/sunos-x64" +}; +var knownWebAssemblyFallbackPackages = { + "android arm LE": "@esbuild/android-arm", + "android x64 LE": "@esbuild/android-x64" +}; +function pkgAndSubpathForCurrentPlatform() { + let pkg; + let subpath; + let isWASM2 = false; + let platformKey = `${process.platform} ${os.arch()} ${os.endianness()}`; + if (platformKey in knownWindowsPackages) { + pkg = knownWindowsPackages[platformKey]; + subpath = "esbuild.exe"; + } else if (platformKey in knownUnixlikePackages) { + pkg = knownUnixlikePackages[platformKey]; + subpath = "bin/esbuild"; + } else if (platformKey in knownWebAssemblyFallbackPackages) { + pkg = knownWebAssemblyFallbackPackages[platformKey]; + subpath = "bin/esbuild"; + isWASM2 = true; + } else { + throw new Error(`Unsupported platform: ${platformKey}`); + } + return { pkg, subpath, isWASM: isWASM2 }; +} +function pkgForSomeOtherPlatform() { + const libMainJS = require.resolve("esbuild"); + const nodeModulesDirectory = path.dirname(path.dirname(path.dirname(libMainJS))); + if (path.basename(nodeModulesDirectory) === "node_modules") { + for (const unixKey in knownUnixlikePackages) { + try { + const pkg = knownUnixlikePackages[unixKey]; + if (fs.existsSync(path.join(nodeModulesDirectory, pkg))) return pkg; + } catch { + } + } + for (const windowsKey in knownWindowsPackages) { + try { + const pkg = knownWindowsPackages[windowsKey]; + if (fs.existsSync(path.join(nodeModulesDirectory, pkg))) return pkg; + } catch { + } + } + } + return null; +} +function downloadedBinPath(pkg, subpath) { + const esbuildLibDir = path.dirname(require.resolve("esbuild")); + return path.join(esbuildLibDir, `downloaded-${pkg.replace("/", "-")}-${path.basename(subpath)}`); +} +function generateBinPath() { + if (isValidBinaryPath(ESBUILD_BINARY_PATH)) { + if (!fs.existsSync(ESBUILD_BINARY_PATH)) { + console.warn(`[esbuild] Ignoring bad configuration: ESBUILD_BINARY_PATH=${ESBUILD_BINARY_PATH}`); + } else { + return { binPath: ESBUILD_BINARY_PATH, isWASM: false }; + } + } + const { pkg, subpath, isWASM: isWASM2 } = pkgAndSubpathForCurrentPlatform(); + let binPath2; + try { + binPath2 = 
require.resolve(`${pkg}/${subpath}`); + } catch (e) { + binPath2 = downloadedBinPath(pkg, subpath); + if (!fs.existsSync(binPath2)) { + try { + require.resolve(pkg); + } catch { + const otherPkg = pkgForSomeOtherPlatform(); + if (otherPkg) { + let suggestions = ` +Specifically the "${otherPkg}" package is present but this platform +needs the "${pkg}" package instead. People often get into this +situation by installing esbuild on Windows or macOS and copying "node_modules" +into a Docker image that runs Linux, or by copying "node_modules" between +Windows and WSL environments. + +If you are installing with npm, you can try not copying the "node_modules" +directory when you copy the files over, and running "npm ci" or "npm install" +on the destination platform after the copy. Or you could consider using yarn +instead of npm which has built-in support for installing a package on multiple +platforms simultaneously. + +If you are installing with yarn, you can try listing both this platform and the +other platform in your ".yarnrc.yml" file using the "supportedArchitectures" +feature: https://yarnpkg.com/configuration/yarnrc/#supportedArchitectures +Keep in mind that this means multiple copies of esbuild will be present. +`; + if (pkg === packageDarwin_x64 && otherPkg === packageDarwin_arm64 || pkg === packageDarwin_arm64 && otherPkg === packageDarwin_x64) { + suggestions = ` +Specifically the "${otherPkg}" package is present but this platform +needs the "${pkg}" package instead. People often get into this +situation by installing esbuild with npm running inside of Rosetta 2 and then +trying to use it with node running outside of Rosetta 2, or vice versa (Rosetta +2 is Apple's on-the-fly x86_64-to-arm64 translation service). + +If you are installing with npm, you can try ensuring that both npm and node are +not running under Rosetta 2 and then reinstalling esbuild. This likely involves +changing how you installed npm and/or node. For example, installing node with +the universal installer here should work: https://nodejs.org/en/download/. Or +you could consider using yarn instead of npm which has built-in support for +installing a package on multiple platforms simultaneously. + +If you are installing with yarn, you can try listing both "arm64" and "x64" +in your ".yarnrc.yml" file using the "supportedArchitectures" feature: +https://yarnpkg.com/configuration/yarnrc/#supportedArchitectures +Keep in mind that this means multiple copies of esbuild will be present. +`; + } + throw new Error(` +You installed esbuild for another platform than the one you're currently using. +This won't work because esbuild is written with native code and needs to +install a platform-specific binary executable. +${suggestions} +Another alternative is to use the "esbuild-wasm" package instead, which works +the same way on all platforms. But it comes with a heavy performance cost and +can sometimes be 10x slower than the "esbuild" package, so you may also not +want to do that. +`); + } + throw new Error(`The package "${pkg}" could not be found, and is needed by esbuild. + +If you are installing esbuild with npm, make sure that you don't specify the +"--no-optional" or "--omit=optional" flags. 
The "optionalDependencies" feature +of "package.json" is used by esbuild to install the correct binary executable +for your current platform.`); + } + throw e; + } + } + if (/\.zip\//.test(binPath2)) { + let pnpapi; + try { + pnpapi = require("pnpapi"); + } catch (e) { + } + if (pnpapi) { + const root = pnpapi.getPackageInformation(pnpapi.topLevel).packageLocation; + const binTargetPath = path.join( + root, + "node_modules", + ".cache", + "esbuild", + `pnpapi-${pkg.replace("/", "-")}-${"0.21.5"}-${path.basename(subpath)}` + ); + if (!fs.existsSync(binTargetPath)) { + fs.mkdirSync(path.dirname(binTargetPath), { recursive: true }); + fs.copyFileSync(binPath2, binTargetPath); + fs.chmodSync(binTargetPath, 493); + } + return { binPath: binTargetPath, isWASM: isWASM2 }; + } + } + return { binPath: binPath2, isWASM: isWASM2 }; +} + +// lib/npm/node-shim.ts +var { binPath, isWASM } = generateBinPath(); +if (isWASM) { + require("child_process").execFileSync("node", [binPath].concat(process.argv.slice(2)), { stdio: "inherit" }); +} else { + require("child_process").execFileSync(binPath, process.argv.slice(2), { stdio: "inherit" }); +} diff --git a/node_modules/esbuild/install.js b/node_modules/esbuild/install.js new file mode 100644 index 0000000..d97764e --- /dev/null +++ b/node_modules/esbuild/install.js @@ -0,0 +1,285 @@ +"use strict"; +var __create = Object.create; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __getProtoOf = Object.getPrototypeOf; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( + // If the importer is in node compatibility mode or this is not an ESM + // file that has been converted to a CommonJS file using a Babel- + // compatible transform (i.e. "__esModule" has not been set), then set + // "default" to the CommonJS "module.exports" for node compatibility. + isNodeMode || !mod || !mod.__esModule ? 
__defProp(target, "default", { value: mod, enumerable: true }) : target, + mod +)); + +// lib/npm/node-platform.ts +var fs = require("fs"); +var os = require("os"); +var path = require("path"); +var ESBUILD_BINARY_PATH = process.env.ESBUILD_BINARY_PATH || ESBUILD_BINARY_PATH; +var isValidBinaryPath = (x) => !!x && x !== "/usr/bin/esbuild"; +var knownWindowsPackages = { + "win32 arm64 LE": "@esbuild/win32-arm64", + "win32 ia32 LE": "@esbuild/win32-ia32", + "win32 x64 LE": "@esbuild/win32-x64" +}; +var knownUnixlikePackages = { + "aix ppc64 BE": "@esbuild/aix-ppc64", + "android arm64 LE": "@esbuild/android-arm64", + "darwin arm64 LE": "@esbuild/darwin-arm64", + "darwin x64 LE": "@esbuild/darwin-x64", + "freebsd arm64 LE": "@esbuild/freebsd-arm64", + "freebsd x64 LE": "@esbuild/freebsd-x64", + "linux arm LE": "@esbuild/linux-arm", + "linux arm64 LE": "@esbuild/linux-arm64", + "linux ia32 LE": "@esbuild/linux-ia32", + "linux mips64el LE": "@esbuild/linux-mips64el", + "linux ppc64 LE": "@esbuild/linux-ppc64", + "linux riscv64 LE": "@esbuild/linux-riscv64", + "linux s390x BE": "@esbuild/linux-s390x", + "linux x64 LE": "@esbuild/linux-x64", + "linux loong64 LE": "@esbuild/linux-loong64", + "netbsd x64 LE": "@esbuild/netbsd-x64", + "openbsd x64 LE": "@esbuild/openbsd-x64", + "sunos x64 LE": "@esbuild/sunos-x64" +}; +var knownWebAssemblyFallbackPackages = { + "android arm LE": "@esbuild/android-arm", + "android x64 LE": "@esbuild/android-x64" +}; +function pkgAndSubpathForCurrentPlatform() { + let pkg; + let subpath; + let isWASM = false; + let platformKey = `${process.platform} ${os.arch()} ${os.endianness()}`; + if (platformKey in knownWindowsPackages) { + pkg = knownWindowsPackages[platformKey]; + subpath = "esbuild.exe"; + } else if (platformKey in knownUnixlikePackages) { + pkg = knownUnixlikePackages[platformKey]; + subpath = "bin/esbuild"; + } else if (platformKey in knownWebAssemblyFallbackPackages) { + pkg = knownWebAssemblyFallbackPackages[platformKey]; + subpath = "bin/esbuild"; + isWASM = true; + } else { + throw new Error(`Unsupported platform: ${platformKey}`); + } + return { pkg, subpath, isWASM }; +} +function downloadedBinPath(pkg, subpath) { + const esbuildLibDir = path.dirname(require.resolve("esbuild")); + return path.join(esbuildLibDir, `downloaded-${pkg.replace("/", "-")}-${path.basename(subpath)}`); +} + +// lib/npm/node-install.ts +var fs2 = require("fs"); +var os2 = require("os"); +var path2 = require("path"); +var zlib = require("zlib"); +var https = require("https"); +var child_process = require("child_process"); +var versionFromPackageJSON = require(path2.join(__dirname, "package.json")).version; +var toPath = path2.join(__dirname, "bin", "esbuild"); +var isToPathJS = true; +function validateBinaryVersion(...command) { + command.push("--version"); + let stdout; + try { + stdout = child_process.execFileSync(command.shift(), command, { + // Without this, this install script strangely crashes with the error + // "EACCES: permission denied, write" but only on Ubuntu Linux when node is + // installed from the Snap Store. This is not a problem when you download + // the official version of node. The problem appears to be that stderr + // (i.e. file descriptor 2) isn't writable? 
+ // + // More info: + // - https://snapcraft.io/ (what the Snap Store is) + // - https://nodejs.org/dist/ (download the official version of node) + // - https://github.com/evanw/esbuild/issues/1711#issuecomment-1027554035 + // + stdio: "pipe" + }).toString().trim(); + } catch (err) { + if (os2.platform() === "darwin" && /_SecTrustEvaluateWithError/.test(err + "")) { + let os3 = "this version of macOS"; + try { + os3 = "macOS " + child_process.execFileSync("sw_vers", ["-productVersion"]).toString().trim(); + } catch { + } + throw new Error(`The "esbuild" package cannot be installed because ${os3} is too outdated. + +The Go compiler (which esbuild relies on) no longer supports ${os3}, +which means the "esbuild" binary executable can't be run. You can either: + + * Update your version of macOS to one that the Go compiler supports + * Use the "esbuild-wasm" package instead of the "esbuild" package + * Build esbuild yourself using an older version of the Go compiler +`); + } + throw err; + } + if (stdout !== versionFromPackageJSON) { + throw new Error(`Expected ${JSON.stringify(versionFromPackageJSON)} but got ${JSON.stringify(stdout)}`); + } +} +function isYarn() { + const { npm_config_user_agent } = process.env; + if (npm_config_user_agent) { + return /\byarn\//.test(npm_config_user_agent); + } + return false; +} +function fetch(url) { + return new Promise((resolve, reject) => { + https.get(url, (res) => { + if ((res.statusCode === 301 || res.statusCode === 302) && res.headers.location) + return fetch(res.headers.location).then(resolve, reject); + if (res.statusCode !== 200) + return reject(new Error(`Server responded with ${res.statusCode}`)); + let chunks = []; + res.on("data", (chunk) => chunks.push(chunk)); + res.on("end", () => resolve(Buffer.concat(chunks))); + }).on("error", reject); + }); +} +function extractFileFromTarGzip(buffer, subpath) { + try { + buffer = zlib.unzipSync(buffer); + } catch (err) { + throw new Error(`Invalid gzip data in archive: ${err && err.message || err}`); + } + let str = (i, n) => String.fromCharCode(...buffer.subarray(i, i + n)).replace(/\0.*$/, ""); + let offset = 0; + subpath = `package/${subpath}`; + while (offset < buffer.length) { + let name = str(offset, 100); + let size = parseInt(str(offset + 124, 12), 8); + offset += 512; + if (!isNaN(size)) { + if (name === subpath) return buffer.subarray(offset, offset + size); + offset += size + 511 & ~511; + } + } + throw new Error(`Could not find ${JSON.stringify(subpath)} in archive`); +} +function installUsingNPM(pkg, subpath, binPath) { + const env = { ...process.env, npm_config_global: void 0 }; + const esbuildLibDir = path2.dirname(require.resolve("esbuild")); + const installDir = path2.join(esbuildLibDir, "npm-install"); + fs2.mkdirSync(installDir); + try { + fs2.writeFileSync(path2.join(installDir, "package.json"), "{}"); + child_process.execSync( + `npm install --loglevel=error --prefer-offline --no-audit --progress=false ${pkg}@${versionFromPackageJSON}`, + { cwd: installDir, stdio: "pipe", env } + ); + const installedBinPath = path2.join(installDir, "node_modules", pkg, subpath); + fs2.renameSync(installedBinPath, binPath); + } finally { + try { + removeRecursive(installDir); + } catch { + } + } +} +function removeRecursive(dir) { + for (const entry of fs2.readdirSync(dir)) { + const entryPath = path2.join(dir, entry); + let stats; + try { + stats = fs2.lstatSync(entryPath); + } catch { + continue; + } + if (stats.isDirectory()) removeRecursive(entryPath); + else fs2.unlinkSync(entryPath); + } + 
fs2.rmdirSync(dir); +} +function applyManualBinaryPathOverride(overridePath) { + const pathString = JSON.stringify(overridePath); + fs2.writeFileSync(toPath, `#!/usr/bin/env node +require('child_process').execFileSync(${pathString}, process.argv.slice(2), { stdio: 'inherit' }); +`); + const libMain = path2.join(__dirname, "lib", "main.js"); + const code = fs2.readFileSync(libMain, "utf8"); + fs2.writeFileSync(libMain, `var ESBUILD_BINARY_PATH = ${pathString}; +${code}`); +} +function maybeOptimizePackage(binPath) { + if (os2.platform() !== "win32" && !isYarn()) { + const tempPath = path2.join(__dirname, "bin-esbuild"); + try { + fs2.linkSync(binPath, tempPath); + fs2.renameSync(tempPath, toPath); + isToPathJS = false; + fs2.unlinkSync(tempPath); + } catch { + } + } +} +async function downloadDirectlyFromNPM(pkg, subpath, binPath) { + const url = `https://registry.npmjs.org/${pkg}/-/${pkg.replace("@esbuild/", "")}-${versionFromPackageJSON}.tgz`; + console.error(`[esbuild] Trying to download ${JSON.stringify(url)}`); + try { + fs2.writeFileSync(binPath, extractFileFromTarGzip(await fetch(url), subpath)); + fs2.chmodSync(binPath, 493); + } catch (e) { + console.error(`[esbuild] Failed to download ${JSON.stringify(url)}: ${e && e.message || e}`); + throw e; + } +} +async function checkAndPreparePackage() { + if (isValidBinaryPath(ESBUILD_BINARY_PATH)) { + if (!fs2.existsSync(ESBUILD_BINARY_PATH)) { + console.warn(`[esbuild] Ignoring bad configuration: ESBUILD_BINARY_PATH=${ESBUILD_BINARY_PATH}`); + } else { + applyManualBinaryPathOverride(ESBUILD_BINARY_PATH); + return; + } + } + const { pkg, subpath } = pkgAndSubpathForCurrentPlatform(); + let binPath; + try { + binPath = require.resolve(`${pkg}/${subpath}`); + } catch (e) { + console.error(`[esbuild] Failed to find package "${pkg}" on the file system + +This can happen if you use the "--no-optional" flag. The "optionalDependencies" +package.json feature is used by esbuild to install the correct binary executable +for your current platform. This install script will now attempt to work around +this. If that fails, you need to remove the "--no-optional" flag to use esbuild. 
+`); + binPath = downloadedBinPath(pkg, subpath); + try { + console.error(`[esbuild] Trying to install package "${pkg}" using npm`); + installUsingNPM(pkg, subpath, binPath); + } catch (e2) { + console.error(`[esbuild] Failed to install package "${pkg}" using npm: ${e2 && e2.message || e2}`); + try { + await downloadDirectlyFromNPM(pkg, subpath, binPath); + } catch (e3) { + throw new Error(`Failed to install package "${pkg}"`); + } + } + } + maybeOptimizePackage(binPath); +} +checkAndPreparePackage().then(() => { + if (isToPathJS) { + validateBinaryVersion(process.execPath, toPath); + } else { + validateBinaryVersion(toPath); + } +}); diff --git a/node_modules/esbuild/lib/main.d.ts b/node_modules/esbuild/lib/main.d.ts new file mode 100644 index 0000000..d5c6ac9 --- /dev/null +++ b/node_modules/esbuild/lib/main.d.ts @@ -0,0 +1,705 @@ +export type Platform = 'browser' | 'node' | 'neutral' +export type Format = 'iife' | 'cjs' | 'esm' +export type Loader = 'base64' | 'binary' | 'copy' | 'css' | 'dataurl' | 'default' | 'empty' | 'file' | 'js' | 'json' | 'jsx' | 'local-css' | 'text' | 'ts' | 'tsx' +export type LogLevel = 'verbose' | 'debug' | 'info' | 'warning' | 'error' | 'silent' +export type Charset = 'ascii' | 'utf8' +export type Drop = 'console' | 'debugger' + +interface CommonOptions { + /** Documentation: https://esbuild.github.io/api/#sourcemap */ + sourcemap?: boolean | 'linked' | 'inline' | 'external' | 'both' + /** Documentation: https://esbuild.github.io/api/#legal-comments */ + legalComments?: 'none' | 'inline' | 'eof' | 'linked' | 'external' + /** Documentation: https://esbuild.github.io/api/#source-root */ + sourceRoot?: string + /** Documentation: https://esbuild.github.io/api/#sources-content */ + sourcesContent?: boolean + + /** Documentation: https://esbuild.github.io/api/#format */ + format?: Format + /** Documentation: https://esbuild.github.io/api/#global-name */ + globalName?: string + /** Documentation: https://esbuild.github.io/api/#target */ + target?: string | string[] + /** Documentation: https://esbuild.github.io/api/#supported */ + supported?: Record + /** Documentation: https://esbuild.github.io/api/#platform */ + platform?: Platform + + /** Documentation: https://esbuild.github.io/api/#mangle-props */ + mangleProps?: RegExp + /** Documentation: https://esbuild.github.io/api/#mangle-props */ + reserveProps?: RegExp + /** Documentation: https://esbuild.github.io/api/#mangle-props */ + mangleQuoted?: boolean + /** Documentation: https://esbuild.github.io/api/#mangle-props */ + mangleCache?: Record + /** Documentation: https://esbuild.github.io/api/#drop */ + drop?: Drop[] + /** Documentation: https://esbuild.github.io/api/#drop-labels */ + dropLabels?: string[] + /** Documentation: https://esbuild.github.io/api/#minify */ + minify?: boolean + /** Documentation: https://esbuild.github.io/api/#minify */ + minifyWhitespace?: boolean + /** Documentation: https://esbuild.github.io/api/#minify */ + minifyIdentifiers?: boolean + /** Documentation: https://esbuild.github.io/api/#minify */ + minifySyntax?: boolean + /** Documentation: https://esbuild.github.io/api/#line-limit */ + lineLimit?: number + /** Documentation: https://esbuild.github.io/api/#charset */ + charset?: Charset + /** Documentation: https://esbuild.github.io/api/#tree-shaking */ + treeShaking?: boolean + /** Documentation: https://esbuild.github.io/api/#ignore-annotations */ + ignoreAnnotations?: boolean + + /** Documentation: https://esbuild.github.io/api/#jsx */ + jsx?: 'transform' | 'preserve' | 
'automatic' + /** Documentation: https://esbuild.github.io/api/#jsx-factory */ + jsxFactory?: string + /** Documentation: https://esbuild.github.io/api/#jsx-fragment */ + jsxFragment?: string + /** Documentation: https://esbuild.github.io/api/#jsx-import-source */ + jsxImportSource?: string + /** Documentation: https://esbuild.github.io/api/#jsx-development */ + jsxDev?: boolean + /** Documentation: https://esbuild.github.io/api/#jsx-side-effects */ + jsxSideEffects?: boolean + + /** Documentation: https://esbuild.github.io/api/#define */ + define?: { [key: string]: string } + /** Documentation: https://esbuild.github.io/api/#pure */ + pure?: string[] + /** Documentation: https://esbuild.github.io/api/#keep-names */ + keepNames?: boolean + + /** Documentation: https://esbuild.github.io/api/#color */ + color?: boolean + /** Documentation: https://esbuild.github.io/api/#log-level */ + logLevel?: LogLevel + /** Documentation: https://esbuild.github.io/api/#log-limit */ + logLimit?: number + /** Documentation: https://esbuild.github.io/api/#log-override */ + logOverride?: Record + + /** Documentation: https://esbuild.github.io/api/#tsconfig-raw */ + tsconfigRaw?: string | TsconfigRaw +} + +export interface TsconfigRaw { + compilerOptions?: { + alwaysStrict?: boolean + baseUrl?: string + experimentalDecorators?: boolean + importsNotUsedAsValues?: 'remove' | 'preserve' | 'error' + jsx?: 'preserve' | 'react-native' | 'react' | 'react-jsx' | 'react-jsxdev' + jsxFactory?: string + jsxFragmentFactory?: string + jsxImportSource?: string + paths?: Record + preserveValueImports?: boolean + strict?: boolean + target?: string + useDefineForClassFields?: boolean + verbatimModuleSyntax?: boolean + } +} + +export interface BuildOptions extends CommonOptions { + /** Documentation: https://esbuild.github.io/api/#bundle */ + bundle?: boolean + /** Documentation: https://esbuild.github.io/api/#splitting */ + splitting?: boolean + /** Documentation: https://esbuild.github.io/api/#preserve-symlinks */ + preserveSymlinks?: boolean + /** Documentation: https://esbuild.github.io/api/#outfile */ + outfile?: string + /** Documentation: https://esbuild.github.io/api/#metafile */ + metafile?: boolean + /** Documentation: https://esbuild.github.io/api/#outdir */ + outdir?: string + /** Documentation: https://esbuild.github.io/api/#outbase */ + outbase?: string + /** Documentation: https://esbuild.github.io/api/#external */ + external?: string[] + /** Documentation: https://esbuild.github.io/api/#packages */ + packages?: 'external' + /** Documentation: https://esbuild.github.io/api/#alias */ + alias?: Record + /** Documentation: https://esbuild.github.io/api/#loader */ + loader?: { [ext: string]: Loader } + /** Documentation: https://esbuild.github.io/api/#resolve-extensions */ + resolveExtensions?: string[] + /** Documentation: https://esbuild.github.io/api/#main-fields */ + mainFields?: string[] + /** Documentation: https://esbuild.github.io/api/#conditions */ + conditions?: string[] + /** Documentation: https://esbuild.github.io/api/#write */ + write?: boolean + /** Documentation: https://esbuild.github.io/api/#allow-overwrite */ + allowOverwrite?: boolean + /** Documentation: https://esbuild.github.io/api/#tsconfig */ + tsconfig?: string + /** Documentation: https://esbuild.github.io/api/#out-extension */ + outExtension?: { [ext: string]: string } + /** Documentation: https://esbuild.github.io/api/#public-path */ + publicPath?: string + /** Documentation: https://esbuild.github.io/api/#entry-names */ + entryNames?: 
string + /** Documentation: https://esbuild.github.io/api/#chunk-names */ + chunkNames?: string + /** Documentation: https://esbuild.github.io/api/#asset-names */ + assetNames?: string + /** Documentation: https://esbuild.github.io/api/#inject */ + inject?: string[] + /** Documentation: https://esbuild.github.io/api/#banner */ + banner?: { [type: string]: string } + /** Documentation: https://esbuild.github.io/api/#footer */ + footer?: { [type: string]: string } + /** Documentation: https://esbuild.github.io/api/#entry-points */ + entryPoints?: string[] | Record | { in: string, out: string }[] + /** Documentation: https://esbuild.github.io/api/#stdin */ + stdin?: StdinOptions + /** Documentation: https://esbuild.github.io/plugins/ */ + plugins?: Plugin[] + /** Documentation: https://esbuild.github.io/api/#working-directory */ + absWorkingDir?: string + /** Documentation: https://esbuild.github.io/api/#node-paths */ + nodePaths?: string[]; // The "NODE_PATH" variable from Node.js +} + +export interface StdinOptions { + contents: string | Uint8Array + resolveDir?: string + sourcefile?: string + loader?: Loader +} + +export interface Message { + id: string + pluginName: string + text: string + location: Location | null + notes: Note[] + + /** + * Optional user-specified data that is passed through unmodified. You can + * use this to stash the original error, for example. + */ + detail: any +} + +export interface Note { + text: string + location: Location | null +} + +export interface Location { + file: string + namespace: string + /** 1-based */ + line: number + /** 0-based, in bytes */ + column: number + /** in bytes */ + length: number + lineText: string + suggestion: string +} + +export interface OutputFile { + path: string + contents: Uint8Array + hash: string + /** "contents" as text (changes automatically with "contents") */ + readonly text: string +} + +export interface BuildResult { + errors: Message[] + warnings: Message[] + /** Only when "write: false" */ + outputFiles: OutputFile[] | (ProvidedOptions['write'] extends false ? never : undefined) + /** Only when "metafile: true" */ + metafile: Metafile | (ProvidedOptions['metafile'] extends true ? never : undefined) + /** Only when "mangleCache" is present */ + mangleCache: Record | (ProvidedOptions['mangleCache'] extends Object ? 
never : undefined) +} + +export interface BuildFailure extends Error { + errors: Message[] + warnings: Message[] +} + +/** Documentation: https://esbuild.github.io/api/#serve-arguments */ +export interface ServeOptions { + port?: number + host?: string + servedir?: string + keyfile?: string + certfile?: string + fallback?: string + onRequest?: (args: ServeOnRequestArgs) => void +} + +export interface ServeOnRequestArgs { + remoteAddress: string + method: string + path: string + status: number + /** The time to generate the response, not to send it */ + timeInMS: number +} + +/** Documentation: https://esbuild.github.io/api/#serve-return-values */ +export interface ServeResult { + port: number + host: string +} + +export interface TransformOptions extends CommonOptions { + /** Documentation: https://esbuild.github.io/api/#sourcefile */ + sourcefile?: string + /** Documentation: https://esbuild.github.io/api/#loader */ + loader?: Loader + /** Documentation: https://esbuild.github.io/api/#banner */ + banner?: string + /** Documentation: https://esbuild.github.io/api/#footer */ + footer?: string +} + +export interface TransformResult { + code: string + map: string + warnings: Message[] + /** Only when "mangleCache" is present */ + mangleCache: Record | (ProvidedOptions['mangleCache'] extends Object ? never : undefined) + /** Only when "legalComments" is "external" */ + legalComments: string | (ProvidedOptions['legalComments'] extends 'external' ? never : undefined) +} + +export interface TransformFailure extends Error { + errors: Message[] + warnings: Message[] +} + +export interface Plugin { + name: string + setup: (build: PluginBuild) => (void | Promise) +} + +export interface PluginBuild { + /** Documentation: https://esbuild.github.io/plugins/#build-options */ + initialOptions: BuildOptions + + /** Documentation: https://esbuild.github.io/plugins/#resolve */ + resolve(path: string, options?: ResolveOptions): Promise + + /** Documentation: https://esbuild.github.io/plugins/#on-start */ + onStart(callback: () => + (OnStartResult | null | void | Promise)): void + + /** Documentation: https://esbuild.github.io/plugins/#on-end */ + onEnd(callback: (result: BuildResult) => + (OnEndResult | null | void | Promise)): void + + /** Documentation: https://esbuild.github.io/plugins/#on-resolve */ + onResolve(options: OnResolveOptions, callback: (args: OnResolveArgs) => + (OnResolveResult | null | undefined | Promise)): void + + /** Documentation: https://esbuild.github.io/plugins/#on-load */ + onLoad(options: OnLoadOptions, callback: (args: OnLoadArgs) => + (OnLoadResult | null | undefined | Promise)): void + + /** Documentation: https://esbuild.github.io/plugins/#on-dispose */ + onDispose(callback: () => void): void + + // This is a full copy of the esbuild library in case you need it + esbuild: { + context: typeof context, + build: typeof build, + buildSync: typeof buildSync, + transform: typeof transform, + transformSync: typeof transformSync, + formatMessages: typeof formatMessages, + formatMessagesSync: typeof formatMessagesSync, + analyzeMetafile: typeof analyzeMetafile, + analyzeMetafileSync: typeof analyzeMetafileSync, + initialize: typeof initialize, + version: typeof version, + } +} + +/** Documentation: https://esbuild.github.io/plugins/#resolve-options */ +export interface ResolveOptions { + pluginName?: string + importer?: string + namespace?: string + resolveDir?: string + kind?: ImportKind + pluginData?: any + with?: Record +} + +/** Documentation: 
https://esbuild.github.io/plugins/#resolve-results */
+export interface ResolveResult {
+  errors: Message[]
+  warnings: Message[]
+
+  path: string
+  external: boolean
+  sideEffects: boolean
+  namespace: string
+  suffix: string
+  pluginData: any
+}
+
+export interface OnStartResult {
+  errors?: PartialMessage[]
+  warnings?: PartialMessage[]
+}
+
+export interface OnEndResult {
+  errors?: PartialMessage[]
+  warnings?: PartialMessage[]
+}
+
+/** Documentation: https://esbuild.github.io/plugins/#on-resolve-options */
+export interface OnResolveOptions {
+  filter: RegExp
+  namespace?: string
+}
+
+/** Documentation: https://esbuild.github.io/plugins/#on-resolve-arguments */
+export interface OnResolveArgs {
+  path: string
+  importer: string
+  namespace: string
+  resolveDir: string
+  kind: ImportKind
+  pluginData: any
+  with: Record<string, string>
+}
+
+export type ImportKind =
+  | 'entry-point'
+
+  // JS
+  | 'import-statement'
+  | 'require-call'
+  | 'dynamic-import'
+  | 'require-resolve'
+
+  // CSS
+  | 'import-rule'
+  | 'composes-from'
+  | 'url-token'
+
+/** Documentation: https://esbuild.github.io/plugins/#on-resolve-results */
+export interface OnResolveResult {
+  pluginName?: string
+
+  errors?: PartialMessage[]
+  warnings?: PartialMessage[]
+
+  path?: string
+  external?: boolean
+  sideEffects?: boolean
+  namespace?: string
+  suffix?: string
+  pluginData?: any
+
+  watchFiles?: string[]
+  watchDirs?: string[]
+}
+
+/** Documentation: https://esbuild.github.io/plugins/#on-load-options */
+export interface OnLoadOptions {
+  filter: RegExp
+  namespace?: string
+}
+
+/** Documentation: https://esbuild.github.io/plugins/#on-load-arguments */
+export interface OnLoadArgs {
+  path: string
+  namespace: string
+  suffix: string
+  pluginData: any
+  with: Record<string, string>
+}
+
+/** Documentation: https://esbuild.github.io/plugins/#on-load-results */
+export interface OnLoadResult {
+  pluginName?: string
+
+  errors?: PartialMessage[]
+  warnings?: PartialMessage[]
+
+  contents?: string | Uint8Array
+  resolveDir?: string
+  loader?: Loader
+  pluginData?: any
+
+  watchFiles?: string[]
+  watchDirs?: string[]
+}
+
+export interface PartialMessage {
+  id?: string
+  pluginName?: string
+  text?: string
+  location?: Partial<Location> | null
+  notes?: PartialNote[]
+  detail?: any
+}
+
+export interface PartialNote {
+  text?: string
+  location?: Partial<Location> | null
+}
+
+/** Documentation: https://esbuild.github.io/api/#metafile */
+export interface Metafile {
+  inputs: {
+    [path: string]: {
+      bytes: number
+      imports: {
+        path: string
+        kind: ImportKind
+        external?: boolean
+        original?: string
+        with?: Record<string, string>
+      }[]
+      format?: 'cjs' | 'esm'
+      with?: Record<string, string>
+    }
+  }
+  outputs: {
+    [path: string]: {
+      bytes: number
+      inputs: {
+        [path: string]: {
+          bytesInOutput: number
+        }
+      }
+      imports: {
+        path: string
+        kind: ImportKind | 'file-loader'
+        external?: boolean
+      }[]
+      exports: string[]
+      entryPoint?: string
+      cssBundle?: string
+    }
+  }
+}
+
+export interface FormatMessagesOptions {
+  kind: 'error' | 'warning'
+  color?: boolean
+  terminalWidth?: number
+}
+
+export interface AnalyzeMetafileOptions {
+  color?: boolean
+  verbose?: boolean
+}
+
+export interface WatchOptions {
+}
+
+export interface BuildContext<ProvidedOptions extends BuildOptions = BuildOptions> {
+  /** Documentation: https://esbuild.github.io/api/#rebuild */
+  rebuild(): Promise<BuildResult<ProvidedOptions>>
+
+  /** Documentation: https://esbuild.github.io/api/#watch */
+  watch(options?: WatchOptions): Promise<void>
+
+  /** Documentation: https://esbuild.github.io/api/#serve */
+  serve(options?: ServeOptions): Promise<ServeResult>
+
+  cancel(): Promise<void>
+  dispose(): Promise<void>
+}
+
+// This is a TypeScript type-level function which replaces any keys in "In"
+// that aren't in "Out" with "never". We use this to reject properties with
+// typos in object literals. See: https://stackoverflow.com/questions/49580725
+type SameShape<Out, In extends Out> = In & { [Key in Exclude<keyof In, keyof Out>]: never }
+
+/**
+ * This function invokes the "esbuild" command-line tool for you. It returns a
+ * promise that either resolves with a "BuildResult" object or rejects with a
+ * "BuildFailure" object.
+ *
+ * - Works in node: yes
+ * - Works in browser: yes
+ *
+ * Documentation: https://esbuild.github.io/api/#build
+ */
+export declare function build<T extends BuildOptions>(options: SameShape<BuildOptions, T>): Promise<BuildResult<T>>
+
+/**
+ * This is the advanced long-running form of "build" that supports additional
+ * features such as watch mode and a local development server.
+ *
+ * - Works in node: yes
+ * - Works in browser: no
+ *
+ * Documentation: https://esbuild.github.io/api/#build
+ */
+export declare function context<T extends BuildOptions>(options: SameShape<BuildOptions, T>): Promise<BuildContext<T>>
+
+/**
+ * This function transforms a single JavaScript file. It can be used to minify
+ * JavaScript, convert TypeScript/JSX to JavaScript, or convert newer JavaScript
+ * to older JavaScript. It returns a promise that is either resolved with a
+ * "TransformResult" object or rejected with a "TransformFailure" object.
+ *
+ * - Works in node: yes
+ * - Works in browser: yes
+ *
+ * Documentation: https://esbuild.github.io/api/#transform
+ */
+export declare function transform<T extends TransformOptions>(input: string | Uint8Array, options?: SameShape<TransformOptions, T>): Promise<TransformResult<T>>
+
+/**
+ * Converts log messages to formatted message strings suitable for printing in
+ * the terminal. This allows you to reuse the built-in behavior of esbuild's
+ * log message formatter. This is a batch-oriented API for efficiency.
+ *
+ * - Works in node: yes
+ * - Works in browser: yes
+ */
+export declare function formatMessages(messages: PartialMessage[], options: FormatMessagesOptions): Promise<string[]>
+
+/**
+ * Pretty-prints an analysis of the metafile JSON to a string. This is just for
+ * convenience to be able to match esbuild's pretty-printing exactly. If you want
+ * to customize it, you can just inspect the data in the metafile yourself.
+ *
+ * - Works in node: yes
+ * - Works in browser: yes
+ *
+ * Documentation: https://esbuild.github.io/api/#analyze
+ */
+export declare function analyzeMetafile(metafile: Metafile | string, options?: AnalyzeMetafileOptions): Promise<string>
+
+/**
+ * A synchronous version of "build".
+ *
+ * - Works in node: yes
+ * - Works in browser: no
+ *
+ * Documentation: https://esbuild.github.io/api/#build
+ */
+export declare function buildSync<T extends BuildOptions>(options: SameShape<BuildOptions, T>): BuildResult<T>
+
+/**
+ * A synchronous version of "transform".
+ *
+ * - Works in node: yes
+ * - Works in browser: no
+ *
+ * Documentation: https://esbuild.github.io/api/#transform
+ */
+export declare function transformSync<T extends TransformOptions>(input: string | Uint8Array, options?: SameShape<TransformOptions, T>): TransformResult<T>
+
+/**
+ * A synchronous version of "formatMessages".
+ *
+ * - Works in node: yes
+ * - Works in browser: no
+ */
+export declare function formatMessagesSync(messages: PartialMessage[], options: FormatMessagesOptions): string[]
+
+/**
+ * A synchronous version of "analyzeMetafile".
+ *
+ * - Works in node: yes
+ * - Works in browser: no
+ *
+ * Documentation: https://esbuild.github.io/api/#analyze
+ */
+export declare function analyzeMetafileSync(metafile: Metafile | string, options?: AnalyzeMetafileOptions): string
+
+/**
+ * This configures the browser-based version of esbuild. It is necessary to
+ * call this first and wait for the returned promise to be resolved before
+ * making other API calls when using esbuild in the browser.
+ *
+ * - Works in node: yes
+ * - Works in browser: yes ("options" is required)
+ *
+ * Documentation: https://esbuild.github.io/api/#browser
+ */
+export declare function initialize(options: InitializeOptions): Promise<void>
+
+export interface InitializeOptions {
+  /**
+   * The URL of the "esbuild.wasm" file. This must be provided when running
+   * esbuild in the browser.
+   */
+  wasmURL?: string | URL
+
+  /**
+   * The result of calling "new WebAssembly.Module(buffer)" where "buffer"
+   * is a typed array or ArrayBuffer containing the binary code of the
+   * "esbuild.wasm" file.
+   *
+   * You can use this as an alternative to "wasmURL" for environments where it's
+   * not possible to download the WebAssembly module.
+   */
+  wasmModule?: WebAssembly.Module
+
+  /**
+   * By default esbuild runs the WebAssembly-based browser API in a web worker
+   * to avoid blocking the UI thread. This can be disabled by setting "worker"
+   * to false.
+   */
+  worker?: boolean
+}
+
+export let version: string
+
+// Call this function to terminate esbuild's child process. The child process
+// is not terminated and re-created after each API call because it's more
+// efficient to keep it around when there are multiple API calls.
+//
+// In node this happens automatically before the parent node process exits. So
+// you only need to call this if you know you will not make any more esbuild
+// API calls and you want to clean up resources.
+//
+// Unlike node, Deno lacks the necessary APIs to clean up child processes
+// automatically. You must manually call stop() in Deno when you're done
+// using esbuild or Deno will continue running forever.
+//
+// Another reason you might want to call this is if you are using esbuild from
+// within a Deno test. Deno fails tests that create a child process without
+// killing it before the test ends, so you have to call this function (and
+// await the returned promise) in every Deno test that uses esbuild.
+export declare function stop(): Promise<void>
+
+// Note: These declarations exist to avoid type errors when you omit "dom" from
+// "lib" in your "tsconfig.json" file. TypeScript confusingly declares the
+// global "WebAssembly" type in "lib.dom.d.ts" even though it has nothing to do
+// with the browser DOM and is present in many non-browser JavaScript runtimes
+// (e.g. node and deno). Declaring it here allows esbuild's API to be used in
+// these scenarios.
+// +// There's an open issue about getting this problem corrected (although these +// declarations will need to remain even if this is fixed for backward +// compatibility with older TypeScript versions): +// +// https://github.com/microsoft/TypeScript-DOM-lib-generator/issues/826 +// +declare global { + namespace WebAssembly { + interface Module { + } + } + interface URL { + } +} diff --git a/node_modules/esbuild/lib/main.js b/node_modules/esbuild/lib/main.js new file mode 100644 index 0000000..555613c --- /dev/null +++ b/node_modules/esbuild/lib/main.js @@ -0,0 +1,2239 @@ +"use strict"; +var __create = Object.create; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __getProtoOf = Object.getPrototypeOf; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( + // If the importer is in node compatibility mode or this is not an ESM + // file that has been converted to a CommonJS file using a Babel- + // compatible transform (i.e. "__esModule" has not been set), then set + // "default" to the CommonJS "module.exports" for node compatibility. + isNodeMode || !mod || !mod.__esModule ? 
__defProp(target, "default", { value: mod, enumerable: true }) : target, + mod +)); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// lib/npm/node.ts +var node_exports = {}; +__export(node_exports, { + analyzeMetafile: () => analyzeMetafile, + analyzeMetafileSync: () => analyzeMetafileSync, + build: () => build, + buildSync: () => buildSync, + context: () => context, + default: () => node_default, + formatMessages: () => formatMessages, + formatMessagesSync: () => formatMessagesSync, + initialize: () => initialize, + stop: () => stop, + transform: () => transform, + transformSync: () => transformSync, + version: () => version +}); +module.exports = __toCommonJS(node_exports); + +// lib/shared/stdio_protocol.ts +function encodePacket(packet) { + let visit = (value) => { + if (value === null) { + bb.write8(0); + } else if (typeof value === "boolean") { + bb.write8(1); + bb.write8(+value); + } else if (typeof value === "number") { + bb.write8(2); + bb.write32(value | 0); + } else if (typeof value === "string") { + bb.write8(3); + bb.write(encodeUTF8(value)); + } else if (value instanceof Uint8Array) { + bb.write8(4); + bb.write(value); + } else if (value instanceof Array) { + bb.write8(5); + bb.write32(value.length); + for (let item of value) { + visit(item); + } + } else { + let keys = Object.keys(value); + bb.write8(6); + bb.write32(keys.length); + for (let key of keys) { + bb.write(encodeUTF8(key)); + visit(value[key]); + } + } + }; + let bb = new ByteBuffer(); + bb.write32(0); + bb.write32(packet.id << 1 | +!packet.isRequest); + visit(packet.value); + writeUInt32LE(bb.buf, bb.len - 4, 0); + return bb.buf.subarray(0, bb.len); +} +function decodePacket(bytes) { + let visit = () => { + switch (bb.read8()) { + case 0: + return null; + case 1: + return !!bb.read8(); + case 2: + return bb.read32(); + case 3: + return decodeUTF8(bb.read()); + case 4: + return bb.read(); + case 5: { + let count = bb.read32(); + let value2 = []; + for (let i = 0; i < count; i++) { + value2.push(visit()); + } + return value2; + } + case 6: { + let count = bb.read32(); + let value2 = {}; + for (let i = 0; i < count; i++) { + value2[decodeUTF8(bb.read())] = visit(); + } + return value2; + } + default: + throw new Error("Invalid packet"); + } + }; + let bb = new ByteBuffer(bytes); + let id = bb.read32(); + let isRequest = (id & 1) === 0; + id >>>= 1; + let value = visit(); + if (bb.ptr !== bytes.length) { + throw new Error("Invalid packet"); + } + return { id, isRequest, value }; +} +var ByteBuffer = class { + constructor(buf = new Uint8Array(1024)) { + this.buf = buf; + this.len = 0; + this.ptr = 0; + } + _write(delta) { + if (this.len + delta > this.buf.length) { + let clone = new Uint8Array((this.len + delta) * 2); + clone.set(this.buf); + this.buf = clone; + } + this.len += delta; + return this.len - delta; + } + write8(value) { + let offset = this._write(1); + this.buf[offset] = value; + } + write32(value) { + let offset = this._write(4); + writeUInt32LE(this.buf, value, offset); + } + write(bytes) { + let offset = this._write(4 + bytes.length); + writeUInt32LE(this.buf, bytes.length, offset); + this.buf.set(bytes, offset + 4); + } + _read(delta) { + if (this.ptr + delta > this.buf.length) { + throw new Error("Invalid packet"); + } + this.ptr += delta; + return this.ptr - delta; + } + read8() { + return this.buf[this._read(1)]; + } + read32() { + return readUInt32LE(this.buf, this._read(4)); + } + read() { + let length = this.read32(); + let bytes = new 
Uint8Array(length); + let ptr = this._read(bytes.length); + bytes.set(this.buf.subarray(ptr, ptr + length)); + return bytes; + } +}; +var encodeUTF8; +var decodeUTF8; +var encodeInvariant; +if (typeof TextEncoder !== "undefined" && typeof TextDecoder !== "undefined") { + let encoder = new TextEncoder(); + let decoder = new TextDecoder(); + encodeUTF8 = (text) => encoder.encode(text); + decodeUTF8 = (bytes) => decoder.decode(bytes); + encodeInvariant = 'new TextEncoder().encode("")'; +} else if (typeof Buffer !== "undefined") { + encodeUTF8 = (text) => Buffer.from(text); + decodeUTF8 = (bytes) => { + let { buffer, byteOffset, byteLength } = bytes; + return Buffer.from(buffer, byteOffset, byteLength).toString(); + }; + encodeInvariant = 'Buffer.from("")'; +} else { + throw new Error("No UTF-8 codec found"); +} +if (!(encodeUTF8("") instanceof Uint8Array)) + throw new Error(`Invariant violation: "${encodeInvariant} instanceof Uint8Array" is incorrectly false + +This indicates that your JavaScript environment is broken. You cannot use +esbuild in this environment because esbuild relies on this invariant. This +is not a problem with esbuild. You need to fix your environment instead. +`); +function readUInt32LE(buffer, offset) { + return buffer[offset++] | buffer[offset++] << 8 | buffer[offset++] << 16 | buffer[offset++] << 24; +} +function writeUInt32LE(buffer, value, offset) { + buffer[offset++] = value; + buffer[offset++] = value >> 8; + buffer[offset++] = value >> 16; + buffer[offset++] = value >> 24; +} + +// lib/shared/common.ts +var quote = JSON.stringify; +var buildLogLevelDefault = "warning"; +var transformLogLevelDefault = "silent"; +function validateTarget(target) { + validateStringValue(target, "target"); + if (target.indexOf(",") >= 0) throw new Error(`Invalid target: ${target}`); + return target; +} +var canBeAnything = () => null; +var mustBeBoolean = (value) => typeof value === "boolean" ? null : "a boolean"; +var mustBeString = (value) => typeof value === "string" ? null : "a string"; +var mustBeRegExp = (value) => value instanceof RegExp ? null : "a RegExp object"; +var mustBeInteger = (value) => typeof value === "number" && value === (value | 0) ? null : "an integer"; +var mustBeFunction = (value) => typeof value === "function" ? null : "a function"; +var mustBeArray = (value) => Array.isArray(value) ? null : "an array"; +var mustBeObject = (value) => typeof value === "object" && value !== null && !Array.isArray(value) ? null : "an object"; +var mustBeEntryPoints = (value) => typeof value === "object" && value !== null ? null : "an array or an object"; +var mustBeWebAssemblyModule = (value) => value instanceof WebAssembly.Module ? null : "a WebAssembly.Module"; +var mustBeObjectOrNull = (value) => typeof value === "object" && !Array.isArray(value) ? null : "an object or null"; +var mustBeStringOrBoolean = (value) => typeof value === "string" || typeof value === "boolean" ? null : "a string or a boolean"; +var mustBeStringOrObject = (value) => typeof value === "string" || typeof value === "object" && value !== null && !Array.isArray(value) ? null : "a string or an object"; +var mustBeStringOrArray = (value) => typeof value === "string" || Array.isArray(value) ? null : "a string or an array"; +var mustBeStringOrUint8Array = (value) => typeof value === "string" || value instanceof Uint8Array ? null : "a string or a Uint8Array"; +var mustBeStringOrURL = (value) => typeof value === "string" || value instanceof URL ? 
null : "a string or a URL"; +function getFlag(object, keys, key, mustBeFn) { + let value = object[key]; + keys[key + ""] = true; + if (value === void 0) return void 0; + let mustBe = mustBeFn(value); + if (mustBe !== null) throw new Error(`${quote(key)} must be ${mustBe}`); + return value; +} +function checkForInvalidFlags(object, keys, where) { + for (let key in object) { + if (!(key in keys)) { + throw new Error(`Invalid option ${where}: ${quote(key)}`); + } + } +} +function validateInitializeOptions(options) { + let keys = /* @__PURE__ */ Object.create(null); + let wasmURL = getFlag(options, keys, "wasmURL", mustBeStringOrURL); + let wasmModule = getFlag(options, keys, "wasmModule", mustBeWebAssemblyModule); + let worker = getFlag(options, keys, "worker", mustBeBoolean); + checkForInvalidFlags(options, keys, "in initialize() call"); + return { + wasmURL, + wasmModule, + worker + }; +} +function validateMangleCache(mangleCache) { + let validated; + if (mangleCache !== void 0) { + validated = /* @__PURE__ */ Object.create(null); + for (let key in mangleCache) { + let value = mangleCache[key]; + if (typeof value === "string" || value === false) { + validated[key] = value; + } else { + throw new Error(`Expected ${quote(key)} in mangle cache to map to either a string or false`); + } + } + } + return validated; +} +function pushLogFlags(flags, options, keys, isTTY2, logLevelDefault) { + let color = getFlag(options, keys, "color", mustBeBoolean); + let logLevel = getFlag(options, keys, "logLevel", mustBeString); + let logLimit = getFlag(options, keys, "logLimit", mustBeInteger); + if (color !== void 0) flags.push(`--color=${color}`); + else if (isTTY2) flags.push(`--color=true`); + flags.push(`--log-level=${logLevel || logLevelDefault}`); + flags.push(`--log-limit=${logLimit || 0}`); +} +function validateStringValue(value, what, key) { + if (typeof value !== "string") { + throw new Error(`Expected value for ${what}${key !== void 0 ? 
" " + quote(key) : ""} to be a string, got ${typeof value} instead`); + } + return value; +} +function pushCommonFlags(flags, options, keys) { + let legalComments = getFlag(options, keys, "legalComments", mustBeString); + let sourceRoot = getFlag(options, keys, "sourceRoot", mustBeString); + let sourcesContent = getFlag(options, keys, "sourcesContent", mustBeBoolean); + let target = getFlag(options, keys, "target", mustBeStringOrArray); + let format = getFlag(options, keys, "format", mustBeString); + let globalName = getFlag(options, keys, "globalName", mustBeString); + let mangleProps = getFlag(options, keys, "mangleProps", mustBeRegExp); + let reserveProps = getFlag(options, keys, "reserveProps", mustBeRegExp); + let mangleQuoted = getFlag(options, keys, "mangleQuoted", mustBeBoolean); + let minify = getFlag(options, keys, "minify", mustBeBoolean); + let minifySyntax = getFlag(options, keys, "minifySyntax", mustBeBoolean); + let minifyWhitespace = getFlag(options, keys, "minifyWhitespace", mustBeBoolean); + let minifyIdentifiers = getFlag(options, keys, "minifyIdentifiers", mustBeBoolean); + let lineLimit = getFlag(options, keys, "lineLimit", mustBeInteger); + let drop = getFlag(options, keys, "drop", mustBeArray); + let dropLabels = getFlag(options, keys, "dropLabels", mustBeArray); + let charset = getFlag(options, keys, "charset", mustBeString); + let treeShaking = getFlag(options, keys, "treeShaking", mustBeBoolean); + let ignoreAnnotations = getFlag(options, keys, "ignoreAnnotations", mustBeBoolean); + let jsx = getFlag(options, keys, "jsx", mustBeString); + let jsxFactory = getFlag(options, keys, "jsxFactory", mustBeString); + let jsxFragment = getFlag(options, keys, "jsxFragment", mustBeString); + let jsxImportSource = getFlag(options, keys, "jsxImportSource", mustBeString); + let jsxDev = getFlag(options, keys, "jsxDev", mustBeBoolean); + let jsxSideEffects = getFlag(options, keys, "jsxSideEffects", mustBeBoolean); + let define = getFlag(options, keys, "define", mustBeObject); + let logOverride = getFlag(options, keys, "logOverride", mustBeObject); + let supported = getFlag(options, keys, "supported", mustBeObject); + let pure = getFlag(options, keys, "pure", mustBeArray); + let keepNames = getFlag(options, keys, "keepNames", mustBeBoolean); + let platform = getFlag(options, keys, "platform", mustBeString); + let tsconfigRaw = getFlag(options, keys, "tsconfigRaw", mustBeStringOrObject); + if (legalComments) flags.push(`--legal-comments=${legalComments}`); + if (sourceRoot !== void 0) flags.push(`--source-root=${sourceRoot}`); + if (sourcesContent !== void 0) flags.push(`--sources-content=${sourcesContent}`); + if (target) { + if (Array.isArray(target)) flags.push(`--target=${Array.from(target).map(validateTarget).join(",")}`); + else flags.push(`--target=${validateTarget(target)}`); + } + if (format) flags.push(`--format=${format}`); + if (globalName) flags.push(`--global-name=${globalName}`); + if (platform) flags.push(`--platform=${platform}`); + if (tsconfigRaw) flags.push(`--tsconfig-raw=${typeof tsconfigRaw === "string" ? 
tsconfigRaw : JSON.stringify(tsconfigRaw)}`); + if (minify) flags.push("--minify"); + if (minifySyntax) flags.push("--minify-syntax"); + if (minifyWhitespace) flags.push("--minify-whitespace"); + if (minifyIdentifiers) flags.push("--minify-identifiers"); + if (lineLimit) flags.push(`--line-limit=${lineLimit}`); + if (charset) flags.push(`--charset=${charset}`); + if (treeShaking !== void 0) flags.push(`--tree-shaking=${treeShaking}`); + if (ignoreAnnotations) flags.push(`--ignore-annotations`); + if (drop) for (let what of drop) flags.push(`--drop:${validateStringValue(what, "drop")}`); + if (dropLabels) flags.push(`--drop-labels=${Array.from(dropLabels).map((what) => validateStringValue(what, "dropLabels")).join(",")}`); + if (mangleProps) flags.push(`--mangle-props=${mangleProps.source}`); + if (reserveProps) flags.push(`--reserve-props=${reserveProps.source}`); + if (mangleQuoted !== void 0) flags.push(`--mangle-quoted=${mangleQuoted}`); + if (jsx) flags.push(`--jsx=${jsx}`); + if (jsxFactory) flags.push(`--jsx-factory=${jsxFactory}`); + if (jsxFragment) flags.push(`--jsx-fragment=${jsxFragment}`); + if (jsxImportSource) flags.push(`--jsx-import-source=${jsxImportSource}`); + if (jsxDev) flags.push(`--jsx-dev`); + if (jsxSideEffects) flags.push(`--jsx-side-effects`); + if (define) { + for (let key in define) { + if (key.indexOf("=") >= 0) throw new Error(`Invalid define: ${key}`); + flags.push(`--define:${key}=${validateStringValue(define[key], "define", key)}`); + } + } + if (logOverride) { + for (let key in logOverride) { + if (key.indexOf("=") >= 0) throw new Error(`Invalid log override: ${key}`); + flags.push(`--log-override:${key}=${validateStringValue(logOverride[key], "log override", key)}`); + } + } + if (supported) { + for (let key in supported) { + if (key.indexOf("=") >= 0) throw new Error(`Invalid supported: ${key}`); + const value = supported[key]; + if (typeof value !== "boolean") throw new Error(`Expected value for supported ${quote(key)} to be a boolean, got ${typeof value} instead`); + flags.push(`--supported:${key}=${value}`); + } + } + if (pure) for (let fn of pure) flags.push(`--pure:${validateStringValue(fn, "pure")}`); + if (keepNames) flags.push(`--keep-names`); +} +function flagsForBuildOptions(callName, options, isTTY2, logLevelDefault, writeDefault) { + var _a2; + let flags = []; + let entries = []; + let keys = /* @__PURE__ */ Object.create(null); + let stdinContents = null; + let stdinResolveDir = null; + pushLogFlags(flags, options, keys, isTTY2, logLevelDefault); + pushCommonFlags(flags, options, keys); + let sourcemap = getFlag(options, keys, "sourcemap", mustBeStringOrBoolean); + let bundle = getFlag(options, keys, "bundle", mustBeBoolean); + let splitting = getFlag(options, keys, "splitting", mustBeBoolean); + let preserveSymlinks = getFlag(options, keys, "preserveSymlinks", mustBeBoolean); + let metafile = getFlag(options, keys, "metafile", mustBeBoolean); + let outfile = getFlag(options, keys, "outfile", mustBeString); + let outdir = getFlag(options, keys, "outdir", mustBeString); + let outbase = getFlag(options, keys, "outbase", mustBeString); + let tsconfig = getFlag(options, keys, "tsconfig", mustBeString); + let resolveExtensions = getFlag(options, keys, "resolveExtensions", mustBeArray); + let nodePathsInput = getFlag(options, keys, "nodePaths", mustBeArray); + let mainFields = getFlag(options, keys, "mainFields", mustBeArray); + let conditions = getFlag(options, keys, "conditions", mustBeArray); + let external = getFlag(options, keys, "external", 
mustBeArray); + let packages = getFlag(options, keys, "packages", mustBeString); + let alias = getFlag(options, keys, "alias", mustBeObject); + let loader = getFlag(options, keys, "loader", mustBeObject); + let outExtension = getFlag(options, keys, "outExtension", mustBeObject); + let publicPath = getFlag(options, keys, "publicPath", mustBeString); + let entryNames = getFlag(options, keys, "entryNames", mustBeString); + let chunkNames = getFlag(options, keys, "chunkNames", mustBeString); + let assetNames = getFlag(options, keys, "assetNames", mustBeString); + let inject = getFlag(options, keys, "inject", mustBeArray); + let banner = getFlag(options, keys, "banner", mustBeObject); + let footer = getFlag(options, keys, "footer", mustBeObject); + let entryPoints = getFlag(options, keys, "entryPoints", mustBeEntryPoints); + let absWorkingDir = getFlag(options, keys, "absWorkingDir", mustBeString); + let stdin = getFlag(options, keys, "stdin", mustBeObject); + let write = (_a2 = getFlag(options, keys, "write", mustBeBoolean)) != null ? _a2 : writeDefault; + let allowOverwrite = getFlag(options, keys, "allowOverwrite", mustBeBoolean); + let mangleCache = getFlag(options, keys, "mangleCache", mustBeObject); + keys.plugins = true; + checkForInvalidFlags(options, keys, `in ${callName}() call`); + if (sourcemap) flags.push(`--sourcemap${sourcemap === true ? "" : `=${sourcemap}`}`); + if (bundle) flags.push("--bundle"); + if (allowOverwrite) flags.push("--allow-overwrite"); + if (splitting) flags.push("--splitting"); + if (preserveSymlinks) flags.push("--preserve-symlinks"); + if (metafile) flags.push(`--metafile`); + if (outfile) flags.push(`--outfile=${outfile}`); + if (outdir) flags.push(`--outdir=${outdir}`); + if (outbase) flags.push(`--outbase=${outbase}`); + if (tsconfig) flags.push(`--tsconfig=${tsconfig}`); + if (packages) flags.push(`--packages=${packages}`); + if (resolveExtensions) { + let values = []; + for (let value of resolveExtensions) { + validateStringValue(value, "resolve extension"); + if (value.indexOf(",") >= 0) throw new Error(`Invalid resolve extension: ${value}`); + values.push(value); + } + flags.push(`--resolve-extensions=${values.join(",")}`); + } + if (publicPath) flags.push(`--public-path=${publicPath}`); + if (entryNames) flags.push(`--entry-names=${entryNames}`); + if (chunkNames) flags.push(`--chunk-names=${chunkNames}`); + if (assetNames) flags.push(`--asset-names=${assetNames}`); + if (mainFields) { + let values = []; + for (let value of mainFields) { + validateStringValue(value, "main field"); + if (value.indexOf(",") >= 0) throw new Error(`Invalid main field: ${value}`); + values.push(value); + } + flags.push(`--main-fields=${values.join(",")}`); + } + if (conditions) { + let values = []; + for (let value of conditions) { + validateStringValue(value, "condition"); + if (value.indexOf(",") >= 0) throw new Error(`Invalid condition: ${value}`); + values.push(value); + } + flags.push(`--conditions=${values.join(",")}`); + } + if (external) for (let name of external) flags.push(`--external:${validateStringValue(name, "external")}`); + if (alias) { + for (let old in alias) { + if (old.indexOf("=") >= 0) throw new Error(`Invalid package name in alias: ${old}`); + flags.push(`--alias:${old}=${validateStringValue(alias[old], "alias", old)}`); + } + } + if (banner) { + for (let type in banner) { + if (type.indexOf("=") >= 0) throw new Error(`Invalid banner file type: ${type}`); + flags.push(`--banner:${type}=${validateStringValue(banner[type], "banner", type)}`); + } + } + 
if (footer) { + for (let type in footer) { + if (type.indexOf("=") >= 0) throw new Error(`Invalid footer file type: ${type}`); + flags.push(`--footer:${type}=${validateStringValue(footer[type], "footer", type)}`); + } + } + if (inject) for (let path3 of inject) flags.push(`--inject:${validateStringValue(path3, "inject")}`); + if (loader) { + for (let ext in loader) { + if (ext.indexOf("=") >= 0) throw new Error(`Invalid loader extension: ${ext}`); + flags.push(`--loader:${ext}=${validateStringValue(loader[ext], "loader", ext)}`); + } + } + if (outExtension) { + for (let ext in outExtension) { + if (ext.indexOf("=") >= 0) throw new Error(`Invalid out extension: ${ext}`); + flags.push(`--out-extension:${ext}=${validateStringValue(outExtension[ext], "out extension", ext)}`); + } + } + if (entryPoints) { + if (Array.isArray(entryPoints)) { + for (let i = 0, n = entryPoints.length; i < n; i++) { + let entryPoint = entryPoints[i]; + if (typeof entryPoint === "object" && entryPoint !== null) { + let entryPointKeys = /* @__PURE__ */ Object.create(null); + let input = getFlag(entryPoint, entryPointKeys, "in", mustBeString); + let output = getFlag(entryPoint, entryPointKeys, "out", mustBeString); + checkForInvalidFlags(entryPoint, entryPointKeys, "in entry point at index " + i); + if (input === void 0) throw new Error('Missing property "in" for entry point at index ' + i); + if (output === void 0) throw new Error('Missing property "out" for entry point at index ' + i); + entries.push([output, input]); + } else { + entries.push(["", validateStringValue(entryPoint, "entry point at index " + i)]); + } + } + } else { + for (let key in entryPoints) { + entries.push([key, validateStringValue(entryPoints[key], "entry point", key)]); + } + } + } + if (stdin) { + let stdinKeys = /* @__PURE__ */ Object.create(null); + let contents = getFlag(stdin, stdinKeys, "contents", mustBeStringOrUint8Array); + let resolveDir = getFlag(stdin, stdinKeys, "resolveDir", mustBeString); + let sourcefile = getFlag(stdin, stdinKeys, "sourcefile", mustBeString); + let loader2 = getFlag(stdin, stdinKeys, "loader", mustBeString); + checkForInvalidFlags(stdin, stdinKeys, 'in "stdin" object'); + if (sourcefile) flags.push(`--sourcefile=${sourcefile}`); + if (loader2) flags.push(`--loader=${loader2}`); + if (resolveDir) stdinResolveDir = resolveDir; + if (typeof contents === "string") stdinContents = encodeUTF8(contents); + else if (contents instanceof Uint8Array) stdinContents = contents; + } + let nodePaths = []; + if (nodePathsInput) { + for (let value of nodePathsInput) { + value += ""; + nodePaths.push(value); + } + } + return { + entries, + flags, + write, + stdinContents, + stdinResolveDir, + absWorkingDir, + nodePaths, + mangleCache: validateMangleCache(mangleCache) + }; +} +function flagsForTransformOptions(callName, options, isTTY2, logLevelDefault) { + let flags = []; + let keys = /* @__PURE__ */ Object.create(null); + pushLogFlags(flags, options, keys, isTTY2, logLevelDefault); + pushCommonFlags(flags, options, keys); + let sourcemap = getFlag(options, keys, "sourcemap", mustBeStringOrBoolean); + let sourcefile = getFlag(options, keys, "sourcefile", mustBeString); + let loader = getFlag(options, keys, "loader", mustBeString); + let banner = getFlag(options, keys, "banner", mustBeString); + let footer = getFlag(options, keys, "footer", mustBeString); + let mangleCache = getFlag(options, keys, "mangleCache", mustBeObject); + checkForInvalidFlags(options, keys, `in ${callName}() call`); + if (sourcemap) 
flags.push(`--sourcemap=${sourcemap === true ? "external" : sourcemap}`); + if (sourcefile) flags.push(`--sourcefile=${sourcefile}`); + if (loader) flags.push(`--loader=${loader}`); + if (banner) flags.push(`--banner=${banner}`); + if (footer) flags.push(`--footer=${footer}`); + return { + flags, + mangleCache: validateMangleCache(mangleCache) + }; +} +function createChannel(streamIn) { + const requestCallbacksByKey = {}; + const closeData = { didClose: false, reason: "" }; + let responseCallbacks = {}; + let nextRequestID = 0; + let nextBuildKey = 0; + let stdout = new Uint8Array(16 * 1024); + let stdoutUsed = 0; + let readFromStdout = (chunk) => { + let limit = stdoutUsed + chunk.length; + if (limit > stdout.length) { + let swap = new Uint8Array(limit * 2); + swap.set(stdout); + stdout = swap; + } + stdout.set(chunk, stdoutUsed); + stdoutUsed += chunk.length; + let offset = 0; + while (offset + 4 <= stdoutUsed) { + let length = readUInt32LE(stdout, offset); + if (offset + 4 + length > stdoutUsed) { + break; + } + offset += 4; + handleIncomingPacket(stdout.subarray(offset, offset + length)); + offset += length; + } + if (offset > 0) { + stdout.copyWithin(0, offset, stdoutUsed); + stdoutUsed -= offset; + } + }; + let afterClose = (error) => { + closeData.didClose = true; + if (error) closeData.reason = ": " + (error.message || error); + const text = "The service was stopped" + closeData.reason; + for (let id in responseCallbacks) { + responseCallbacks[id](text, null); + } + responseCallbacks = {}; + }; + let sendRequest = (refs, value, callback) => { + if (closeData.didClose) return callback("The service is no longer running" + closeData.reason, null); + let id = nextRequestID++; + responseCallbacks[id] = (error, response) => { + try { + callback(error, response); + } finally { + if (refs) refs.unref(); + } + }; + if (refs) refs.ref(); + streamIn.writeToStdin(encodePacket({ id, isRequest: true, value })); + }; + let sendResponse = (id, value) => { + if (closeData.didClose) throw new Error("The service is no longer running" + closeData.reason); + streamIn.writeToStdin(encodePacket({ id, isRequest: false, value })); + }; + let handleRequest = async (id, request) => { + try { + if (request.command === "ping") { + sendResponse(id, {}); + return; + } + if (typeof request.key === "number") { + const requestCallbacks = requestCallbacksByKey[request.key]; + if (!requestCallbacks) { + return; + } + const callback = requestCallbacks[request.command]; + if (callback) { + await callback(id, request); + return; + } + } + throw new Error(`Invalid command: ` + request.command); + } catch (e) { + const errors = [extractErrorMessageV8(e, streamIn, null, void 0, "")]; + try { + sendResponse(id, { errors }); + } catch { + } + } + }; + let isFirstPacket = true; + let handleIncomingPacket = (bytes) => { + if (isFirstPacket) { + isFirstPacket = false; + let binaryVersion = String.fromCharCode(...bytes); + if (binaryVersion !== "0.21.5") { + throw new Error(`Cannot start service: Host version "${"0.21.5"}" does not match binary version ${quote(binaryVersion)}`); + } + return; + } + let packet = decodePacket(bytes); + if (packet.isRequest) { + handleRequest(packet.id, packet.value); + } else { + let callback = responseCallbacks[packet.id]; + delete responseCallbacks[packet.id]; + if (packet.value.error) callback(packet.value.error, {}); + else callback(null, packet.value); + } + }; + let buildOrContext = ({ callName, refs, options, isTTY: isTTY2, defaultWD: defaultWD2, callback }) => { + let refCount = 0; + const 
buildKey = nextBuildKey++; + const requestCallbacks = {}; + const buildRefs = { + ref() { + if (++refCount === 1) { + if (refs) refs.ref(); + } + }, + unref() { + if (--refCount === 0) { + delete requestCallbacksByKey[buildKey]; + if (refs) refs.unref(); + } + } + }; + requestCallbacksByKey[buildKey] = requestCallbacks; + buildRefs.ref(); + buildOrContextImpl( + callName, + buildKey, + sendRequest, + sendResponse, + buildRefs, + streamIn, + requestCallbacks, + options, + isTTY2, + defaultWD2, + (err, res) => { + try { + callback(err, res); + } finally { + buildRefs.unref(); + } + } + ); + }; + let transform2 = ({ callName, refs, input, options, isTTY: isTTY2, fs: fs3, callback }) => { + const details = createObjectStash(); + let start = (inputPath) => { + try { + if (typeof input !== "string" && !(input instanceof Uint8Array)) + throw new Error('The input to "transform" must be a string or a Uint8Array'); + let { + flags, + mangleCache + } = flagsForTransformOptions(callName, options, isTTY2, transformLogLevelDefault); + let request = { + command: "transform", + flags, + inputFS: inputPath !== null, + input: inputPath !== null ? encodeUTF8(inputPath) : typeof input === "string" ? encodeUTF8(input) : input + }; + if (mangleCache) request.mangleCache = mangleCache; + sendRequest(refs, request, (error, response) => { + if (error) return callback(new Error(error), null); + let errors = replaceDetailsInMessages(response.errors, details); + let warnings = replaceDetailsInMessages(response.warnings, details); + let outstanding = 1; + let next = () => { + if (--outstanding === 0) { + let result = { + warnings, + code: response.code, + map: response.map, + mangleCache: void 0, + legalComments: void 0 + }; + if ("legalComments" in response) result.legalComments = response == null ? void 0 : response.legalComments; + if (response.mangleCache) result.mangleCache = response == null ? 
void 0 : response.mangleCache; + callback(null, result); + } + }; + if (errors.length > 0) return callback(failureErrorWithLog("Transform failed", errors, warnings), null); + if (response.codeFS) { + outstanding++; + fs3.readFile(response.code, (err, contents) => { + if (err !== null) { + callback(err, null); + } else { + response.code = contents; + next(); + } + }); + } + if (response.mapFS) { + outstanding++; + fs3.readFile(response.map, (err, contents) => { + if (err !== null) { + callback(err, null); + } else { + response.map = contents; + next(); + } + }); + } + next(); + }); + } catch (e) { + let flags = []; + try { + pushLogFlags(flags, options, {}, isTTY2, transformLogLevelDefault); + } catch { + } + const error = extractErrorMessageV8(e, streamIn, details, void 0, ""); + sendRequest(refs, { command: "error", flags, error }, () => { + error.detail = details.load(error.detail); + callback(failureErrorWithLog("Transform failed", [error], []), null); + }); + } + }; + if ((typeof input === "string" || input instanceof Uint8Array) && input.length > 1024 * 1024) { + let next = start; + start = () => fs3.writeFile(input, next); + } + start(null); + }; + let formatMessages2 = ({ callName, refs, messages, options, callback }) => { + if (!options) throw new Error(`Missing second argument in ${callName}() call`); + let keys = {}; + let kind = getFlag(options, keys, "kind", mustBeString); + let color = getFlag(options, keys, "color", mustBeBoolean); + let terminalWidth = getFlag(options, keys, "terminalWidth", mustBeInteger); + checkForInvalidFlags(options, keys, `in ${callName}() call`); + if (kind === void 0) throw new Error(`Missing "kind" in ${callName}() call`); + if (kind !== "error" && kind !== "warning") throw new Error(`Expected "kind" to be "error" or "warning" in ${callName}() call`); + let request = { + command: "format-msgs", + messages: sanitizeMessages(messages, "messages", null, "", terminalWidth), + isWarning: kind === "warning" + }; + if (color !== void 0) request.color = color; + if (terminalWidth !== void 0) request.terminalWidth = terminalWidth; + sendRequest(refs, request, (error, response) => { + if (error) return callback(new Error(error), null); + callback(null, response.messages); + }); + }; + let analyzeMetafile2 = ({ callName, refs, metafile, options, callback }) => { + if (options === void 0) options = {}; + let keys = {}; + let color = getFlag(options, keys, "color", mustBeBoolean); + let verbose = getFlag(options, keys, "verbose", mustBeBoolean); + checkForInvalidFlags(options, keys, `in ${callName}() call`); + let request = { + command: "analyze-metafile", + metafile + }; + if (color !== void 0) request.color = color; + if (verbose !== void 0) request.verbose = verbose; + sendRequest(refs, request, (error, response) => { + if (error) return callback(new Error(error), null); + callback(null, response.result); + }); + }; + return { + readFromStdout, + afterClose, + service: { + buildOrContext, + transform: transform2, + formatMessages: formatMessages2, + analyzeMetafile: analyzeMetafile2 + } + }; +} +function buildOrContextImpl(callName, buildKey, sendRequest, sendResponse, refs, streamIn, requestCallbacks, options, isTTY2, defaultWD2, callback) { + const details = createObjectStash(); + const isContext = callName === "context"; + const handleError = (e, pluginName) => { + const flags = []; + try { + pushLogFlags(flags, options, {}, isTTY2, buildLogLevelDefault); + } catch { + } + const message = extractErrorMessageV8(e, streamIn, details, void 0, pluginName); + 
sendRequest(refs, { command: "error", flags, error: message }, () => { + message.detail = details.load(message.detail); + callback(failureErrorWithLog(isContext ? "Context failed" : "Build failed", [message], []), null); + }); + }; + let plugins; + if (typeof options === "object") { + const value = options.plugins; + if (value !== void 0) { + if (!Array.isArray(value)) return handleError(new Error(`"plugins" must be an array`), ""); + plugins = value; + } + } + if (plugins && plugins.length > 0) { + if (streamIn.isSync) return handleError(new Error("Cannot use plugins in synchronous API calls"), ""); + handlePlugins( + buildKey, + sendRequest, + sendResponse, + refs, + streamIn, + requestCallbacks, + options, + plugins, + details + ).then( + (result) => { + if (!result.ok) return handleError(result.error, result.pluginName); + try { + buildOrContextContinue(result.requestPlugins, result.runOnEndCallbacks, result.scheduleOnDisposeCallbacks); + } catch (e) { + handleError(e, ""); + } + }, + (e) => handleError(e, "") + ); + return; + } + try { + buildOrContextContinue(null, (result, done) => done([], []), () => { + }); + } catch (e) { + handleError(e, ""); + } + function buildOrContextContinue(requestPlugins, runOnEndCallbacks, scheduleOnDisposeCallbacks) { + const writeDefault = streamIn.hasFS; + const { + entries, + flags, + write, + stdinContents, + stdinResolveDir, + absWorkingDir, + nodePaths, + mangleCache + } = flagsForBuildOptions(callName, options, isTTY2, buildLogLevelDefault, writeDefault); + if (write && !streamIn.hasFS) throw new Error(`The "write" option is unavailable in this environment`); + const request = { + command: "build", + key: buildKey, + entries, + flags, + write, + stdinContents, + stdinResolveDir, + absWorkingDir: absWorkingDir || defaultWD2, + nodePaths, + context: isContext + }; + if (requestPlugins) request.plugins = requestPlugins; + if (mangleCache) request.mangleCache = mangleCache; + const buildResponseToResult = (response, callback2) => { + const result = { + errors: replaceDetailsInMessages(response.errors, details), + warnings: replaceDetailsInMessages(response.warnings, details), + outputFiles: void 0, + metafile: void 0, + mangleCache: void 0 + }; + const originalErrors = result.errors.slice(); + const originalWarnings = result.warnings.slice(); + if (response.outputFiles) result.outputFiles = response.outputFiles.map(convertOutputFiles); + if (response.metafile) result.metafile = JSON.parse(response.metafile); + if (response.mangleCache) result.mangleCache = response.mangleCache; + if (response.writeToStdout !== void 0) console.log(decodeUTF8(response.writeToStdout).replace(/\n$/, "")); + runOnEndCallbacks(result, (onEndErrors, onEndWarnings) => { + if (originalErrors.length > 0 || onEndErrors.length > 0) { + const error = failureErrorWithLog("Build failed", originalErrors.concat(onEndErrors), originalWarnings.concat(onEndWarnings)); + return callback2(error, null, onEndErrors, onEndWarnings); + } + callback2(null, result, onEndErrors, onEndWarnings); + }); + }; + let latestResultPromise; + let provideLatestResult; + if (isContext) + requestCallbacks["on-end"] = (id, request2) => new Promise((resolve) => { + buildResponseToResult(request2, (err, result, onEndErrors, onEndWarnings) => { + const response = { + errors: onEndErrors, + warnings: onEndWarnings + }; + if (provideLatestResult) provideLatestResult(err, result); + latestResultPromise = void 0; + provideLatestResult = void 0; + sendResponse(id, response); + resolve(); + }); + }); + 
sendRequest(refs, request, (error, response) => { + if (error) return callback(new Error(error), null); + if (!isContext) { + return buildResponseToResult(response, (err, res) => { + scheduleOnDisposeCallbacks(); + return callback(err, res); + }); + } + if (response.errors.length > 0) { + return callback(failureErrorWithLog("Context failed", response.errors, response.warnings), null); + } + let didDispose = false; + const result = { + rebuild: () => { + if (!latestResultPromise) latestResultPromise = new Promise((resolve, reject) => { + let settlePromise; + provideLatestResult = (err, result2) => { + if (!settlePromise) settlePromise = () => err ? reject(err) : resolve(result2); + }; + const triggerAnotherBuild = () => { + const request2 = { + command: "rebuild", + key: buildKey + }; + sendRequest(refs, request2, (error2, response2) => { + if (error2) { + reject(new Error(error2)); + } else if (settlePromise) { + settlePromise(); + } else { + triggerAnotherBuild(); + } + }); + }; + triggerAnotherBuild(); + }); + return latestResultPromise; + }, + watch: (options2 = {}) => new Promise((resolve, reject) => { + if (!streamIn.hasFS) throw new Error(`Cannot use the "watch" API in this environment`); + const keys = {}; + checkForInvalidFlags(options2, keys, `in watch() call`); + const request2 = { + command: "watch", + key: buildKey + }; + sendRequest(refs, request2, (error2) => { + if (error2) reject(new Error(error2)); + else resolve(void 0); + }); + }), + serve: (options2 = {}) => new Promise((resolve, reject) => { + if (!streamIn.hasFS) throw new Error(`Cannot use the "serve" API in this environment`); + const keys = {}; + const port = getFlag(options2, keys, "port", mustBeInteger); + const host = getFlag(options2, keys, "host", mustBeString); + const servedir = getFlag(options2, keys, "servedir", mustBeString); + const keyfile = getFlag(options2, keys, "keyfile", mustBeString); + const certfile = getFlag(options2, keys, "certfile", mustBeString); + const fallback = getFlag(options2, keys, "fallback", mustBeString); + const onRequest = getFlag(options2, keys, "onRequest", mustBeFunction); + checkForInvalidFlags(options2, keys, `in serve() call`); + const request2 = { + command: "serve", + key: buildKey, + onRequest: !!onRequest + }; + if (port !== void 0) request2.port = port; + if (host !== void 0) request2.host = host; + if (servedir !== void 0) request2.servedir = servedir; + if (keyfile !== void 0) request2.keyfile = keyfile; + if (certfile !== void 0) request2.certfile = certfile; + if (fallback !== void 0) request2.fallback = fallback; + sendRequest(refs, request2, (error2, response2) => { + if (error2) return reject(new Error(error2)); + if (onRequest) { + requestCallbacks["serve-request"] = (id, request3) => { + onRequest(request3.args); + sendResponse(id, {}); + }; + } + resolve(response2); + }); + }), + cancel: () => new Promise((resolve) => { + if (didDispose) return resolve(); + const request2 = { + command: "cancel", + key: buildKey + }; + sendRequest(refs, request2, () => { + resolve(); + }); + }), + dispose: () => new Promise((resolve) => { + if (didDispose) return resolve(); + didDispose = true; + const request2 = { + command: "dispose", + key: buildKey + }; + sendRequest(refs, request2, () => { + resolve(); + scheduleOnDisposeCallbacks(); + refs.unref(); + }); + }) + }; + refs.ref(); + callback(null, result); + }); + } +} +var handlePlugins = async (buildKey, sendRequest, sendResponse, refs, streamIn, requestCallbacks, initialOptions, plugins, details) => { + let 
onStartCallbacks = []; + let onEndCallbacks = []; + let onResolveCallbacks = {}; + let onLoadCallbacks = {}; + let onDisposeCallbacks = []; + let nextCallbackID = 0; + let i = 0; + let requestPlugins = []; + let isSetupDone = false; + plugins = [...plugins]; + for (let item of plugins) { + let keys = {}; + if (typeof item !== "object") throw new Error(`Plugin at index ${i} must be an object`); + const name = getFlag(item, keys, "name", mustBeString); + if (typeof name !== "string" || name === "") throw new Error(`Plugin at index ${i} is missing a name`); + try { + let setup = getFlag(item, keys, "setup", mustBeFunction); + if (typeof setup !== "function") throw new Error(`Plugin is missing a setup function`); + checkForInvalidFlags(item, keys, `on plugin ${quote(name)}`); + let plugin = { + name, + onStart: false, + onEnd: false, + onResolve: [], + onLoad: [] + }; + i++; + let resolve = (path3, options = {}) => { + if (!isSetupDone) throw new Error('Cannot call "resolve" before plugin setup has completed'); + if (typeof path3 !== "string") throw new Error(`The path to resolve must be a string`); + let keys2 = /* @__PURE__ */ Object.create(null); + let pluginName = getFlag(options, keys2, "pluginName", mustBeString); + let importer = getFlag(options, keys2, "importer", mustBeString); + let namespace = getFlag(options, keys2, "namespace", mustBeString); + let resolveDir = getFlag(options, keys2, "resolveDir", mustBeString); + let kind = getFlag(options, keys2, "kind", mustBeString); + let pluginData = getFlag(options, keys2, "pluginData", canBeAnything); + let importAttributes = getFlag(options, keys2, "with", mustBeObject); + checkForInvalidFlags(options, keys2, "in resolve() call"); + return new Promise((resolve2, reject) => { + const request = { + command: "resolve", + path: path3, + key: buildKey, + pluginName: name + }; + if (pluginName != null) request.pluginName = pluginName; + if (importer != null) request.importer = importer; + if (namespace != null) request.namespace = namespace; + if (resolveDir != null) request.resolveDir = resolveDir; + if (kind != null) request.kind = kind; + else throw new Error(`Must specify "kind" when calling "resolve"`); + if (pluginData != null) request.pluginData = details.store(pluginData); + if (importAttributes != null) request.with = sanitizeStringMap(importAttributes, "with"); + sendRequest(refs, request, (error, response) => { + if (error !== null) reject(new Error(error)); + else resolve2({ + errors: replaceDetailsInMessages(response.errors, details), + warnings: replaceDetailsInMessages(response.warnings, details), + path: response.path, + external: response.external, + sideEffects: response.sideEffects, + namespace: response.namespace, + suffix: response.suffix, + pluginData: details.load(response.pluginData) + }); + }); + }); + }; + let promise = setup({ + initialOptions, + resolve, + onStart(callback) { + let registeredText = `This error came from the "onStart" callback registered here:`; + let registeredNote = extractCallerV8(new Error(registeredText), streamIn, "onStart"); + onStartCallbacks.push({ name, callback, note: registeredNote }); + plugin.onStart = true; + }, + onEnd(callback) { + let registeredText = `This error came from the "onEnd" callback registered here:`; + let registeredNote = extractCallerV8(new Error(registeredText), streamIn, "onEnd"); + onEndCallbacks.push({ name, callback, note: registeredNote }); + plugin.onEnd = true; + }, + onResolve(options, callback) { + let registeredText = `This error came from the "onResolve" 
callback registered here:`; + let registeredNote = extractCallerV8(new Error(registeredText), streamIn, "onResolve"); + let keys2 = {}; + let filter = getFlag(options, keys2, "filter", mustBeRegExp); + let namespace = getFlag(options, keys2, "namespace", mustBeString); + checkForInvalidFlags(options, keys2, `in onResolve() call for plugin ${quote(name)}`); + if (filter == null) throw new Error(`onResolve() call is missing a filter`); + let id = nextCallbackID++; + onResolveCallbacks[id] = { name, callback, note: registeredNote }; + plugin.onResolve.push({ id, filter: filter.source, namespace: namespace || "" }); + }, + onLoad(options, callback) { + let registeredText = `This error came from the "onLoad" callback registered here:`; + let registeredNote = extractCallerV8(new Error(registeredText), streamIn, "onLoad"); + let keys2 = {}; + let filter = getFlag(options, keys2, "filter", mustBeRegExp); + let namespace = getFlag(options, keys2, "namespace", mustBeString); + checkForInvalidFlags(options, keys2, `in onLoad() call for plugin ${quote(name)}`); + if (filter == null) throw new Error(`onLoad() call is missing a filter`); + let id = nextCallbackID++; + onLoadCallbacks[id] = { name, callback, note: registeredNote }; + plugin.onLoad.push({ id, filter: filter.source, namespace: namespace || "" }); + }, + onDispose(callback) { + onDisposeCallbacks.push(callback); + }, + esbuild: streamIn.esbuild + }); + if (promise) await promise; + requestPlugins.push(plugin); + } catch (e) { + return { ok: false, error: e, pluginName: name }; + } + } + requestCallbacks["on-start"] = async (id, request) => { + let response = { errors: [], warnings: [] }; + await Promise.all(onStartCallbacks.map(async ({ name, callback, note }) => { + try { + let result = await callback(); + if (result != null) { + if (typeof result !== "object") throw new Error(`Expected onStart() callback in plugin ${quote(name)} to return an object`); + let keys = {}; + let errors = getFlag(result, keys, "errors", mustBeArray); + let warnings = getFlag(result, keys, "warnings", mustBeArray); + checkForInvalidFlags(result, keys, `from onStart() callback in plugin ${quote(name)}`); + if (errors != null) response.errors.push(...sanitizeMessages(errors, "errors", details, name, void 0)); + if (warnings != null) response.warnings.push(...sanitizeMessages(warnings, "warnings", details, name, void 0)); + } + } catch (e) { + response.errors.push(extractErrorMessageV8(e, streamIn, details, note && note(), name)); + } + })); + sendResponse(id, response); + }; + requestCallbacks["on-resolve"] = async (id, request) => { + let response = {}, name = "", callback, note; + for (let id2 of request.ids) { + try { + ({ name, callback, note } = onResolveCallbacks[id2]); + let result = await callback({ + path: request.path, + importer: request.importer, + namespace: request.namespace, + resolveDir: request.resolveDir, + kind: request.kind, + pluginData: details.load(request.pluginData), + with: request.with + }); + if (result != null) { + if (typeof result !== "object") throw new Error(`Expected onResolve() callback in plugin ${quote(name)} to return an object`); + let keys = {}; + let pluginName = getFlag(result, keys, "pluginName", mustBeString); + let path3 = getFlag(result, keys, "path", mustBeString); + let namespace = getFlag(result, keys, "namespace", mustBeString); + let suffix = getFlag(result, keys, "suffix", mustBeString); + let external = getFlag(result, keys, "external", mustBeBoolean); + let sideEffects = getFlag(result, keys, "sideEffects", 
mustBeBoolean); + let pluginData = getFlag(result, keys, "pluginData", canBeAnything); + let errors = getFlag(result, keys, "errors", mustBeArray); + let warnings = getFlag(result, keys, "warnings", mustBeArray); + let watchFiles = getFlag(result, keys, "watchFiles", mustBeArray); + let watchDirs = getFlag(result, keys, "watchDirs", mustBeArray); + checkForInvalidFlags(result, keys, `from onResolve() callback in plugin ${quote(name)}`); + response.id = id2; + if (pluginName != null) response.pluginName = pluginName; + if (path3 != null) response.path = path3; + if (namespace != null) response.namespace = namespace; + if (suffix != null) response.suffix = suffix; + if (external != null) response.external = external; + if (sideEffects != null) response.sideEffects = sideEffects; + if (pluginData != null) response.pluginData = details.store(pluginData); + if (errors != null) response.errors = sanitizeMessages(errors, "errors", details, name, void 0); + if (warnings != null) response.warnings = sanitizeMessages(warnings, "warnings", details, name, void 0); + if (watchFiles != null) response.watchFiles = sanitizeStringArray(watchFiles, "watchFiles"); + if (watchDirs != null) response.watchDirs = sanitizeStringArray(watchDirs, "watchDirs"); + break; + } + } catch (e) { + response = { id: id2, errors: [extractErrorMessageV8(e, streamIn, details, note && note(), name)] }; + break; + } + } + sendResponse(id, response); + }; + requestCallbacks["on-load"] = async (id, request) => { + let response = {}, name = "", callback, note; + for (let id2 of request.ids) { + try { + ({ name, callback, note } = onLoadCallbacks[id2]); + let result = await callback({ + path: request.path, + namespace: request.namespace, + suffix: request.suffix, + pluginData: details.load(request.pluginData), + with: request.with + }); + if (result != null) { + if (typeof result !== "object") throw new Error(`Expected onLoad() callback in plugin ${quote(name)} to return an object`); + let keys = {}; + let pluginName = getFlag(result, keys, "pluginName", mustBeString); + let contents = getFlag(result, keys, "contents", mustBeStringOrUint8Array); + let resolveDir = getFlag(result, keys, "resolveDir", mustBeString); + let pluginData = getFlag(result, keys, "pluginData", canBeAnything); + let loader = getFlag(result, keys, "loader", mustBeString); + let errors = getFlag(result, keys, "errors", mustBeArray); + let warnings = getFlag(result, keys, "warnings", mustBeArray); + let watchFiles = getFlag(result, keys, "watchFiles", mustBeArray); + let watchDirs = getFlag(result, keys, "watchDirs", mustBeArray); + checkForInvalidFlags(result, keys, `from onLoad() callback in plugin ${quote(name)}`); + response.id = id2; + if (pluginName != null) response.pluginName = pluginName; + if (contents instanceof Uint8Array) response.contents = contents; + else if (contents != null) response.contents = encodeUTF8(contents); + if (resolveDir != null) response.resolveDir = resolveDir; + if (pluginData != null) response.pluginData = details.store(pluginData); + if (loader != null) response.loader = loader; + if (errors != null) response.errors = sanitizeMessages(errors, "errors", details, name, void 0); + if (warnings != null) response.warnings = sanitizeMessages(warnings, "warnings", details, name, void 0); + if (watchFiles != null) response.watchFiles = sanitizeStringArray(watchFiles, "watchFiles"); + if (watchDirs != null) response.watchDirs = sanitizeStringArray(watchDirs, "watchDirs"); + break; + } + } catch (e) { + response = { id: id2, errors: 
[extractErrorMessageV8(e, streamIn, details, note && note(), name)] }; + break; + } + } + sendResponse(id, response); + }; + let runOnEndCallbacks = (result, done) => done([], []); + if (onEndCallbacks.length > 0) { + runOnEndCallbacks = (result, done) => { + (async () => { + const onEndErrors = []; + const onEndWarnings = []; + for (const { name, callback, note } of onEndCallbacks) { + let newErrors; + let newWarnings; + try { + const value = await callback(result); + if (value != null) { + if (typeof value !== "object") throw new Error(`Expected onEnd() callback in plugin ${quote(name)} to return an object`); + let keys = {}; + let errors = getFlag(value, keys, "errors", mustBeArray); + let warnings = getFlag(value, keys, "warnings", mustBeArray); + checkForInvalidFlags(value, keys, `from onEnd() callback in plugin ${quote(name)}`); + if (errors != null) newErrors = sanitizeMessages(errors, "errors", details, name, void 0); + if (warnings != null) newWarnings = sanitizeMessages(warnings, "warnings", details, name, void 0); + } + } catch (e) { + newErrors = [extractErrorMessageV8(e, streamIn, details, note && note(), name)]; + } + if (newErrors) { + onEndErrors.push(...newErrors); + try { + result.errors.push(...newErrors); + } catch { + } + } + if (newWarnings) { + onEndWarnings.push(...newWarnings); + try { + result.warnings.push(...newWarnings); + } catch { + } + } + } + done(onEndErrors, onEndWarnings); + })(); + }; + } + let scheduleOnDisposeCallbacks = () => { + for (const cb of onDisposeCallbacks) { + setTimeout(() => cb(), 0); + } + }; + isSetupDone = true; + return { + ok: true, + requestPlugins, + runOnEndCallbacks, + scheduleOnDisposeCallbacks + }; +}; +function createObjectStash() { + const map = /* @__PURE__ */ new Map(); + let nextID = 0; + return { + load(id) { + return map.get(id); + }, + store(value) { + if (value === void 0) return -1; + const id = nextID++; + map.set(id, value); + return id; + } + }; +} +function extractCallerV8(e, streamIn, ident) { + let note; + let tried = false; + return () => { + if (tried) return note; + tried = true; + try { + let lines = (e.stack + "").split("\n"); + lines.splice(1, 1); + let location = parseStackLinesV8(streamIn, lines, ident); + if (location) { + note = { text: e.message, location }; + return note; + } + } catch { + } + }; +} +function extractErrorMessageV8(e, streamIn, stash, note, pluginName) { + let text = "Internal error"; + let location = null; + try { + text = (e && e.message || e) + ""; + } catch { + } + try { + location = parseStackLinesV8(streamIn, (e.stack + "").split("\n"), ""); + } catch { + } + return { id: "", pluginName, text, location, notes: note ? [note] : [], detail: stash ? 
stash.store(e) : -1 }; +} +function parseStackLinesV8(streamIn, lines, ident) { + let at = " at "; + if (streamIn.readFileSync && !lines[0].startsWith(at) && lines[1].startsWith(at)) { + for (let i = 1; i < lines.length; i++) { + let line = lines[i]; + if (!line.startsWith(at)) continue; + line = line.slice(at.length); + while (true) { + let match = /^(?:new |async )?\S+ \((.*)\)$/.exec(line); + if (match) { + line = match[1]; + continue; + } + match = /^eval at \S+ \((.*)\)(?:, \S+:\d+:\d+)?$/.exec(line); + if (match) { + line = match[1]; + continue; + } + match = /^(\S+):(\d+):(\d+)$/.exec(line); + if (match) { + let contents; + try { + contents = streamIn.readFileSync(match[1], "utf8"); + } catch { + break; + } + let lineText = contents.split(/\r\n|\r|\n|\u2028|\u2029/)[+match[2] - 1] || ""; + let column = +match[3] - 1; + let length = lineText.slice(column, column + ident.length) === ident ? ident.length : 0; + return { + file: match[1], + namespace: "file", + line: +match[2], + column: encodeUTF8(lineText.slice(0, column)).length, + length: encodeUTF8(lineText.slice(column, column + length)).length, + lineText: lineText + "\n" + lines.slice(1).join("\n"), + suggestion: "" + }; + } + break; + } + } + } + return null; +} +function failureErrorWithLog(text, errors, warnings) { + let limit = 5; + text += errors.length < 1 ? "" : ` with ${errors.length} error${errors.length < 2 ? "" : "s"}:` + errors.slice(0, limit + 1).map((e, i) => { + if (i === limit) return "\n..."; + if (!e.location) return ` +error: ${e.text}`; + let { file, line, column } = e.location; + let pluginText = e.pluginName ? `[plugin: ${e.pluginName}] ` : ""; + return ` +${file}:${line}:${column}: ERROR: ${pluginText}${e.text}`; + }).join(""); + let error = new Error(text); + for (const [key, value] of [["errors", errors], ["warnings", warnings]]) { + Object.defineProperty(error, key, { + configurable: true, + enumerable: true, + get: () => value, + set: (value2) => Object.defineProperty(error, key, { + configurable: true, + enumerable: true, + value: value2 + }) + }); + } + return error; +} +function replaceDetailsInMessages(messages, stash) { + for (const message of messages) { + message.detail = stash.load(message.detail); + } + return messages; +} +function sanitizeLocation(location, where, terminalWidth) { + if (location == null) return null; + let keys = {}; + let file = getFlag(location, keys, "file", mustBeString); + let namespace = getFlag(location, keys, "namespace", mustBeString); + let line = getFlag(location, keys, "line", mustBeInteger); + let column = getFlag(location, keys, "column", mustBeInteger); + let length = getFlag(location, keys, "length", mustBeInteger); + let lineText = getFlag(location, keys, "lineText", mustBeString); + let suggestion = getFlag(location, keys, "suggestion", mustBeString); + checkForInvalidFlags(location, keys, where); + if (lineText) { + const relevantASCII = lineText.slice( + 0, + (column && column > 0 ? column : 0) + (length && length > 0 ? length : 0) + (terminalWidth && terminalWidth > 0 ? 
terminalWidth : 80) + ); + if (!/[\x7F-\uFFFF]/.test(relevantASCII) && !/\n/.test(lineText)) { + lineText = relevantASCII; + } + } + return { + file: file || "", + namespace: namespace || "", + line: line || 0, + column: column || 0, + length: length || 0, + lineText: lineText || "", + suggestion: suggestion || "" + }; +} +function sanitizeMessages(messages, property, stash, fallbackPluginName, terminalWidth) { + let messagesClone = []; + let index = 0; + for (const message of messages) { + let keys = {}; + let id = getFlag(message, keys, "id", mustBeString); + let pluginName = getFlag(message, keys, "pluginName", mustBeString); + let text = getFlag(message, keys, "text", mustBeString); + let location = getFlag(message, keys, "location", mustBeObjectOrNull); + let notes = getFlag(message, keys, "notes", mustBeArray); + let detail = getFlag(message, keys, "detail", canBeAnything); + let where = `in element ${index} of "${property}"`; + checkForInvalidFlags(message, keys, where); + let notesClone = []; + if (notes) { + for (const note of notes) { + let noteKeys = {}; + let noteText = getFlag(note, noteKeys, "text", mustBeString); + let noteLocation = getFlag(note, noteKeys, "location", mustBeObjectOrNull); + checkForInvalidFlags(note, noteKeys, where); + notesClone.push({ + text: noteText || "", + location: sanitizeLocation(noteLocation, where, terminalWidth) + }); + } + } + messagesClone.push({ + id: id || "", + pluginName: pluginName || fallbackPluginName, + text: text || "", + location: sanitizeLocation(location, where, terminalWidth), + notes: notesClone, + detail: stash ? stash.store(detail) : -1 + }); + index++; + } + return messagesClone; +} +function sanitizeStringArray(values, property) { + const result = []; + for (const value of values) { + if (typeof value !== "string") throw new Error(`${quote(property)} must be an array of strings`); + result.push(value); + } + return result; +} +function sanitizeStringMap(map, property) { + const result = /* @__PURE__ */ Object.create(null); + for (const key in map) { + const value = map[key]; + if (typeof value !== "string") throw new Error(`key ${quote(key)} in object ${quote(property)} must be a string`); + result[key] = value; + } + return result; +} +function convertOutputFiles({ path: path3, contents, hash }) { + let text = null; + return { + path: path3, + contents, + hash, + get text() { + const binary = this.contents; + if (text === null || binary !== contents) { + contents = binary; + text = decodeUTF8(binary); + } + return text; + } + }; +} + +// lib/npm/node-platform.ts +var fs = require("fs"); +var os = require("os"); +var path = require("path"); +var ESBUILD_BINARY_PATH = process.env.ESBUILD_BINARY_PATH || ESBUILD_BINARY_PATH; +var isValidBinaryPath = (x) => !!x && x !== "/usr/bin/esbuild"; +var packageDarwin_arm64 = "@esbuild/darwin-arm64"; +var packageDarwin_x64 = "@esbuild/darwin-x64"; +var knownWindowsPackages = { + "win32 arm64 LE": "@esbuild/win32-arm64", + "win32 ia32 LE": "@esbuild/win32-ia32", + "win32 x64 LE": "@esbuild/win32-x64" +}; +var knownUnixlikePackages = { + "aix ppc64 BE": "@esbuild/aix-ppc64", + "android arm64 LE": "@esbuild/android-arm64", + "darwin arm64 LE": "@esbuild/darwin-arm64", + "darwin x64 LE": "@esbuild/darwin-x64", + "freebsd arm64 LE": "@esbuild/freebsd-arm64", + "freebsd x64 LE": "@esbuild/freebsd-x64", + "linux arm LE": "@esbuild/linux-arm", + "linux arm64 LE": "@esbuild/linux-arm64", + "linux ia32 LE": "@esbuild/linux-ia32", + "linux mips64el LE": "@esbuild/linux-mips64el", + "linux ppc64 LE": 
"@esbuild/linux-ppc64", + "linux riscv64 LE": "@esbuild/linux-riscv64", + "linux s390x BE": "@esbuild/linux-s390x", + "linux x64 LE": "@esbuild/linux-x64", + "linux loong64 LE": "@esbuild/linux-loong64", + "netbsd x64 LE": "@esbuild/netbsd-x64", + "openbsd x64 LE": "@esbuild/openbsd-x64", + "sunos x64 LE": "@esbuild/sunos-x64" +}; +var knownWebAssemblyFallbackPackages = { + "android arm LE": "@esbuild/android-arm", + "android x64 LE": "@esbuild/android-x64" +}; +function pkgAndSubpathForCurrentPlatform() { + let pkg; + let subpath; + let isWASM = false; + let platformKey = `${process.platform} ${os.arch()} ${os.endianness()}`; + if (platformKey in knownWindowsPackages) { + pkg = knownWindowsPackages[platformKey]; + subpath = "esbuild.exe"; + } else if (platformKey in knownUnixlikePackages) { + pkg = knownUnixlikePackages[platformKey]; + subpath = "bin/esbuild"; + } else if (platformKey in knownWebAssemblyFallbackPackages) { + pkg = knownWebAssemblyFallbackPackages[platformKey]; + subpath = "bin/esbuild"; + isWASM = true; + } else { + throw new Error(`Unsupported platform: ${platformKey}`); + } + return { pkg, subpath, isWASM }; +} +function pkgForSomeOtherPlatform() { + const libMainJS = require.resolve("esbuild"); + const nodeModulesDirectory = path.dirname(path.dirname(path.dirname(libMainJS))); + if (path.basename(nodeModulesDirectory) === "node_modules") { + for (const unixKey in knownUnixlikePackages) { + try { + const pkg = knownUnixlikePackages[unixKey]; + if (fs.existsSync(path.join(nodeModulesDirectory, pkg))) return pkg; + } catch { + } + } + for (const windowsKey in knownWindowsPackages) { + try { + const pkg = knownWindowsPackages[windowsKey]; + if (fs.existsSync(path.join(nodeModulesDirectory, pkg))) return pkg; + } catch { + } + } + } + return null; +} +function downloadedBinPath(pkg, subpath) { + const esbuildLibDir = path.dirname(require.resolve("esbuild")); + return path.join(esbuildLibDir, `downloaded-${pkg.replace("/", "-")}-${path.basename(subpath)}`); +} +function generateBinPath() { + if (isValidBinaryPath(ESBUILD_BINARY_PATH)) { + if (!fs.existsSync(ESBUILD_BINARY_PATH)) { + console.warn(`[esbuild] Ignoring bad configuration: ESBUILD_BINARY_PATH=${ESBUILD_BINARY_PATH}`); + } else { + return { binPath: ESBUILD_BINARY_PATH, isWASM: false }; + } + } + const { pkg, subpath, isWASM } = pkgAndSubpathForCurrentPlatform(); + let binPath; + try { + binPath = require.resolve(`${pkg}/${subpath}`); + } catch (e) { + binPath = downloadedBinPath(pkg, subpath); + if (!fs.existsSync(binPath)) { + try { + require.resolve(pkg); + } catch { + const otherPkg = pkgForSomeOtherPlatform(); + if (otherPkg) { + let suggestions = ` +Specifically the "${otherPkg}" package is present but this platform +needs the "${pkg}" package instead. People often get into this +situation by installing esbuild on Windows or macOS and copying "node_modules" +into a Docker image that runs Linux, or by copying "node_modules" between +Windows and WSL environments. + +If you are installing with npm, you can try not copying the "node_modules" +directory when you copy the files over, and running "npm ci" or "npm install" +on the destination platform after the copy. Or you could consider using yarn +instead of npm which has built-in support for installing a package on multiple +platforms simultaneously. 
+ +If you are installing with yarn, you can try listing both this platform and the +other platform in your ".yarnrc.yml" file using the "supportedArchitectures" +feature: https://yarnpkg.com/configuration/yarnrc/#supportedArchitectures +Keep in mind that this means multiple copies of esbuild will be present. +`; + if (pkg === packageDarwin_x64 && otherPkg === packageDarwin_arm64 || pkg === packageDarwin_arm64 && otherPkg === packageDarwin_x64) { + suggestions = ` +Specifically the "${otherPkg}" package is present but this platform +needs the "${pkg}" package instead. People often get into this +situation by installing esbuild with npm running inside of Rosetta 2 and then +trying to use it with node running outside of Rosetta 2, or vice versa (Rosetta +2 is Apple's on-the-fly x86_64-to-arm64 translation service). + +If you are installing with npm, you can try ensuring that both npm and node are +not running under Rosetta 2 and then reinstalling esbuild. This likely involves +changing how you installed npm and/or node. For example, installing node with +the universal installer here should work: https://nodejs.org/en/download/. Or +you could consider using yarn instead of npm which has built-in support for +installing a package on multiple platforms simultaneously. + +If you are installing with yarn, you can try listing both "arm64" and "x64" +in your ".yarnrc.yml" file using the "supportedArchitectures" feature: +https://yarnpkg.com/configuration/yarnrc/#supportedArchitectures +Keep in mind that this means multiple copies of esbuild will be present. +`; + } + throw new Error(` +You installed esbuild for another platform than the one you're currently using. +This won't work because esbuild is written with native code and needs to +install a platform-specific binary executable. +${suggestions} +Another alternative is to use the "esbuild-wasm" package instead, which works +the same way on all platforms. But it comes with a heavy performance cost and +can sometimes be 10x slower than the "esbuild" package, so you may also not +want to do that. +`); + } + throw new Error(`The package "${pkg}" could not be found, and is needed by esbuild. + +If you are installing esbuild with npm, make sure that you don't specify the +"--no-optional" or "--omit=optional" flags. 
The "optionalDependencies" feature +of "package.json" is used by esbuild to install the correct binary executable +for your current platform.`); + } + throw e; + } + } + if (/\.zip\//.test(binPath)) { + let pnpapi; + try { + pnpapi = require("pnpapi"); + } catch (e) { + } + if (pnpapi) { + const root = pnpapi.getPackageInformation(pnpapi.topLevel).packageLocation; + const binTargetPath = path.join( + root, + "node_modules", + ".cache", + "esbuild", + `pnpapi-${pkg.replace("/", "-")}-${"0.21.5"}-${path.basename(subpath)}` + ); + if (!fs.existsSync(binTargetPath)) { + fs.mkdirSync(path.dirname(binTargetPath), { recursive: true }); + fs.copyFileSync(binPath, binTargetPath); + fs.chmodSync(binTargetPath, 493); + } + return { binPath: binTargetPath, isWASM }; + } + } + return { binPath, isWASM }; +} + +// lib/npm/node.ts +var child_process = require("child_process"); +var crypto = require("crypto"); +var path2 = require("path"); +var fs2 = require("fs"); +var os2 = require("os"); +var tty = require("tty"); +var worker_threads; +if (process.env.ESBUILD_WORKER_THREADS !== "0") { + try { + worker_threads = require("worker_threads"); + } catch { + } + let [major, minor] = process.versions.node.split("."); + if ( + // { + if ((!ESBUILD_BINARY_PATH || false) && (path2.basename(__filename) !== "main.js" || path2.basename(__dirname) !== "lib")) { + throw new Error( + `The esbuild JavaScript API cannot be bundled. Please mark the "esbuild" package as external so it's not included in the bundle. + +More information: The file containing the code for esbuild's JavaScript API (${__filename}) does not appear to be inside the esbuild package on the file system, which usually means that the esbuild package was bundled into another file. This is problematic because the API needs to run a binary executable inside the esbuild package which is located using a relative path from the API code to the executable. If the esbuild package is bundled, the relative path will be incorrect and the executable won't be found.` + ); + } + if (false) { + return ["node", [path2.join(__dirname, "..", "bin", "esbuild")]]; + } else { + const { binPath, isWASM } = generateBinPath(); + if (isWASM) { + return ["node", [binPath]]; + } else { + return [binPath, []]; + } + } +}; +var isTTY = () => tty.isatty(2); +var fsSync = { + readFile(tempFile, callback) { + try { + let contents = fs2.readFileSync(tempFile, "utf8"); + try { + fs2.unlinkSync(tempFile); + } catch { + } + callback(null, contents); + } catch (err) { + callback(err, null); + } + }, + writeFile(contents, callback) { + try { + let tempFile = randomFileName(); + fs2.writeFileSync(tempFile, contents); + callback(tempFile); + } catch { + callback(null); + } + } +}; +var fsAsync = { + readFile(tempFile, callback) { + try { + fs2.readFile(tempFile, "utf8", (err, contents) => { + try { + fs2.unlink(tempFile, () => callback(err, contents)); + } catch { + callback(err, contents); + } + }); + } catch (err) { + callback(err, null); + } + }, + writeFile(contents, callback) { + try { + let tempFile = randomFileName(); + fs2.writeFile(tempFile, contents, (err) => err !== null ? 
callback(null) : callback(tempFile)); + } catch { + callback(null); + } + } +}; +var version = "0.21.5"; +var build = (options) => ensureServiceIsRunning().build(options); +var context = (buildOptions) => ensureServiceIsRunning().context(buildOptions); +var transform = (input, options) => ensureServiceIsRunning().transform(input, options); +var formatMessages = (messages, options) => ensureServiceIsRunning().formatMessages(messages, options); +var analyzeMetafile = (messages, options) => ensureServiceIsRunning().analyzeMetafile(messages, options); +var buildSync = (options) => { + if (worker_threads && !isInternalWorkerThread) { + if (!workerThreadService) workerThreadService = startWorkerThreadService(worker_threads); + return workerThreadService.buildSync(options); + } + let result; + runServiceSync((service) => service.buildOrContext({ + callName: "buildSync", + refs: null, + options, + isTTY: isTTY(), + defaultWD, + callback: (err, res) => { + if (err) throw err; + result = res; + } + })); + return result; +}; +var transformSync = (input, options) => { + if (worker_threads && !isInternalWorkerThread) { + if (!workerThreadService) workerThreadService = startWorkerThreadService(worker_threads); + return workerThreadService.transformSync(input, options); + } + let result; + runServiceSync((service) => service.transform({ + callName: "transformSync", + refs: null, + input, + options: options || {}, + isTTY: isTTY(), + fs: fsSync, + callback: (err, res) => { + if (err) throw err; + result = res; + } + })); + return result; +}; +var formatMessagesSync = (messages, options) => { + if (worker_threads && !isInternalWorkerThread) { + if (!workerThreadService) workerThreadService = startWorkerThreadService(worker_threads); + return workerThreadService.formatMessagesSync(messages, options); + } + let result; + runServiceSync((service) => service.formatMessages({ + callName: "formatMessagesSync", + refs: null, + messages, + options, + callback: (err, res) => { + if (err) throw err; + result = res; + } + })); + return result; +}; +var analyzeMetafileSync = (metafile, options) => { + if (worker_threads && !isInternalWorkerThread) { + if (!workerThreadService) workerThreadService = startWorkerThreadService(worker_threads); + return workerThreadService.analyzeMetafileSync(metafile, options); + } + let result; + runServiceSync((service) => service.analyzeMetafile({ + callName: "analyzeMetafileSync", + refs: null, + metafile: typeof metafile === "string" ? 
metafile : JSON.stringify(metafile), + options, + callback: (err, res) => { + if (err) throw err; + result = res; + } + })); + return result; +}; +var stop = () => { + if (stopService) stopService(); + if (workerThreadService) workerThreadService.stop(); + return Promise.resolve(); +}; +var initializeWasCalled = false; +var initialize = (options) => { + options = validateInitializeOptions(options || {}); + if (options.wasmURL) throw new Error(`The "wasmURL" option only works in the browser`); + if (options.wasmModule) throw new Error(`The "wasmModule" option only works in the browser`); + if (options.worker) throw new Error(`The "worker" option only works in the browser`); + if (initializeWasCalled) throw new Error('Cannot call "initialize" more than once'); + ensureServiceIsRunning(); + initializeWasCalled = true; + return Promise.resolve(); +}; +var defaultWD = process.cwd(); +var longLivedService; +var stopService; +var ensureServiceIsRunning = () => { + if (longLivedService) return longLivedService; + let [command, args] = esbuildCommandAndArgs(); + let child = child_process.spawn(command, args.concat(`--service=${"0.21.5"}`, "--ping"), { + windowsHide: true, + stdio: ["pipe", "pipe", "inherit"], + cwd: defaultWD + }); + let { readFromStdout, afterClose, service } = createChannel({ + writeToStdin(bytes) { + child.stdin.write(bytes, (err) => { + if (err) afterClose(err); + }); + }, + readFileSync: fs2.readFileSync, + isSync: false, + hasFS: true, + esbuild: node_exports + }); + child.stdin.on("error", afterClose); + child.on("error", afterClose); + const stdin = child.stdin; + const stdout = child.stdout; + stdout.on("data", readFromStdout); + stdout.on("end", afterClose); + stopService = () => { + stdin.destroy(); + stdout.destroy(); + child.kill(); + initializeWasCalled = false; + longLivedService = void 0; + stopService = void 0; + }; + let refCount = 0; + child.unref(); + if (stdin.unref) { + stdin.unref(); + } + if (stdout.unref) { + stdout.unref(); + } + const refs = { + ref() { + if (++refCount === 1) child.ref(); + }, + unref() { + if (--refCount === 0) child.unref(); + } + }; + longLivedService = { + build: (options) => new Promise((resolve, reject) => { + service.buildOrContext({ + callName: "build", + refs, + options, + isTTY: isTTY(), + defaultWD, + callback: (err, res) => err ? reject(err) : resolve(res) + }); + }), + context: (options) => new Promise((resolve, reject) => service.buildOrContext({ + callName: "context", + refs, + options, + isTTY: isTTY(), + defaultWD, + callback: (err, res) => err ? reject(err) : resolve(res) + })), + transform: (input, options) => new Promise((resolve, reject) => service.transform({ + callName: "transform", + refs, + input, + options: options || {}, + isTTY: isTTY(), + fs: fsAsync, + callback: (err, res) => err ? reject(err) : resolve(res) + })), + formatMessages: (messages, options) => new Promise((resolve, reject) => service.formatMessages({ + callName: "formatMessages", + refs, + messages, + options, + callback: (err, res) => err ? reject(err) : resolve(res) + })), + analyzeMetafile: (metafile, options) => new Promise((resolve, reject) => service.analyzeMetafile({ + callName: "analyzeMetafile", + refs, + metafile: typeof metafile === "string" ? metafile : JSON.stringify(metafile), + options, + callback: (err, res) => err ? 
reject(err) : resolve(res) + })) + }; + return longLivedService; +}; +var runServiceSync = (callback) => { + let [command, args] = esbuildCommandAndArgs(); + let stdin = new Uint8Array(); + let { readFromStdout, afterClose, service } = createChannel({ + writeToStdin(bytes) { + if (stdin.length !== 0) throw new Error("Must run at most one command"); + stdin = bytes; + }, + isSync: true, + hasFS: true, + esbuild: node_exports + }); + callback(service); + let stdout = child_process.execFileSync(command, args.concat(`--service=${"0.21.5"}`), { + cwd: defaultWD, + windowsHide: true, + input: stdin, + // We don't know how large the output could be. If it's too large, the + // command will fail with ENOBUFS. Reserve 16mb for now since that feels + // like it should be enough. Also allow overriding this with an environment + // variable. + maxBuffer: +process.env.ESBUILD_MAX_BUFFER || 16 * 1024 * 1024 + }); + readFromStdout(stdout); + afterClose(null); +}; +var randomFileName = () => { + return path2.join(os2.tmpdir(), `esbuild-${crypto.randomBytes(32).toString("hex")}`); +}; +var workerThreadService = null; +var startWorkerThreadService = (worker_threads2) => { + let { port1: mainPort, port2: workerPort } = new worker_threads2.MessageChannel(); + let worker = new worker_threads2.Worker(__filename, { + workerData: { workerPort, defaultWD, esbuildVersion: "0.21.5" }, + transferList: [workerPort], + // From node's documentation: https://nodejs.org/api/worker_threads.html + // + // Take care when launching worker threads from preload scripts (scripts loaded + // and run using the `-r` command line flag). Unless the `execArgv` option is + // explicitly set, new Worker threads automatically inherit the command line flags + // from the running process and will preload the same preload scripts as the main + // thread. If the preload script unconditionally launches a worker thread, every + // thread spawned will spawn another until the application crashes. 
+ // + execArgv: [] + }); + let nextID = 0; + let fakeBuildError = (text) => { + let error = new Error(`Build failed with 1 error: +error: ${text}`); + let errors = [{ id: "", pluginName: "", text, location: null, notes: [], detail: void 0 }]; + error.errors = errors; + error.warnings = []; + return error; + }; + let validateBuildSyncOptions = (options) => { + if (!options) return; + let plugins = options.plugins; + if (plugins && plugins.length > 0) throw fakeBuildError(`Cannot use plugins in synchronous API calls`); + }; + let applyProperties = (object, properties) => { + for (let key in properties) { + object[key] = properties[key]; + } + }; + let runCallSync = (command, args) => { + let id = nextID++; + let sharedBuffer = new SharedArrayBuffer(8); + let sharedBufferView = new Int32Array(sharedBuffer); + let msg = { sharedBuffer, id, command, args }; + worker.postMessage(msg); + let status = Atomics.wait(sharedBufferView, 0, 0); + if (status !== "ok" && status !== "not-equal") throw new Error("Internal error: Atomics.wait() failed: " + status); + let { message: { id: id2, resolve, reject, properties } } = worker_threads2.receiveMessageOnPort(mainPort); + if (id !== id2) throw new Error(`Internal error: Expected id ${id} but got id ${id2}`); + if (reject) { + applyProperties(reject, properties); + throw reject; + } + return resolve; + }; + worker.unref(); + return { + buildSync(options) { + validateBuildSyncOptions(options); + return runCallSync("build", [options]); + }, + transformSync(input, options) { + return runCallSync("transform", [input, options]); + }, + formatMessagesSync(messages, options) { + return runCallSync("formatMessages", [messages, options]); + }, + analyzeMetafileSync(metafile, options) { + return runCallSync("analyzeMetafile", [metafile, options]); + }, + stop() { + worker.terminate(); + workerThreadService = null; + } + }; +}; +var startSyncServiceWorker = () => { + let workerPort = worker_threads.workerData.workerPort; + let parentPort = worker_threads.parentPort; + let extractProperties = (object) => { + let properties = {}; + if (object && typeof object === "object") { + for (let key in object) { + properties[key] = object[key]; + } + } + return properties; + }; + try { + let service = ensureServiceIsRunning(); + defaultWD = worker_threads.workerData.defaultWD; + parentPort.on("message", (msg) => { + (async () => { + let { sharedBuffer, id, command, args } = msg; + let sharedBufferView = new Int32Array(sharedBuffer); + try { + switch (command) { + case "build": + workerPort.postMessage({ id, resolve: await service.build(args[0]) }); + break; + case "transform": + workerPort.postMessage({ id, resolve: await service.transform(args[0], args[1]) }); + break; + case "formatMessages": + workerPort.postMessage({ id, resolve: await service.formatMessages(args[0], args[1]) }); + break; + case "analyzeMetafile": + workerPort.postMessage({ id, resolve: await service.analyzeMetafile(args[0], args[1]) }); + break; + default: + throw new Error(`Invalid command: ${command}`); + } + } catch (reject) { + workerPort.postMessage({ id, reject, properties: extractProperties(reject) }); + } + Atomics.add(sharedBufferView, 0, 1); + Atomics.notify(sharedBufferView, 0, Infinity); + })(); + }); + } catch (reject) { + parentPort.on("message", (msg) => { + let { sharedBuffer, id } = msg; + let sharedBufferView = new Int32Array(sharedBuffer); + workerPort.postMessage({ id, reject, properties: extractProperties(reject) }); + Atomics.add(sharedBufferView, 0, 1); + 
Atomics.notify(sharedBufferView, 0, Infinity); + }); + } +}; +if (isInternalWorkerThread) { + startSyncServiceWorker(); +} +var node_default = node_exports; +// Annotate the CommonJS export names for ESM import in node: +0 && (module.exports = { + analyzeMetafile, + analyzeMetafileSync, + build, + buildSync, + context, + formatMessages, + formatMessagesSync, + initialize, + stop, + transform, + transformSync, + version +}); diff --git a/node_modules/esbuild/package.json b/node_modules/esbuild/package.json new file mode 100644 index 0000000..fe253fb --- /dev/null +++ b/node_modules/esbuild/package.json @@ -0,0 +1,46 @@ +{ + "name": "esbuild", + "version": "0.21.5", + "description": "An extremely fast JavaScript and CSS bundler and minifier.", + "repository": { + "type": "git", + "url": "git+https://github.com/evanw/esbuild.git" + }, + "scripts": { + "postinstall": "node install.js" + }, + "main": "lib/main.js", + "types": "lib/main.d.ts", + "engines": { + "node": ">=12" + }, + "bin": { + "esbuild": "bin/esbuild" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.21.5", + "@esbuild/android-arm": "0.21.5", + "@esbuild/android-arm64": "0.21.5", + "@esbuild/android-x64": "0.21.5", + "@esbuild/darwin-arm64": "0.21.5", + "@esbuild/darwin-x64": "0.21.5", + "@esbuild/freebsd-arm64": "0.21.5", + "@esbuild/freebsd-x64": "0.21.5", + "@esbuild/linux-arm": "0.21.5", + "@esbuild/linux-arm64": "0.21.5", + "@esbuild/linux-ia32": "0.21.5", + "@esbuild/linux-loong64": "0.21.5", + "@esbuild/linux-mips64el": "0.21.5", + "@esbuild/linux-ppc64": "0.21.5", + "@esbuild/linux-riscv64": "0.21.5", + "@esbuild/linux-s390x": "0.21.5", + "@esbuild/linux-x64": "0.21.5", + "@esbuild/netbsd-x64": "0.21.5", + "@esbuild/openbsd-x64": "0.21.5", + "@esbuild/sunos-x64": "0.21.5", + "@esbuild/win32-arm64": "0.21.5", + "@esbuild/win32-ia32": "0.21.5", + "@esbuild/win32-x64": "0.21.5" + }, + "license": "MIT" +} diff --git a/node_modules/fsevents/LICENSE b/node_modules/fsevents/LICENSE new file mode 100644 index 0000000..5d70441 --- /dev/null +++ b/node_modules/fsevents/LICENSE @@ -0,0 +1,22 @@ +MIT License +----------- + +Copyright (C) 2010-2020 by Philipp Dunkel, Ben Noordhuis, Elan Shankar, Paul Miller + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
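Before the vendored fsevents package below, a brief orientation on the esbuild bundle above: lib/main.js wires the asynchronous API (`build`, `context`, `transform`) to a long-lived esbuild child process, and the synchronous API (`buildSync`, `transformSync`) to a worker thread that blocks on `Atomics.wait` (which is also why plugins are rejected in synchronous calls). The following minimal sketch shows how that public API is typically invoked; it is not part of the vendored files, and the entry-point and output paths are illustrative assumptions.

```js
// Minimal usage sketch for the esbuild 0.21.5 API bundled above.
// "app.ts" and "out.js" are hypothetical paths used only for illustration.
const esbuild = require('esbuild');

// Synchronous transform: routed through the worker-thread service when
// worker_threads is available; plugins are not allowed in sync calls.
const { code, warnings } = esbuild.transformSync('let x: number = 1', {
  loader: 'ts',
  minify: true,
});
console.log(code, warnings.length);

// Asynchronous build: talks to the long-lived child process started by
// ensureServiceIsRunning() over its stdin/stdout protocol.
esbuild
  .build({ entryPoints: ['app.ts'], bundle: true, outfile: 'out.js' })
  .then((result) => console.log(result.errors, result.warnings))
  .catch(() => process.exit(1));
```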
diff --git a/node_modules/fsevents/README.md b/node_modules/fsevents/README.md new file mode 100644 index 0000000..50373a0 --- /dev/null +++ b/node_modules/fsevents/README.md @@ -0,0 +1,89 @@ +# fsevents + +Native access to macOS FSEvents in [Node.js](https://nodejs.org/) + +The FSEvents API in macOS allows applications to register for notifications of +changes to a given directory tree. It is a very fast and lightweight alternative +to kqueue. + +This is a low-level library. For a cross-platform file watching module that +uses fsevents, check out [Chokidar](https://github.com/paulmillr/chokidar). + +## Usage + +```sh +npm install fsevents +``` + +Supports only **Node.js v8.16 and higher**. + +```js +const fsevents = require('fsevents'); + +// To start observation +const stop = fsevents.watch(__dirname, (path, flags, id) => { + const info = fsevents.getInfo(path, flags); +}); + +// To end observation +stop(); +``` + +> **Important note:** The API behaviour is slightly different from typical JS APIs. The `stop` function **must** be +> retrieved and stored somewhere, even if you don't plan to stop the watcher. If you forget it, the garbage collector +> will eventually kick in, the watcher will be unregistered, and your callbacks won't be called anymore. + +The callback passed as the second parameter to `.watch` gets called whenever the operating system detects a +change in the file system. It takes three arguments: + +###### `fsevents.watch(dirname: string, (path: string, flags: number, id: string) => void): () => Promise` + + * `path: string` - the item in the filesystem that has been changed + * `flags: number` - a numeric value describing what the change was + * `id: string` - a unique ID identifying this specific event + + Returns a closer callback which, when called, returns a Promise that resolves when the watcher process has been shut down. + +###### `fsevents.getInfo(path: string, flags: number, id: string): FsEventsInfo` + +The `getInfo` function takes the `path`, `flags` and `id` arguments and converts those parameters into a structure +that is easier to digest to determine what the change was. + +The `FsEventsInfo` has the following shape: + +```js +/** + * @typedef {'created'|'modified'|'deleted'|'moved'|'root-changed'|'cloned'|'unknown'} FsEventsEvent + * @typedef {'file'|'directory'|'symlink'} FsEventsType + */ +{ + "event": "created", // {FsEventsEvent} + "path": "file.txt", + "type": "file", // {FsEventsType} + "changes": { + "inode": true, // Had iNode Meta-Information changed + "finder": false, // Had Finder Meta-Data changed + "access": false, // Had access permissions changed + "xattrs": false // Had xAttributes changed + }, + "flags": 0x100000000 +} +``` + +## Changelog + +- v2.3 supports Apple Silicon ARM CPUs +- v2 supports node 8.16+ and reduces package size massively +- v1.2.8 supports node 6+ +- v1.2.7 supports node 4+ + +## Troubleshooting + +- I'm getting an `EBADPLATFORM` `Unsupported platform for fsevents` error. +- It's fine, nothing is broken. fsevents is macOS-only. Other platforms are skipped. If you want to hide this warning, report a bug to the npm bug tracker asking them to hide `EBADPLATFORM` warnings by default. + +## License + +The MIT License Copyright (C) 2010-2020 by Philipp Dunkel, Ben Noordhuis, Elan Shankar, Paul Miller — see LICENSE file.
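To make the flag handling described above concrete, here is a small sketch that combines `watch`, `getInfo`, and the exported `constants` bitmask; it is not part of the package, and the watched directory and shutdown delay are illustrative assumptions.

```js
// Sketch based on the fsevents API documented above (macOS only).
// The directory and the 10-second timeout are illustrative assumptions.
const fsevents = require('fsevents');

const stop = fsevents.watch('/tmp/watched-dir', (path, flags, id) => {
  const info = fsevents.getInfo(path, flags);
  // The same flags can also be tested directly against the exported bitmask:
  const isFile = Boolean(flags & fsevents.constants.ItemIsFile);
  if (info.event === 'created' && isFile) {
    console.log(`new file: ${path} (event id ${id})`);
  }
});

// Keep a reference to `stop` so the watcher is not garbage collected;
// calling it returns a Promise that resolves once the watcher has shut down.
setTimeout(() => stop().then(() => console.log('watcher stopped')), 10000);
```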
+ +Visit our [GitHub page](https://github.com/fsevents/fsevents) and [NPM Page](https://npmjs.org/package/fsevents) diff --git a/node_modules/fsevents/fsevents.d.ts b/node_modules/fsevents/fsevents.d.ts new file mode 100644 index 0000000..2723c04 --- /dev/null +++ b/node_modules/fsevents/fsevents.d.ts @@ -0,0 +1,46 @@ +declare type Event = "created" | "cloned" | "modified" | "deleted" | "moved" | "root-changed" | "unknown"; +declare type Type = "file" | "directory" | "symlink"; +declare type FileChanges = { + inode: boolean; + finder: boolean; + access: boolean; + xattrs: boolean; +}; +declare type Info = { + event: Event; + path: string; + type: Type; + changes: FileChanges; + flags: number; +}; +declare type WatchHandler = (path: string, flags: number, id: string) => void; +export declare function watch(path: string, handler: WatchHandler): () => Promise; +export declare function watch(path: string, since: number, handler: WatchHandler): () => Promise; +export declare function getInfo(path: string, flags: number): Info; +export declare const constants: { + None: 0x00000000; + MustScanSubDirs: 0x00000001; + UserDropped: 0x00000002; + KernelDropped: 0x00000004; + EventIdsWrapped: 0x00000008; + HistoryDone: 0x00000010; + RootChanged: 0x00000020; + Mount: 0x00000040; + Unmount: 0x00000080; + ItemCreated: 0x00000100; + ItemRemoved: 0x00000200; + ItemInodeMetaMod: 0x00000400; + ItemRenamed: 0x00000800; + ItemModified: 0x00001000; + ItemFinderInfoMod: 0x00002000; + ItemChangeOwner: 0x00004000; + ItemXattrMod: 0x00008000; + ItemIsFile: 0x00010000; + ItemIsDir: 0x00020000; + ItemIsSymlink: 0x00040000; + ItemIsHardlink: 0x00100000; + ItemIsLastHardlink: 0x00200000; + OwnEvent: 0x00080000; + ItemCloned: 0x00400000; +}; +export {}; diff --git a/node_modules/fsevents/fsevents.js b/node_modules/fsevents/fsevents.js new file mode 100644 index 0000000..198da98 --- /dev/null +++ b/node_modules/fsevents/fsevents.js @@ -0,0 +1,83 @@ +/* + ** © 2020 by Philipp Dunkel, Ben Noordhuis, Elan Shankar, Paul Miller + ** Licensed under MIT License. + */ + +/* jshint node:true */ +"use strict"; + +if (process.platform !== "darwin") { + throw new Error(`Module 'fsevents' is not compatible with platform '${process.platform}'`); +} + +const Native = require("./fsevents.node"); +const events = Native.constants; + +function watch(path, since, handler) { + if (typeof path !== "string") { + throw new TypeError(`fsevents argument 1 must be a string and not a ${typeof path}`); + } + if ("function" === typeof since && "undefined" === typeof handler) { + handler = since; + since = Native.flags.SinceNow; + } + if (typeof since !== "number") { + throw new TypeError(`fsevents argument 2 must be a number and not a ${typeof since}`); + } + if (typeof handler !== "function") { + throw new TypeError(`fsevents argument 3 must be a function and not a ${typeof handler}`); + } + + let instance = Native.start(Native.global, path, since, handler); + if (!instance) throw new Error(`could not watch: ${path}`); + return () => { + const result = instance ? 
Promise.resolve(instance).then(Native.stop) : Promise.resolve(undefined); + instance = undefined; + return result; + }; +} + +function getInfo(path, flags) { + return { + path, + flags, + event: getEventType(flags), + type: getFileType(flags), + changes: getFileChanges(flags), + }; +} + +function getFileType(flags) { + if (events.ItemIsFile & flags) return "file"; + if (events.ItemIsDir & flags) return "directory"; + if (events.MustScanSubDirs & flags) return "directory"; + if (events.ItemIsSymlink & flags) return "symlink"; +} +function anyIsTrue(obj) { + for (let key in obj) { + if (obj[key]) return true; + } + return false; +} +function getEventType(flags) { + if (events.ItemRemoved & flags) return "deleted"; + if (events.ItemRenamed & flags) return "moved"; + if (events.ItemCreated & flags) return "created"; + if (events.ItemModified & flags) return "modified"; + if (events.RootChanged & flags) return "root-changed"; + if (events.ItemCloned & flags) return "cloned"; + if (anyIsTrue(flags)) return "modified"; + return "unknown"; +} +function getFileChanges(flags) { + return { + inode: !!(events.ItemInodeMetaMod & flags), + finder: !!(events.ItemFinderInfoMod & flags), + access: !!(events.ItemChangeOwner & flags), + xattrs: !!(events.ItemXattrMod & flags), + }; +} + +exports.watch = watch; +exports.getInfo = getInfo; +exports.constants = events; diff --git a/node_modules/fsevents/fsevents.node b/node_modules/fsevents/fsevents.node new file mode 100755 index 0000000..1cc3345 Binary files /dev/null and b/node_modules/fsevents/fsevents.node differ diff --git a/node_modules/fsevents/package.json b/node_modules/fsevents/package.json new file mode 100644 index 0000000..5d0ee15 --- /dev/null +++ b/node_modules/fsevents/package.json @@ -0,0 +1,62 @@ +{ + "name": "fsevents", + "version": "2.3.3", + "description": "Native Access to MacOS FSEvents", + "main": "fsevents.js", + "types": "fsevents.d.ts", + "os": [ + "darwin" + ], + "files": [ + "fsevents.d.ts", + "fsevents.js", + "fsevents.node" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + }, + "scripts": { + "clean": "node-gyp clean && rm -f fsevents.node", + "build": "node-gyp clean && rm -f fsevents.node && node-gyp rebuild && node-gyp clean", + "test": "/bin/bash ./test.sh 2>/dev/null", + "prepublishOnly": "npm run build" + }, + "repository": { + "type": "git", + "url": "https://github.com/fsevents/fsevents.git" + }, + "keywords": [ + "fsevents", + "mac" + ], + "contributors": [ + { + "name": "Philipp Dunkel", + "email": "pip@pipobscure.com" + }, + { + "name": "Ben Noordhuis", + "email": "info@bnoordhuis.nl" + }, + { + "name": "Elan Shankar", + "email": "elan.shanker@gmail.com" + }, + { + "name": "Miroslav Bajtoš", + "email": "mbajtoss@gmail.com" + }, + { + "name": "Paul Miller", + "url": "https://paulmillr.com" + } + ], + "license": "MIT", + "bugs": { + "url": "https://github.com/fsevents/fsevents/issues" + }, + "homepage": "https://github.com/fsevents/fsevents", + "devDependencies": { + "node-gyp": "^9.4.0" + } +} diff --git a/node_modules/nanoid/LICENSE b/node_modules/nanoid/LICENSE new file mode 100644 index 0000000..37f56aa --- /dev/null +++ b/node_modules/nanoid/LICENSE @@ -0,0 +1,20 @@ +The MIT License (MIT) + +Copyright 2017 Andrey Sitnik + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, 
distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/nanoid/README.md b/node_modules/nanoid/README.md new file mode 100644 index 0000000..35abb57 --- /dev/null +++ b/node_modules/nanoid/README.md @@ -0,0 +1,39 @@ +# Nano ID + +Nano ID logo by Anton Lovchikov + +**English** | [Русский](./README.ru.md) | [简体中文](./README.zh-CN.md) | [Bahasa Indonesia](./README.id-ID.md) + +A tiny, secure, URL-friendly, unique string ID generator for JavaScript. + +> “An amazing level of senseless perfectionism, +> which is simply impossible not to respect.” + +* **Small.** 130 bytes (minified and gzipped). No dependencies. + [Size Limit] controls the size. +* **Fast.** It is 2 times faster than UUID. +* **Safe.** It uses hardware random generator. Can be used in clusters. +* **Short IDs.** It uses a larger alphabet than UUID (`A-Za-z0-9_-`). + So ID size was reduced from 36 to 21 symbols. +* **Portable.** Nano ID was ported + to [20 programming languages](#other-programming-languages). + +```js +import { nanoid } from 'nanoid' +model.id = nanoid() //=> "V1StGXR8_Z5jdHi6B-myT" +``` + +Supports modern browsers, IE [with Babel], Node.js and React Native. + +[online tool]: https://gitpod.io/#https://github.com/ai/nanoid/ +[with Babel]: https://developer.epages.com/blog/coding/how-to-transpile-node-modules-with-babel-and-webpack-in-a-monorepo/ +[Size Limit]: https://github.com/ai/size-limit + + + Sponsored by Evil Martians + + +## Docs +Read full docs **[here](https://github.com/ai/nanoid#readme)**. 
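As a quick orientation before the vendored sources that follow, this short sketch exercises the two main entry points they implement, `nanoid` and `customAlphabet` (nanoid 3.x); the hex alphabet and ID sizes are illustrative choices, not defaults of the package.

```js
// Usage sketch for the nanoid 3.x API implemented by the files below.
// The custom alphabet and sizes are illustrative assumptions.
const { nanoid, customAlphabet } = require('nanoid');

// Default: 21 URL-safe symbols from A-Za-z0-9_- using a secure random source.
console.log(nanoid()); // e.g. "V1StGXR8_Z5jdHi6B-myT"

// A shorter ID trades length for a higher collision probability.
console.log(nanoid(10));

// customAlphabet returns a generator bound to a fixed alphabet and default size.
const hexId = customAlphabet('0123456789abcdef', 12);
console.log(hexId()); // e.g. a 12-character lowercase hex string
```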
diff --git a/node_modules/nanoid/async/index.browser.cjs b/node_modules/nanoid/async/index.browser.cjs new file mode 100644 index 0000000..7e5bba8 --- /dev/null +++ b/node_modules/nanoid/async/index.browser.cjs @@ -0,0 +1,34 @@ +let random = async bytes => crypto.getRandomValues(new Uint8Array(bytes)) +let customAlphabet = (alphabet, defaultSize = 21) => { + let mask = (2 << (Math.log(alphabet.length - 1) / Math.LN2)) - 1 + let step = -~((1.6 * mask * defaultSize) / alphabet.length) + return async (size = defaultSize) => { + let id = '' + while (true) { + let bytes = crypto.getRandomValues(new Uint8Array(step)) + let i = step + while (i--) { + id += alphabet[bytes[i] & mask] || '' + if (id.length === size) return id + } + } + } +} +let nanoid = async (size = 21) => { + let id = '' + let bytes = crypto.getRandomValues(new Uint8Array(size)) + while (size--) { + let byte = bytes[size] & 63 + if (byte < 36) { + id += byte.toString(36) + } else if (byte < 62) { + id += (byte - 26).toString(36).toUpperCase() + } else if (byte < 63) { + id += '_' + } else { + id += '-' + } + } + return id +} +module.exports = { nanoid, customAlphabet, random } diff --git a/node_modules/nanoid/async/index.browser.js b/node_modules/nanoid/async/index.browser.js new file mode 100644 index 0000000..5ece04d --- /dev/null +++ b/node_modules/nanoid/async/index.browser.js @@ -0,0 +1,34 @@ +let random = async bytes => crypto.getRandomValues(new Uint8Array(bytes)) +let customAlphabet = (alphabet, defaultSize = 21) => { + let mask = (2 << (Math.log(alphabet.length - 1) / Math.LN2)) - 1 + let step = -~((1.6 * mask * defaultSize) / alphabet.length) + return async (size = defaultSize) => { + let id = '' + while (true) { + let bytes = crypto.getRandomValues(new Uint8Array(step)) + let i = step + while (i--) { + id += alphabet[bytes[i] & mask] || '' + if (id.length === size) return id + } + } + } +} +let nanoid = async (size = 21) => { + let id = '' + let bytes = crypto.getRandomValues(new Uint8Array(size)) + while (size--) { + let byte = bytes[size] & 63 + if (byte < 36) { + id += byte.toString(36) + } else if (byte < 62) { + id += (byte - 26).toString(36).toUpperCase() + } else if (byte < 63) { + id += '_' + } else { + id += '-' + } + } + return id +} +export { nanoid, customAlphabet, random } diff --git a/node_modules/nanoid/async/index.cjs b/node_modules/nanoid/async/index.cjs new file mode 100644 index 0000000..50db105 --- /dev/null +++ b/node_modules/nanoid/async/index.cjs @@ -0,0 +1,35 @@ +let crypto = require('crypto') +let { urlAlphabet } = require('../url-alphabet/index.cjs') +let random = bytes => + new Promise((resolve, reject) => { + crypto.randomFill(Buffer.allocUnsafe(bytes), (err, buf) => { + if (err) { + reject(err) + } else { + resolve(buf) + } + }) + }) +let customAlphabet = (alphabet, defaultSize = 21) => { + let mask = (2 << (31 - Math.clz32((alphabet.length - 1) | 1))) - 1 + let step = Math.ceil((1.6 * mask * defaultSize) / alphabet.length) + let tick = (id, size = defaultSize) => + random(step).then(bytes => { + let i = step + while (i--) { + id += alphabet[bytes[i] & mask] || '' + if (id.length === size) return id + } + return tick(id, size) + }) + return size => tick('', size) +} +let nanoid = (size = 21) => + random(size).then(bytes => { + let id = '' + while (size--) { + id += urlAlphabet[bytes[size] & 63] + } + return id + }) +module.exports = { nanoid, customAlphabet, random } diff --git a/node_modules/nanoid/async/index.d.ts b/node_modules/nanoid/async/index.d.ts new file mode 100644 index 
0000000..9e91965 --- /dev/null +++ b/node_modules/nanoid/async/index.d.ts @@ -0,0 +1,56 @@ +/** + * Generate secure URL-friendly unique ID. The non-blocking version. + * + * By default, the ID will have 21 symbols to have a collision probability + * similar to UUID v4. + * + * ```js + * import { nanoid } from 'nanoid/async' + * nanoid().then(id => { + * model.id = id + * }) + * ``` + * + * @param size Size of the ID. The default size is 21. + * @returns A promise with a random string. + */ +export function nanoid(size?: number): Promise + +/** + * A low-level function. + * Generate secure unique ID with custom alphabet. The non-blocking version. + * + * Alphabet must contain 256 symbols or less. Otherwise, the generator + * will not be secure. + * + * @param alphabet Alphabet used to generate the ID. + * @param defaultSize Size of the ID. The default size is 21. + * @returns A function that returns a promise with a random string. + * + * ```js + * import { customAlphabet } from 'nanoid/async' + * const nanoid = customAlphabet('0123456789абвгдеё', 5) + * nanoid().then(id => { + * model.id = id //=> "8ё56а" + * }) + * ``` + */ +export function customAlphabet( + alphabet: string, + defaultSize?: number +): (size?: number) => Promise + +/** + * Generate an array of random bytes collected from hardware noise. + * + * ```js + * import { random } from 'nanoid/async' + * random(5).then(bytes => { + * bytes //=> [10, 67, 212, 67, 89] + * }) + * ``` + * + * @param bytes Size of the array. + * @returns A promise with a random bytes array. + */ +export function random(bytes: number): Promise diff --git a/node_modules/nanoid/async/index.js b/node_modules/nanoid/async/index.js new file mode 100644 index 0000000..803fad6 --- /dev/null +++ b/node_modules/nanoid/async/index.js @@ -0,0 +1,35 @@ +import crypto from 'crypto' +import { urlAlphabet } from '../url-alphabet/index.js' +let random = bytes => + new Promise((resolve, reject) => { + crypto.randomFill(Buffer.allocUnsafe(bytes), (err, buf) => { + if (err) { + reject(err) + } else { + resolve(buf) + } + }) + }) +let customAlphabet = (alphabet, defaultSize = 21) => { + let mask = (2 << (31 - Math.clz32((alphabet.length - 1) | 1))) - 1 + let step = Math.ceil((1.6 * mask * defaultSize) / alphabet.length) + let tick = (id, size = defaultSize) => + random(step).then(bytes => { + let i = step + while (i--) { + id += alphabet[bytes[i] & mask] || '' + if (id.length === size) return id + } + return tick(id, size) + }) + return size => tick('', size) +} +let nanoid = (size = 21) => + random(size).then(bytes => { + let id = '' + while (size--) { + id += urlAlphabet[bytes[size] & 63] + } + return id + }) +export { nanoid, customAlphabet, random } diff --git a/node_modules/nanoid/async/index.native.js b/node_modules/nanoid/async/index.native.js new file mode 100644 index 0000000..5cb3d57 --- /dev/null +++ b/node_modules/nanoid/async/index.native.js @@ -0,0 +1,26 @@ +import { getRandomBytesAsync } from 'expo-random' +import { urlAlphabet } from '../url-alphabet/index.js' +let random = getRandomBytesAsync +let customAlphabet = (alphabet, defaultSize = 21) => { + let mask = (2 << (31 - Math.clz32((alphabet.length - 1) | 1))) - 1 + let step = Math.ceil((1.6 * mask * defaultSize) / alphabet.length) + let tick = (id, size = defaultSize) => + random(step).then(bytes => { + let i = step + while (i--) { + id += alphabet[bytes[i] & mask] || '' + if (id.length === size) return id + } + return tick(id, size) + }) + return size => tick('', size) +} +let nanoid = (size = 21) => + 
random(size).then(bytes => { + let id = '' + while (size--) { + id += urlAlphabet[bytes[size] & 63] + } + return id + }) +export { nanoid, customAlphabet, random } diff --git a/node_modules/nanoid/async/package.json b/node_modules/nanoid/async/package.json new file mode 100644 index 0000000..578cdb4 --- /dev/null +++ b/node_modules/nanoid/async/package.json @@ -0,0 +1,12 @@ +{ + "type": "module", + "main": "index.cjs", + "module": "index.js", + "react-native": { + "./index.js": "./index.native.js" + }, + "browser": { + "./index.js": "./index.browser.js", + "./index.cjs": "./index.browser.cjs" + } +} \ No newline at end of file diff --git a/node_modules/nanoid/bin/nanoid.cjs b/node_modules/nanoid/bin/nanoid.cjs new file mode 100755 index 0000000..c76db0f --- /dev/null +++ b/node_modules/nanoid/bin/nanoid.cjs @@ -0,0 +1,55 @@ +#!/usr/bin/env node + +let { nanoid, customAlphabet } = require('..') + +function print(msg) { + process.stdout.write(msg + '\n') +} + +function error(msg) { + process.stderr.write(msg + '\n') + process.exit(1) +} + +if (process.argv.includes('--help') || process.argv.includes('-h')) { + print(` + Usage + $ nanoid [options] + + Options + -s, --size Generated ID size + -a, --alphabet Alphabet to use + -h, --help Show this help + + Examples + $ nanoid --s 15 + S9sBF77U6sDB8Yg + + $ nanoid --size 10 --alphabet abc + bcabababca`) + process.exit() +} + +let alphabet, size +for (let i = 2; i < process.argv.length; i++) { + let arg = process.argv[i] + if (arg === '--size' || arg === '-s') { + size = Number(process.argv[i + 1]) + i += 1 + if (Number.isNaN(size) || size <= 0) { + error('Size must be positive integer') + } + } else if (arg === '--alphabet' || arg === '-a') { + alphabet = process.argv[i + 1] + i += 1 + } else { + error('Unknown argument ' + arg) + } +} + +if (alphabet) { + let customNanoid = customAlphabet(alphabet, size) + print(customNanoid()) +} else { + print(nanoid(size)) +} diff --git a/node_modules/nanoid/index.browser.cjs b/node_modules/nanoid/index.browser.cjs new file mode 100644 index 0000000..f800d6f --- /dev/null +++ b/node_modules/nanoid/index.browser.cjs @@ -0,0 +1,34 @@ +let { urlAlphabet } = require('./url-alphabet/index.cjs') +let random = bytes => crypto.getRandomValues(new Uint8Array(bytes)) +let customRandom = (alphabet, defaultSize, getRandom) => { + let mask = (2 << (Math.log(alphabet.length - 1) / Math.LN2)) - 1 + let step = -~((1.6 * mask * defaultSize) / alphabet.length) + return (size = defaultSize) => { + let id = '' + while (true) { + let bytes = getRandom(step) + let j = step + while (j--) { + id += alphabet[bytes[j] & mask] || '' + if (id.length === size) return id + } + } + } +} +let customAlphabet = (alphabet, size = 21) => + customRandom(alphabet, size, random) +let nanoid = (size = 21) => + crypto.getRandomValues(new Uint8Array(size)).reduce((id, byte) => { + byte &= 63 + if (byte < 36) { + id += byte.toString(36) + } else if (byte < 62) { + id += (byte - 26).toString(36).toUpperCase() + } else if (byte > 62) { + id += '-' + } else { + id += '_' + } + return id + }, '') +module.exports = { nanoid, customAlphabet, customRandom, urlAlphabet, random } diff --git a/node_modules/nanoid/index.browser.js b/node_modules/nanoid/index.browser.js new file mode 100644 index 0000000..8b3139b --- /dev/null +++ b/node_modules/nanoid/index.browser.js @@ -0,0 +1,34 @@ +import { urlAlphabet } from './url-alphabet/index.js' +let random = bytes => crypto.getRandomValues(new Uint8Array(bytes)) +let customRandom = (alphabet, defaultSize, getRandom) 
=> { + let mask = (2 << (Math.log(alphabet.length - 1) / Math.LN2)) - 1 + let step = -~((1.6 * mask * defaultSize) / alphabet.length) + return (size = defaultSize) => { + let id = '' + while (true) { + let bytes = getRandom(step) + let j = step + while (j--) { + id += alphabet[bytes[j] & mask] || '' + if (id.length === size) return id + } + } + } +} +let customAlphabet = (alphabet, size = 21) => + customRandom(alphabet, size, random) +let nanoid = (size = 21) => + crypto.getRandomValues(new Uint8Array(size)).reduce((id, byte) => { + byte &= 63 + if (byte < 36) { + id += byte.toString(36) + } else if (byte < 62) { + id += (byte - 26).toString(36).toUpperCase() + } else if (byte > 62) { + id += '-' + } else { + id += '_' + } + return id + }, '') +export { nanoid, customAlphabet, customRandom, urlAlphabet, random } diff --git a/node_modules/nanoid/index.cjs b/node_modules/nanoid/index.cjs new file mode 100644 index 0000000..0fa85e9 --- /dev/null +++ b/node_modules/nanoid/index.cjs @@ -0,0 +1,45 @@ +let crypto = require('crypto') +let { urlAlphabet } = require('./url-alphabet/index.cjs') +const POOL_SIZE_MULTIPLIER = 128 +let pool, poolOffset +let fillPool = bytes => { + if (!pool || pool.length < bytes) { + pool = Buffer.allocUnsafe(bytes * POOL_SIZE_MULTIPLIER) + crypto.randomFillSync(pool) + poolOffset = 0 + } else if (poolOffset + bytes > pool.length) { + crypto.randomFillSync(pool) + poolOffset = 0 + } + poolOffset += bytes +} +let random = bytes => { + fillPool((bytes -= 0)) + return pool.subarray(poolOffset - bytes, poolOffset) +} +let customRandom = (alphabet, defaultSize, getRandom) => { + let mask = (2 << (31 - Math.clz32((alphabet.length - 1) | 1))) - 1 + let step = Math.ceil((1.6 * mask * defaultSize) / alphabet.length) + return (size = defaultSize) => { + let id = '' + while (true) { + let bytes = getRandom(step) + let i = step + while (i--) { + id += alphabet[bytes[i] & mask] || '' + if (id.length === size) return id + } + } + } +} +let customAlphabet = (alphabet, size = 21) => + customRandom(alphabet, size, random) +let nanoid = (size = 21) => { + fillPool((size -= 0)) + let id = '' + for (let i = poolOffset - size; i < poolOffset; i++) { + id += urlAlphabet[pool[i] & 63] + } + return id +} +module.exports = { nanoid, customAlphabet, customRandom, urlAlphabet, random } diff --git a/node_modules/nanoid/index.d.cts b/node_modules/nanoid/index.d.cts new file mode 100644 index 0000000..3e111a3 --- /dev/null +++ b/node_modules/nanoid/index.d.cts @@ -0,0 +1,91 @@ +/** + * Generate secure URL-friendly unique ID. + * + * By default, the ID will have 21 symbols to have a collision probability + * similar to UUID v4. + * + * ```js + * import { nanoid } from 'nanoid' + * model.id = nanoid() //=> "Uakgb_J5m9g-0JDMbcJqL" + * ``` + * + * @param size Size of the ID. The default size is 21. + * @returns A random string. + */ +export function nanoid(size?: number): string + +/** + * Generate secure unique ID with custom alphabet. + * + * Alphabet must contain 256 symbols or less. Otherwise, the generator + * will not be secure. + * + * @param alphabet Alphabet used to generate the ID. + * @param defaultSize Size of the ID. The default size is 21. + * @returns A random string generator. 
+ * + * ```js + * const { customAlphabet } = require('nanoid') + * const nanoid = customAlphabet('0123456789абвгдеё', 5) + * nanoid() //=> "8ё56а" + * ``` + */ +export function customAlphabet( + alphabet: string, + defaultSize?: number +): (size?: number) => string + +/** + * Generate unique ID with custom random generator and alphabet. + * + * Alphabet must contain 256 symbols or less. Otherwise, the generator + * will not be secure. + * + * ```js + * import { customRandom } from 'nanoid/format' + * + * const nanoid = customRandom('abcdef', 5, size => { + * const random = [] + * for (let i = 0; i < size; i++) { + * random.push(randomByte()) + * } + * return random + * }) + * + * nanoid() //=> "fbaef" + * ``` + * + * @param alphabet Alphabet used to generate a random string. + * @param size Size of the random string. + * @param random A random bytes generator. + * @returns A random string generator. + */ +export function customRandom( + alphabet: string, + size: number, + random: (bytes: number) => Uint8Array +): () => string + +/** + * URL safe symbols. + * + * ```js + * import { urlAlphabet } from 'nanoid' + * const nanoid = customAlphabet(urlAlphabet, 10) + * nanoid() //=> "Uakgb_J5m9" + * ``` + */ +export const urlAlphabet: string + +/** + * Generate an array of random bytes collected from hardware noise. + * + * ```js + * import { customRandom, random } from 'nanoid' + * const nanoid = customRandom("abcdef", 5, random) + * ``` + * + * @param bytes Size of the array. + * @returns An array of random bytes. + */ +export function random(bytes: number): Uint8Array diff --git a/node_modules/nanoid/index.d.ts b/node_modules/nanoid/index.d.ts new file mode 100644 index 0000000..3e111a3 --- /dev/null +++ b/node_modules/nanoid/index.d.ts @@ -0,0 +1,91 @@ +/** + * Generate secure URL-friendly unique ID. + * + * By default, the ID will have 21 symbols to have a collision probability + * similar to UUID v4. + * + * ```js + * import { nanoid } from 'nanoid' + * model.id = nanoid() //=> "Uakgb_J5m9g-0JDMbcJqL" + * ``` + * + * @param size Size of the ID. The default size is 21. + * @returns A random string. + */ +export function nanoid(size?: number): string + +/** + * Generate secure unique ID with custom alphabet. + * + * Alphabet must contain 256 symbols or less. Otherwise, the generator + * will not be secure. + * + * @param alphabet Alphabet used to generate the ID. + * @param defaultSize Size of the ID. The default size is 21. + * @returns A random string generator. + * + * ```js + * const { customAlphabet } = require('nanoid') + * const nanoid = customAlphabet('0123456789абвгдеё', 5) + * nanoid() //=> "8ё56а" + * ``` + */ +export function customAlphabet( + alphabet: string, + defaultSize?: number +): (size?: number) => string + +/** + * Generate unique ID with custom random generator and alphabet. + * + * Alphabet must contain 256 symbols or less. Otherwise, the generator + * will not be secure. + * + * ```js + * import { customRandom } from 'nanoid/format' + * + * const nanoid = customRandom('abcdef', 5, size => { + * const random = [] + * for (let i = 0; i < size; i++) { + * random.push(randomByte()) + * } + * return random + * }) + * + * nanoid() //=> "fbaef" + * ``` + * + * @param alphabet Alphabet used to generate a random string. + * @param size Size of the random string. + * @param random A random bytes generator. + * @returns A random string generator. 
+ */ +export function customRandom( + alphabet: string, + size: number, + random: (bytes: number) => Uint8Array +): () => string + +/** + * URL safe symbols. + * + * ```js + * import { urlAlphabet } from 'nanoid' + * const nanoid = customAlphabet(urlAlphabet, 10) + * nanoid() //=> "Uakgb_J5m9" + * ``` + */ +export const urlAlphabet: string + +/** + * Generate an array of random bytes collected from hardware noise. + * + * ```js + * import { customRandom, random } from 'nanoid' + * const nanoid = customRandom("abcdef", 5, random) + * ``` + * + * @param bytes Size of the array. + * @returns An array of random bytes. + */ +export function random(bytes: number): Uint8Array diff --git a/node_modules/nanoid/index.js b/node_modules/nanoid/index.js new file mode 100644 index 0000000..21e155f --- /dev/null +++ b/node_modules/nanoid/index.js @@ -0,0 +1,45 @@ +import crypto from 'crypto' +import { urlAlphabet } from './url-alphabet/index.js' +const POOL_SIZE_MULTIPLIER = 128 +let pool, poolOffset +let fillPool = bytes => { + if (!pool || pool.length < bytes) { + pool = Buffer.allocUnsafe(bytes * POOL_SIZE_MULTIPLIER) + crypto.randomFillSync(pool) + poolOffset = 0 + } else if (poolOffset + bytes > pool.length) { + crypto.randomFillSync(pool) + poolOffset = 0 + } + poolOffset += bytes +} +let random = bytes => { + fillPool((bytes -= 0)) + return pool.subarray(poolOffset - bytes, poolOffset) +} +let customRandom = (alphabet, defaultSize, getRandom) => { + let mask = (2 << (31 - Math.clz32((alphabet.length - 1) | 1))) - 1 + let step = Math.ceil((1.6 * mask * defaultSize) / alphabet.length) + return (size = defaultSize) => { + let id = '' + while (true) { + let bytes = getRandom(step) + let i = step + while (i--) { + id += alphabet[bytes[i] & mask] || '' + if (id.length === size) return id + } + } + } +} +let customAlphabet = (alphabet, size = 21) => + customRandom(alphabet, size, random) +let nanoid = (size = 21) => { + fillPool((size -= 0)) + let id = '' + for (let i = poolOffset - size; i < poolOffset; i++) { + id += urlAlphabet[pool[i] & 63] + } + return id +} +export { nanoid, customAlphabet, customRandom, urlAlphabet, random } diff --git a/node_modules/nanoid/nanoid.js b/node_modules/nanoid/nanoid.js new file mode 100644 index 0000000..ec242ea --- /dev/null +++ b/node_modules/nanoid/nanoid.js @@ -0,0 +1 @@ +export let nanoid=(t=21)=>crypto.getRandomValues(new Uint8Array(t)).reduce(((t,e)=>t+=(e&=63)<36?e.toString(36):e<62?(e-26).toString(36).toUpperCase():e<63?"_":"-"),""); \ No newline at end of file diff --git a/node_modules/nanoid/non-secure/index.cjs b/node_modules/nanoid/non-secure/index.cjs new file mode 100644 index 0000000..09d57cd --- /dev/null +++ b/node_modules/nanoid/non-secure/index.cjs @@ -0,0 +1,21 @@ +let urlAlphabet = + 'useandom-26T198340PX75pxJACKVERYMINDBUSHWOLF_GQZbfghjklqvwyzrict' +let customAlphabet = (alphabet, defaultSize = 21) => { + return (size = defaultSize) => { + let id = '' + let i = size + while (i--) { + id += alphabet[(Math.random() * alphabet.length) | 0] + } + return id + } +} +let nanoid = (size = 21) => { + let id = '' + let i = size + while (i--) { + id += urlAlphabet[(Math.random() * 64) | 0] + } + return id +} +module.exports = { nanoid, customAlphabet } diff --git a/node_modules/nanoid/non-secure/index.d.ts b/node_modules/nanoid/non-secure/index.d.ts new file mode 100644 index 0000000..4965322 --- /dev/null +++ b/node_modules/nanoid/non-secure/index.d.ts @@ -0,0 +1,33 @@ +/** + * Generate URL-friendly unique ID. 
This method uses the non-secure + * predictable random generator with bigger collision probability. + * + * ```js + * import { nanoid } from 'nanoid/non-secure' + * model.id = nanoid() //=> "Uakgb_J5m9g-0JDMbcJqL" + * ``` + * + * @param size Size of the ID. The default size is 21. + * @returns A random string. + */ +export function nanoid(size?: number): string + +/** + * Generate a unique ID based on a custom alphabet. + * This method uses the non-secure predictable random generator + * with bigger collision probability. + * + * @param alphabet Alphabet used to generate the ID. + * @param defaultSize Size of the ID. The default size is 21. + * @returns A random string generator. + * + * ```js + * import { customAlphabet } from 'nanoid/non-secure' + * const nanoid = customAlphabet('0123456789абвгдеё', 5) + * model.id = //=> "8ё56а" + * ``` + */ +export function customAlphabet( + alphabet: string, + defaultSize?: number +): (size?: number) => string diff --git a/node_modules/nanoid/non-secure/index.js b/node_modules/nanoid/non-secure/index.js new file mode 100644 index 0000000..e7e19ad --- /dev/null +++ b/node_modules/nanoid/non-secure/index.js @@ -0,0 +1,21 @@ +let urlAlphabet = + 'useandom-26T198340PX75pxJACKVERYMINDBUSHWOLF_GQZbfghjklqvwyzrict' +let customAlphabet = (alphabet, defaultSize = 21) => { + return (size = defaultSize) => { + let id = '' + let i = size + while (i--) { + id += alphabet[(Math.random() * alphabet.length) | 0] + } + return id + } +} +let nanoid = (size = 21) => { + let id = '' + let i = size + while (i--) { + id += urlAlphabet[(Math.random() * 64) | 0] + } + return id +} +export { nanoid, customAlphabet } diff --git a/node_modules/nanoid/non-secure/package.json b/node_modules/nanoid/non-secure/package.json new file mode 100644 index 0000000..9930d6a --- /dev/null +++ b/node_modules/nanoid/non-secure/package.json @@ -0,0 +1,6 @@ +{ + "type": "module", + "main": "index.cjs", + "module": "index.js", + "react-native": "index.js" +} \ No newline at end of file diff --git a/node_modules/nanoid/package.json b/node_modules/nanoid/package.json new file mode 100644 index 0000000..4f24d96 --- /dev/null +++ b/node_modules/nanoid/package.json @@ -0,0 +1,88 @@ +{ + "name": "nanoid", + "version": "3.3.7", + "description": "A tiny (116 bytes), secure URL-friendly unique string ID generator", + "keywords": [ + "uuid", + "random", + "id", + "url" + ], + "engines": { + "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" + }, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "author": "Andrey Sitnik ", + "license": "MIT", + "repository": "ai/nanoid", + "browser": { + "./index.js": "./index.browser.js", + "./async/index.js": "./async/index.browser.js", + "./async/index.cjs": "./async/index.browser.cjs", + "./index.cjs": "./index.browser.cjs" + }, + "react-native": "index.js", + "bin": "./bin/nanoid.cjs", + "sideEffects": false, + "types": "./index.d.ts", + "type": "module", + "main": "index.cjs", + "module": "index.js", + "exports": { + ".": { + "browser": "./index.browser.js", + "require": { + "types": "./index.d.cts", + "default": "./index.cjs" + }, + "import": { + "types": "./index.d.ts", + "default": "./index.js" + }, + "default": "./index.js" + }, + "./package.json": "./package.json", + "./async/package.json": "./async/package.json", + "./async": { + "browser": "./async/index.browser.js", + "require": { + "types": "./index.d.cts", + "default": "./async/index.cjs" + }, + "import": { + "types": "./index.d.ts", + "default": "./async/index.js" + 
}, + "default": "./async/index.js" + }, + "./non-secure/package.json": "./non-secure/package.json", + "./non-secure": { + "require": { + "types": "./index.d.cts", + "default": "./non-secure/index.cjs" + }, + "import": { + "types": "./index.d.ts", + "default": "./non-secure/index.js" + }, + "default": "./non-secure/index.js" + }, + "./url-alphabet/package.json": "./url-alphabet/package.json", + "./url-alphabet": { + "require": { + "types": "./index.d.cts", + "default": "./url-alphabet/index.cjs" + }, + "import": { + "types": "./index.d.ts", + "default": "./url-alphabet/index.js" + }, + "default": "./url-alphabet/index.js" + } + } +} \ No newline at end of file diff --git a/node_modules/nanoid/url-alphabet/index.cjs b/node_modules/nanoid/url-alphabet/index.cjs new file mode 100644 index 0000000..757b709 --- /dev/null +++ b/node_modules/nanoid/url-alphabet/index.cjs @@ -0,0 +1,3 @@ +let urlAlphabet = + 'useandom-26T198340PX75pxJACKVERYMINDBUSHWOLF_GQZbfghjklqvwyzrict' +module.exports = { urlAlphabet } diff --git a/node_modules/nanoid/url-alphabet/index.js b/node_modules/nanoid/url-alphabet/index.js new file mode 100644 index 0000000..c2782e5 --- /dev/null +++ b/node_modules/nanoid/url-alphabet/index.js @@ -0,0 +1,3 @@ +let urlAlphabet = + 'useandom-26T198340PX75pxJACKVERYMINDBUSHWOLF_GQZbfghjklqvwyzrict' +export { urlAlphabet } diff --git a/node_modules/nanoid/url-alphabet/package.json b/node_modules/nanoid/url-alphabet/package.json new file mode 100644 index 0000000..9930d6a --- /dev/null +++ b/node_modules/nanoid/url-alphabet/package.json @@ -0,0 +1,6 @@ +{ + "type": "module", + "main": "index.cjs", + "module": "index.js", + "react-native": "index.js" +} \ No newline at end of file diff --git a/node_modules/picocolors/LICENSE b/node_modules/picocolors/LICENSE new file mode 100644 index 0000000..46c9b95 --- /dev/null +++ b/node_modules/picocolors/LICENSE @@ -0,0 +1,15 @@ +ISC License + +Copyright (c) 2021-2024 Oleksii Raspopov, Kostiantyn Denysov, Anton Verinov + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/picocolors/README.md b/node_modules/picocolors/README.md new file mode 100644 index 0000000..54e3aa3 --- /dev/null +++ b/node_modules/picocolors/README.md @@ -0,0 +1,177 @@ +# picocolors + +The tiniest and the fastest library for terminal output formatting with ANSI colors. + +```javascript +import pc from "picocolors" + +console.log( + pc.green(`How are ${pc.italic(`you`)} doing?`) +) +``` + +- **No dependencies.** +- **14 times** smaller and **2 times** faster than chalk. +- Used by popular tools like PostCSS, SVGO, Stylelint, and Browserslist. +- Node.js v6+ & browsers support. Support for both CJS and ESM projects. +- TypeScript type declarations included. +- [`NO_COLOR`](https://no-color.org/) friendly. 
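
The bullet list above notes both CJS and ESM support, while the snippet at the top is ESM-only; a minimal CommonJS sketch (using only functions already documented in this README) looks like:

```javascript
// CommonJS flavour of the ESM example at the top of this README.
const pc = require("picocolors")

// Formatting functions are plain string-in, string-out and compose by nesting.
console.log(pc.bgWhite(pc.black(" build ")), pc.green("passed"), pc.dim("(2 tests)"))
```

Either entry point hands back the same API object, so the rest of the examples in this README apply to both forms.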
+ +## Motivation + +With `picocolors` we are trying to draw attention to the `node_modules` size +problem and promote performance-first culture. + +## Prior Art + +Credits go to the following projects: + +- [Nanocolors](https://github.com/ai/nanocolors) by [@ai](https://github.com/ai) +- [Colorette](https://github.com/jorgebucaran/colorette) by [@jorgebucaran](https://github.com/jorgebucaran) +- [Kleur](https://github.com/lukeed/kleur) by [@lukeed](https://github.com/lukeed) +- [Colors.js](https://github.com/Marak/colors.js) by [@Marak](https://github.com/Marak) +- [Chalk](https://github.com/chalk/chalk) by [@sindresorhus](https://github.com/sindresorhus) + +## Benchmarks + +The space in node_modules including sub-dependencies: + +```diff +$ node ./benchmarks/size.js +Data from packagephobia.com + chalk 101 kB + cli-color 1249 kB + ansi-colors 25 kB + kleur 21 kB + colorette 17 kB + nanocolors 16 kB ++ picocolors 7 kB +``` + +Library loading time: + +```diff +$ node ./benchmarks/loading.js + chalk 6.167 ms + cli-color 31.431 ms + ansi-colors 1.585 ms + kleur 2.008 ms + kleur/colors 0.773 ms + colorette 2.476 ms + nanocolors 0.833 ms ++ picocolors 0.466 ms +``` + +Benchmark for simple use case: + +```diff +$ node ./benchmarks/simple.js + chalk 24,066,342 ops/sec + cli-color 938,700 ops/sec + ansi-colors 4,532,542 ops/sec + kleur 20,343,122 ops/sec + kleur/colors 35,415,770 ops/sec + colorette 34,244,834 ops/sec + nanocolors 33,443,265 ops/sec ++ picocolors 33,271,645 ops/sec +``` + +Benchmark for complex use cases: + +```diff +$ node ./benchmarks/complex.js + chalk 969,915 ops/sec + cli-color 131,639 ops/sec + ansi-colors 342,250 ops/sec + kleur 611,880 ops/sec + kleur/colors 1,129,526 ops/sec + colorette 1,747,277 ops/sec + nanocolors 1,251,312 ops/sec ++ picocolors 2,024,086 ops/sec +``` + +## Usage + +Picocolors provides an object which includes a variety of text coloring and formatting functions + +```javascript +import pc from "picocolors" +``` + +The object includes following coloring functions: `black`, `red`, `green`, `yellow`, `blue`, `magenta`, `cyan`, `white`, `gray`. + +```javascript +console.log(`I see a ${pc.red("red door")} and I want it painted ${pc.black("black")}`) +``` + +The object also includes following background color modifier functions: `bgBlack`, `bgRed`, `bgGreen`, `bgYellow`, `bgBlue`, `bgMagenta`, `bgCyan`, `bgWhite` and bright variants `bgBlackBright`, `bgRedBright`, `bgGreenBright`, `bgYellowBright`, `bgBlueBright`, `bgMagentaBright`, `bgCyanBright`, `bgWhiteBright`. + +```javascript +console.log( + pc.bgBlack( + pc.white(`Tom appeared on the sidewalk with a bucket of whitewash and a long-handled brush.`) + ) +) +``` + +Besides colors, the object includes following formatting functions: `dim`, `bold`, `hidden`, `italic`, `underline`, `strikethrough`, `reset`, `inverse` and bright variants `blackBright`, `redBright`, `greenBright`, `yellowBright`, `blueBright`, `magentaBright`, `cyanBright`, `whiteBright`. + +```javascript +for (let task of tasks) { + console.log(`${pc.bold(task.name)} ${pc.dim(task.durationMs + "ms")}`) +} +``` + +The library provides additional utilities to ensure the best results for the task: + +- `isColorSupported` — boolean, explicitly tells whether or not the colors or formatting appear on the screen + + ```javascript + import pc from "picocolors" + + if (pc.isColorSupported) { + console.log("Yay! 
This script can use colors and formatters") + } + ``` + +- `createColors(enabled)` — a function that returns a new API object with manually defined color support configuration + + ```javascript + import pc from "picocolors" + + let { red, bgWhite } = pc.createColors(options.enableColors) + ``` + +## Replacing `chalk` + +1. Replace package name in import: + + ```diff + - import chalk from 'chalk' + + import pico from 'picocolors' + ``` + +2. Replace variable: + + ```diff + - chalk.red(text) + + pico.red(text) + ``` + +3. Replace chains to nested calls: + + ```diff + - chalk.red.bold(text) + + pico.red(pico.bold(text)) + ``` + +4. You can use [`colorize-template`](https://github.com/usmanyunusov/colorize-template) + to replace chalk’s tagged template literal. + + ```diff + + import { createColorize } from 'colorize-template' + + + let colorize = createColorize(pico) + - chalk.red.bold`full {yellow ${"text"}}` + + colorize`{red.bold full {yellow ${"text"}}}` + ``` diff --git a/node_modules/picocolors/package.json b/node_modules/picocolors/package.json new file mode 100644 index 0000000..fd781f4 --- /dev/null +++ b/node_modules/picocolors/package.json @@ -0,0 +1,49 @@ +{ + "name": "picocolors", + "version": "1.1.0", + "main": "./picocolors.js", + "types": "./picocolors.d.ts", + "browser": { + "./picocolors.js": "./picocolors.browser.js" + }, + "sideEffects": false, + "description": "The tiniest and the fastest library for terminal output formatting with ANSI colors", + "scripts": { + "test": "node tests/test.js" + }, + "files": [ + "picocolors.*", + "types.ts" + ], + "keywords": [ + "terminal", + "colors", + "formatting", + "cli", + "console" + ], + "author": "Alexey Raspopov", + "repository": "alexeyraspopov/picocolors", + "license": "ISC", + "devDependencies": { + "ansi-colors": "^4.1.1", + "benchmark": "^2.1.4", + "chalk": "^4.1.2", + "clean-publish": "^3.0.3", + "cli-color": "^2.0.0", + "colorette": "^2.0.12", + "kleur": "^4.1.4", + "nanocolors": "^0.2.12", + "prettier": "^2.4.1" + }, + "prettier": { + "printWidth": 100, + "useTabs": true, + "tabWidth": 2, + "semi": false, + "arrowParens": "avoid" + }, + "clean-publish": { + "cleanDocs": true + } +} diff --git a/node_modules/picocolors/picocolors.browser.js b/node_modules/picocolors/picocolors.browser.js new file mode 100644 index 0000000..9dcf637 --- /dev/null +++ b/node_modules/picocolors/picocolors.browser.js @@ -0,0 +1,4 @@ +var x=String; +var create=function() {return {isColorSupported:false,reset:x,bold:x,dim:x,italic:x,underline:x,inverse:x,hidden:x,strikethrough:x,black:x,red:x,green:x,yellow:x,blue:x,magenta:x,cyan:x,white:x,gray:x,bgBlack:x,bgRed:x,bgGreen:x,bgYellow:x,bgBlue:x,bgMagenta:x,bgCyan:x,bgWhite:x,blackBright:x,redBright:x,greenBright:x,yellowBright:x,blueBright:x,magentaBright:x,cyanBright:x,whiteBright:x,bgBlackBright:x,bgRedBright:x,bgGreenBright:x,bgYellowBright:x,bgBlueBright:x,bgMagentaBright:x,bgCyanBright:x,bgWhiteBright:x}}; +module.exports=create(); +module.exports.createColors = create; diff --git a/node_modules/picocolors/picocolors.d.ts b/node_modules/picocolors/picocolors.d.ts new file mode 100644 index 0000000..94e146a --- /dev/null +++ b/node_modules/picocolors/picocolors.d.ts @@ -0,0 +1,5 @@ +import { Colors } from "./types" + +declare const picocolors: Colors & { createColors: (enabled?: boolean) => Colors } + +export = picocolors diff --git a/node_modules/picocolors/picocolors.js b/node_modules/picocolors/picocolors.js new file mode 100644 index 0000000..f5ea2a1 --- /dev/null +++ 
b/node_modules/picocolors/picocolors.js @@ -0,0 +1,85 @@ +let argv = process.argv || [], + env = process.env +let isColorSupported = + !("NO_COLOR" in env || argv.includes("--no-color")) && + ("FORCE_COLOR" in env || + argv.includes("--color") || + process.platform === "win32" || + (require != null && require("tty").isatty(1) && env.TERM !== "dumb") || + "CI" in env) + +let formatter = + (open, close, replace = open) => + input => { + let string = "" + input + let index = string.indexOf(close, open.length) + return ~index + ? open + replaceClose(string, close, replace, index) + close + : open + string + close + } + +let replaceClose = (string, close, replace, index) => { + let result = "" + let cursor = 0 + do { + result += string.substring(cursor, index) + replace + cursor = index + close.length + index = string.indexOf(close, cursor) + } while (~index) + return result + string.substring(cursor) +} + +let createColors = (enabled = isColorSupported) => { + let init = enabled ? formatter : () => String + return { + isColorSupported: enabled, + reset: init("\x1b[0m", "\x1b[0m"), + bold: init("\x1b[1m", "\x1b[22m", "\x1b[22m\x1b[1m"), + dim: init("\x1b[2m", "\x1b[22m", "\x1b[22m\x1b[2m"), + italic: init("\x1b[3m", "\x1b[23m"), + underline: init("\x1b[4m", "\x1b[24m"), + inverse: init("\x1b[7m", "\x1b[27m"), + hidden: init("\x1b[8m", "\x1b[28m"), + strikethrough: init("\x1b[9m", "\x1b[29m"), + + black: init("\x1b[30m", "\x1b[39m"), + red: init("\x1b[31m", "\x1b[39m"), + green: init("\x1b[32m", "\x1b[39m"), + yellow: init("\x1b[33m", "\x1b[39m"), + blue: init("\x1b[34m", "\x1b[39m"), + magenta: init("\x1b[35m", "\x1b[39m"), + cyan: init("\x1b[36m", "\x1b[39m"), + white: init("\x1b[37m", "\x1b[39m"), + gray: init("\x1b[90m", "\x1b[39m"), + + bgBlack: init("\x1b[40m", "\x1b[49m"), + bgRed: init("\x1b[41m", "\x1b[49m"), + bgGreen: init("\x1b[42m", "\x1b[49m"), + bgYellow: init("\x1b[43m", "\x1b[49m"), + bgBlue: init("\x1b[44m", "\x1b[49m"), + bgMagenta: init("\x1b[45m", "\x1b[49m"), + bgCyan: init("\x1b[46m", "\x1b[49m"), + bgWhite: init("\x1b[47m", "\x1b[49m"), + + blackBright: init("\x1b[90m", "\x1b[39m"), + redBright: init("\x1b[91m", "\x1b[39m"), + greenBright: init("\x1b[92m", "\x1b[39m"), + yellowBright: init("\x1b[93m", "\x1b[39m"), + blueBright: init("\x1b[94m", "\x1b[39m"), + magentaBright: init("\x1b[95m", "\x1b[39m"), + cyanBright: init("\x1b[96m", "\x1b[39m"), + whiteBright: init("\x1b[97m", "\x1b[39m"), + + bgBlackBright: init("\x1b[100m","\x1b[49m"), + bgRedBright: init("\x1b[101m","\x1b[49m"), + bgGreenBright: init("\x1b[102m","\x1b[49m"), + bgYellowBright: init("\x1b[103m","\x1b[49m"), + bgBlueBright: init("\x1b[104m","\x1b[49m"), + bgMagentaBright: init("\x1b[105m","\x1b[49m"), + bgCyanBright: init("\x1b[106m","\x1b[49m"), + bgWhiteBright: init("\x1b[107m","\x1b[49m"), + } +} + +module.exports = createColors() +module.exports.createColors = createColors diff --git a/node_modules/picocolors/types.ts b/node_modules/picocolors/types.ts new file mode 100644 index 0000000..8046e27 --- /dev/null +++ b/node_modules/picocolors/types.ts @@ -0,0 +1,52 @@ +export type Formatter = (input: string | number | null | undefined) => string + +export interface Colors { + isColorSupported: boolean + + reset: Formatter + bold: Formatter + dim: Formatter + italic: Formatter + underline: Formatter + inverse: Formatter + hidden: Formatter + strikethrough: Formatter + + black: Formatter + red: Formatter + green: Formatter + yellow: Formatter + blue: Formatter + magenta: Formatter + cyan: Formatter + 
white: Formatter + gray: Formatter + + bgBlack: Formatter + bgRed: Formatter + bgGreen: Formatter + bgYellow: Formatter + bgBlue: Formatter + bgMagenta: Formatter + bgCyan: Formatter + bgWhite: Formatter + + blackBright: Formatter + redBright: Formatter + greenBright: Formatter + yellowBright: Formatter + blueBright: Formatter + magentaBright: Formatter + cyanBright: Formatter + whiteBright: Formatter + + bgBlackBright: Formatter + bgRedBright: Formatter + bgGreenBright: Formatter + bgYellowBright: Formatter + bgBlueBright: Formatter + bgMagentaBright: Formatter + bgCyanBright: Formatter + bgWhiteBright: Formatter + +} diff --git a/node_modules/postcss/LICENSE b/node_modules/postcss/LICENSE new file mode 100644 index 0000000..da057b4 --- /dev/null +++ b/node_modules/postcss/LICENSE @@ -0,0 +1,20 @@ +The MIT License (MIT) + +Copyright 2013 Andrey Sitnik + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/postcss/README.md b/node_modules/postcss/README.md new file mode 100644 index 0000000..939a802 --- /dev/null +++ b/node_modules/postcss/README.md @@ -0,0 +1,28 @@ +# PostCSS + +Philosopher’s stone, logo of PostCSS + +PostCSS is a tool for transforming styles with JS plugins. +These plugins can lint your CSS, support variables and mixins, +transpile future CSS syntax, inline images, and more. + +PostCSS is used by industry leaders including Wikipedia, Twitter, Alibaba, +and JetBrains. The [Autoprefixer] and [Stylelint] PostCSS plugins is one of the most popular CSS tools. + +--- + +  Made at Evil Martians, product consulting for developer tools. + +--- + +[Abstract Syntax Tree]: https://en.wikipedia.org/wiki/Abstract_syntax_tree +[Evil Martians]: https://evilmartians.com/?utm_source=postcss +[Autoprefixer]: https://github.com/postcss/autoprefixer +[Stylelint]: https://stylelint.io/ +[plugins]: https://github.com/postcss/postcss#plugins + + +## Docs +Read full docs **[here](https://postcss.org/)**. diff --git a/node_modules/postcss/lib/at-rule.d.ts b/node_modules/postcss/lib/at-rule.d.ts new file mode 100644 index 0000000..b2a0e0f --- /dev/null +++ b/node_modules/postcss/lib/at-rule.d.ts @@ -0,0 +1,140 @@ +import Container, { + ContainerProps, + ContainerWithChildren +} from './container.js' + +declare namespace AtRule { + export interface AtRuleRaws extends Record { + /** + * The space symbols after the last child of the node to the end of the node. + */ + after?: string + + /** + * The space between the at-rule name and its parameters. 
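 * For example, parsing `@media print {}` typically leaves a single space here,
 * between `@media` and `print`.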
+ */ + afterName?: string + + /** + * The space symbols before the node. It also stores `*` + * and `_` symbols before the declaration (IE hack). + */ + before?: string + + /** + * The symbols between the last parameter and `{` for rules. + */ + between?: string + + /** + * The rule’s selector with comments. + */ + params?: { + raw: string + value: string + } + + /** + * Contains `true` if the last child has an (optional) semicolon. + */ + semicolon?: boolean + } + + export interface AtRuleProps extends ContainerProps { + /** Name of the at-rule. */ + name: string + /** Parameters following the name of the at-rule. */ + params?: number | string + /** Information used to generate byte-to-byte equal node string as it was in the origin input. */ + raws?: AtRuleRaws + } + + // eslint-disable-next-line @typescript-eslint/no-use-before-define + export { AtRule_ as default } +} + +/** + * Represents an at-rule. + * + * ```js + * Once (root, { AtRule }) { + * let media = new AtRule({ name: 'media', params: 'print' }) + * media.append(…) + * root.append(media) + * } + * ``` + * + * If it’s followed in the CSS by a `{}` block, this node will have + * a nodes property representing its children. + * + * ```js + * const root = postcss.parse('@charset "UTF-8"; @media print {}') + * + * const charset = root.first + * charset.type //=> 'atrule' + * charset.nodes //=> undefined + * + * const media = root.last + * media.nodes //=> [] + * ``` + */ +declare class AtRule_ extends Container { + /** + * An array containing the layer’s children. + * + * ```js + * const root = postcss.parse('@layer example { a { color: black } }') + * const layer = root.first + * layer.nodes.length //=> 1 + * layer.nodes[0].selector //=> 'a' + * ``` + * + * Can be `undefinded` if the at-rule has no body. + * + * ```js + * const root = postcss.parse('@layer a, b, c;') + * const layer = root.first + * layer.nodes //=> undefined + * ``` + */ + nodes: Container['nodes'] + parent: ContainerWithChildren | undefined + + raws: AtRule.AtRuleRaws + type: 'atrule' + constructor(defaults?: AtRule.AtRuleProps) + assign(overrides: AtRule.AtRuleProps | object): this + + clone(overrides?: Partial): this + + cloneAfter(overrides?: Partial): this + + cloneBefore(overrides?: Partial): this + /** + * The at-rule’s name immediately follows the `@`. + * + * ```js + * const root = postcss.parse('@media print {}') + * const media = root.first + * media.name //=> 'media' + * ``` + */ + get name(): string + set name(value: string) + /** + * The at-rule’s parameters, the values that follow the at-rule’s name + * but precede any `{}` block. 
+ * + * ```js + * const root = postcss.parse('@media print, screen {}') + * const media = root.first + * media.params //=> 'print, screen' + * ``` + */ + get params(): string + set params(value: string) +} + +declare class AtRule extends AtRule_ {} + +export = AtRule diff --git a/node_modules/postcss/lib/at-rule.js b/node_modules/postcss/lib/at-rule.js new file mode 100644 index 0000000..9486447 --- /dev/null +++ b/node_modules/postcss/lib/at-rule.js @@ -0,0 +1,25 @@ +'use strict' + +let Container = require('./container') + +class AtRule extends Container { + constructor(defaults) { + super(defaults) + this.type = 'atrule' + } + + append(...children) { + if (!this.proxyOf.nodes) this.nodes = [] + return super.append(...children) + } + + prepend(...children) { + if (!this.proxyOf.nodes) this.nodes = [] + return super.prepend(...children) + } +} + +module.exports = AtRule +AtRule.default = AtRule + +Container.registerAtRule(AtRule) diff --git a/node_modules/postcss/lib/comment.d.ts b/node_modules/postcss/lib/comment.d.ts new file mode 100644 index 0000000..2b1a156 --- /dev/null +++ b/node_modules/postcss/lib/comment.d.ts @@ -0,0 +1,68 @@ +import Container from './container.js' +import Node, { NodeProps } from './node.js' + +declare namespace Comment { + export interface CommentRaws extends Record { + /** + * The space symbols before the node. + */ + before?: string + + /** + * The space symbols between `/*` and the comment’s text. + */ + left?: string + + /** + * The space symbols between the comment’s text. + */ + right?: string + } + + export interface CommentProps extends NodeProps { + /** Information used to generate byte-to-byte equal node string as it was in the origin input. */ + raws?: CommentRaws + /** Content of the comment. */ + text: string + } + + // eslint-disable-next-line @typescript-eslint/no-use-before-define + export { Comment_ as default } +} + +/** + * It represents a class that handles + * [CSS comments](https://developer.mozilla.org/en-US/docs/Web/CSS/Comments) + * + * ```js + * Once (root, { Comment }) { + * const note = new Comment({ text: 'Note: …' }) + * root.append(note) + * } + * ``` + * + * Remember that CSS comments inside selectors, at-rule parameters, + * or declaration values will be stored in the `raws` properties + * explained above. + */ +declare class Comment_ extends Node { + parent: Container | undefined + raws: Comment.CommentRaws + type: 'comment' + constructor(defaults?: Comment.CommentProps) + + assign(overrides: Comment.CommentProps | object): this + + clone(overrides?: Partial): this + cloneAfter(overrides?: Partial): this + cloneBefore(overrides?: Partial): this + /** + * The comment's text. 
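 *
 * A small read-back sketch, reusing the `Comment` constructed in the class-level
 * example above:
 *
 * ```js
 * const note = new Comment({ text: 'Note: …' })
 * note.text //=> 'Note: …'
 * ```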
+ */ + get text(): string + set text(value: string) +} + +declare class Comment extends Comment_ {} + +export = Comment diff --git a/node_modules/postcss/lib/comment.js b/node_modules/postcss/lib/comment.js new file mode 100644 index 0000000..c566506 --- /dev/null +++ b/node_modules/postcss/lib/comment.js @@ -0,0 +1,13 @@ +'use strict' + +let Node = require('./node') + +class Comment extends Node { + constructor(defaults) { + super(defaults) + this.type = 'comment' + } +} + +module.exports = Comment +Comment.default = Comment diff --git a/node_modules/postcss/lib/container.d.ts b/node_modules/postcss/lib/container.d.ts new file mode 100644 index 0000000..692bd69 --- /dev/null +++ b/node_modules/postcss/lib/container.d.ts @@ -0,0 +1,483 @@ +import AtRule from './at-rule.js' +import Comment from './comment.js' +import Declaration from './declaration.js' +import Node, { ChildNode, ChildProps, NodeProps } from './node.js' +import Rule from './rule.js' + +declare namespace Container { + export class ContainerWithChildren< + Child extends Node = ChildNode + > extends Container_ { + nodes: Child[] + } + + export interface ValueOptions { + /** + * String that’s used to narrow down values and speed up the regexp search. + */ + fast?: string + + /** + * An array of property names. + */ + props?: readonly string[] + } + + export interface ContainerProps extends NodeProps { + nodes?: readonly (ChildProps | Node)[] + } + + /** + * All types that can be passed into container methods to create or add a new + * child node. + */ + export type NewChild = + | ChildProps + | Node + | readonly ChildProps[] + | readonly Node[] + | readonly string[] + | string + | undefined + + // eslint-disable-next-line @typescript-eslint/no-use-before-define + export { Container_ as default } +} + +/** + * The `Root`, `AtRule`, and `Rule` container nodes + * inherit some common methods to help work with their children. + * + * Note that all containers can store any content. If you write a rule inside + * a rule, PostCSS will parse it. + */ +declare abstract class Container_ extends Node { + /** + * An array containing the container’s children. + * + * ```js + * const root = postcss.parse('a { color: black }') + * root.nodes.length //=> 1 + * root.nodes[0].selector //=> 'a' + * root.nodes[0].nodes[0].prop //=> 'color' + * ``` + */ + nodes: Child[] | undefined + + /** + * An internal method that converts a {@link NewChild} into a list of actual + * child nodes that can then be added to this container. + * + * This ensures that the nodes' parent is set to this container, that they use + * the correct prototype chain, and that they're marked as dirty. + * + * @param mnodes The new node or nodes to add. + * @param sample A node from whose raws the new node's `before` raw should be + * taken. + * @param type This should be set to `'prepend'` if the new nodes will be + * inserted at the beginning of the container. + * @hidden + */ + protected normalize( + nodes: Container.NewChild, + sample: Node | undefined, + type?: 'prepend' | false + ): Child[] + + /** + * Inserts new nodes to the end of the container. 
+ * + * ```js + * const decl1 = new Declaration({ prop: 'color', value: 'black' }) + * const decl2 = new Declaration({ prop: 'background-color', value: 'white' }) + * rule.append(decl1, decl2) + * + * root.append({ name: 'charset', params: '"UTF-8"' }) // at-rule + * root.append({ selector: 'a' }) // rule + * rule.append({ prop: 'color', value: 'black' }) // declaration + * rule.append({ text: 'Comment' }) // comment + * + * root.append('a {}') + * root.first.append('color: black; z-index: 1') + * ``` + * + * @param nodes New nodes. + * @return This node for methods chain. + */ + append(...nodes: Container.NewChild[]): this + assign(overrides: Container.ContainerProps | object): this + clone(overrides?: Partial): this + cloneAfter(overrides?: Partial): this + + cloneBefore(overrides?: Partial): this + + /** + * Iterates through the container’s immediate children, + * calling `callback` for each child. + * + * Returning `false` in the callback will break iteration. + * + * This method only iterates through the container’s immediate children. + * If you need to recursively iterate through all the container’s descendant + * nodes, use `Container#walk`. + * + * Unlike the for `{}`-cycle or `Array#forEach` this iterator is safe + * if you are mutating the array of child nodes during iteration. + * PostCSS will adjust the current index to match the mutations. + * + * ```js + * const root = postcss.parse('a { color: black; z-index: 1 }') + * const rule = root.first + * + * for (const decl of rule.nodes) { + * decl.cloneBefore({ prop: '-webkit-' + decl.prop }) + * // Cycle will be infinite, because cloneBefore moves the current node + * // to the next index + * } + * + * rule.each(decl => { + * decl.cloneBefore({ prop: '-webkit-' + decl.prop }) + * // Will be executed only for color and z-index + * }) + * ``` + * + * @param callback Iterator receives each node and index. + * @return Returns `false` if iteration was broke. + */ + each( + callback: (node: Child, index: number) => false | void + ): false | undefined + /** + * Returns `true` if callback returns `true` + * for all of the container’s children. + * + * ```js + * const noPrefixes = rule.every(i => i.prop[0] !== '-') + * ``` + * + * @param condition Iterator returns true or false. + * @return Is every child pass condition. + */ + every( + condition: (node: Child, index: number, nodes: Child[]) => boolean + ): boolean + + /** + * Returns a `child`’s index within the `Container#nodes` array. + * + * ```js + * rule.index( rule.nodes[2] ) //=> 2 + * ``` + * + * @param child Child of the current container. + * @return Child index. + */ + index(child: Child | number): number + /** + * Insert new node after old node within the container. + * + * @param oldNode Child or child’s index. + * @param newNode New node. + * @return This node for methods chain. + */ + insertAfter(oldNode: Child | number, newNode: Container.NewChild): this + + /** + * Insert new node before old node within the container. + * + * ```js + * rule.insertBefore(decl, decl.clone({ prop: '-webkit-' + decl.prop })) + * ``` + * + * @param oldNode Child or child’s index. + * @param newNode New node. + * @return This node for methods chain. + */ + insertBefore(oldNode: Child | number, newNode: Container.NewChild): this + + /** + * Traverses the container’s descendant nodes, calling callback + * for each comment node. + * + * Like `Container#each`, this method is safe + * to use if you are mutating arrays during iteration. 
+ * + * ```js + * root.walkComments(comment => { + * comment.remove() + * }) + * ``` + * + * @param callback Iterator receives each node and index. + * @return Returns `false` if iteration was broke. + */ + + /** + * Inserts new nodes to the start of the container. + * + * ```js + * const decl1 = new Declaration({ prop: 'color', value: 'black' }) + * const decl2 = new Declaration({ prop: 'background-color', value: 'white' }) + * rule.prepend(decl1, decl2) + * + * root.append({ name: 'charset', params: '"UTF-8"' }) // at-rule + * root.append({ selector: 'a' }) // rule + * rule.append({ prop: 'color', value: 'black' }) // declaration + * rule.append({ text: 'Comment' }) // comment + * + * root.append('a {}') + * root.first.append('color: black; z-index: 1') + * ``` + * + * @param nodes New nodes. + * @return This node for methods chain. + */ + prepend(...nodes: Container.NewChild[]): this + /** + * Add child to the end of the node. + * + * ```js + * rule.push(new Declaration({ prop: 'color', value: 'black' })) + * ``` + * + * @param child New node. + * @return This node for methods chain. + */ + push(child: Child): this + + /** + * Removes all children from the container + * and cleans their parent properties. + * + * ```js + * rule.removeAll() + * rule.nodes.length //=> 0 + * ``` + * + * @return This node for methods chain. + */ + removeAll(): this + + /** + * Removes node from the container and cleans the parent properties + * from the node and its children. + * + * ```js + * rule.nodes.length //=> 5 + * rule.removeChild(decl) + * rule.nodes.length //=> 4 + * decl.parent //=> undefined + * ``` + * + * @param child Child or child’s index. + * @return This node for methods chain. + */ + removeChild(child: Child | number): this + + replaceValues( + pattern: RegExp | string, + replaced: { (substring: string, ...args: any[]): string } | string + ): this + + /** + * Passes all declaration values within the container that match pattern + * through callback, replacing those values with the returned result + * of callback. + * + * This method is useful if you are using a custom unit or function + * and need to iterate through all values. + * + * ```js + * root.replaceValues(/\d+rem/, { fast: 'rem' }, string => { + * return 15 * parseInt(string) + 'px' + * }) + * ``` + * + * @param pattern Replace pattern. + * @param {object} options Options to speed up the search. + * @param replaced String to replace pattern or callback + * that returns a new value. The callback + * will receive the same arguments + * as those passed to a function parameter + * of `String#replace`. + * @return This node for methods chain. + */ + replaceValues( + pattern: RegExp | string, + options: Container.ValueOptions, + replaced: { (substring: string, ...args: any[]): string } | string + ): this + + /** + * Returns `true` if callback returns `true` for (at least) one + * of the container’s children. + * + * ```js + * const hasPrefix = rule.some(i => i.prop[0] === '-') + * ``` + * + * @param condition Iterator returns true or false. + * @return Is some child pass condition. + */ + some( + condition: (node: Child, index: number, nodes: Child[]) => boolean + ): boolean + + /** + * Traverses the container’s descendant nodes, calling callback + * for each node. + * + * Like container.each(), this method is safe to use + * if you are mutating arrays during iteration. + * + * If you only need to iterate through the container’s immediate children, + * use `Container#each`. 
+ * + * ```js + * root.walk(node => { + * // Traverses all descendant nodes. + * }) + * ``` + * + * @param callback Iterator receives each node and index. + * @return Returns `false` if iteration was broke. + */ + walk( + callback: (node: ChildNode, index: number) => false | void + ): false | undefined + + /** + * Traverses the container’s descendant nodes, calling callback + * for each at-rule node. + * + * If you pass a filter, iteration will only happen over at-rules + * that have matching names. + * + * Like `Container#each`, this method is safe + * to use if you are mutating arrays during iteration. + * + * ```js + * root.walkAtRules(rule => { + * if (isOld(rule.name)) rule.remove() + * }) + * + * let first = false + * root.walkAtRules('charset', rule => { + * if (!first) { + * first = true + * } else { + * rule.remove() + * } + * }) + * ``` + * + * @param name String or regular expression to filter at-rules by name. + * @param callback Iterator receives each node and index. + * @return Returns `false` if iteration was broke. + */ + walkAtRules( + nameFilter: RegExp | string, + callback: (atRule: AtRule, index: number) => false | void + ): false | undefined + + walkAtRules( + callback: (atRule: AtRule, index: number) => false | void + ): false | undefined + walkComments( + callback: (comment: Comment, indexed: number) => false | void + ): false | undefined + + walkComments( + callback: (comment: Comment, indexed: number) => false | void + ): false | undefined + + /** + * Traverses the container’s descendant nodes, calling callback + * for each declaration node. + * + * If you pass a filter, iteration will only happen over declarations + * with matching properties. + * + * ```js + * root.walkDecls(decl => { + * checkPropertySupport(decl.prop) + * }) + * + * root.walkDecls('border-radius', decl => { + * decl.remove() + * }) + * + * root.walkDecls(/^background/, decl => { + * decl.value = takeFirstColorFromGradient(decl.value) + * }) + * ``` + * + * Like `Container#each`, this method is safe + * to use if you are mutating arrays during iteration. + * + * @param prop String or regular expression to filter declarations + * by property name. + * @param callback Iterator receives each node and index. + * @return Returns `false` if iteration was broke. + */ + walkDecls( + propFilter: RegExp | string, + callback: (decl: Declaration, index: number) => false | void + ): false | undefined + + walkDecls( + callback: (decl: Declaration, index: number) => false | void + ): false | undefined + + /** + * Traverses the container’s descendant nodes, calling callback + * for each rule node. + * + * If you pass a filter, iteration will only happen over rules + * with matching selectors. + * + * Like `Container#each`, this method is safe + * to use if you are mutating arrays during iteration. + * + * ```js + * const selectors = [] + * root.walkRules(rule => { + * selectors.push(rule.selector) + * }) + * console.log(`Your CSS uses ${ selectors.length } selectors`) + * ``` + * + * @param selector String or regular expression to filter rules by selector. + * @param callback Iterator receives each node and index. + * @return Returns `false` if iteration was broke. + */ + walkRules( + selectorFilter: RegExp | string, + callback: (rule: Rule, index: number) => false | void + ): false | undefined + walkRules( + callback: (rule: Rule, index: number) => false | void + ): false | undefined + /** + * The container’s first child. 
+ * + * ```js + * rule.first === rules.nodes[0] + * ``` + */ + get first(): Child | undefined + /** + * The container’s last child. + * + * ```js + * rule.last === rule.nodes[rule.nodes.length - 1] + * ``` + */ + get last(): Child | undefined +} + +declare class Container< + Child extends Node = ChildNode +> extends Container_ {} + +export = Container diff --git a/node_modules/postcss/lib/container.js b/node_modules/postcss/lib/container.js new file mode 100644 index 0000000..8b9e1ef --- /dev/null +++ b/node_modules/postcss/lib/container.js @@ -0,0 +1,447 @@ +'use strict' + +let Comment = require('./comment') +let Declaration = require('./declaration') +let Node = require('./node') +let { isClean, my } = require('./symbols') + +let AtRule, parse, Root, Rule + +function cleanSource(nodes) { + return nodes.map(i => { + if (i.nodes) i.nodes = cleanSource(i.nodes) + delete i.source + return i + }) +} + +function markTreeDirty(node) { + node[isClean] = false + if (node.proxyOf.nodes) { + for (let i of node.proxyOf.nodes) { + markTreeDirty(i) + } + } +} + +class Container extends Node { + append(...children) { + for (let child of children) { + let nodes = this.normalize(child, this.last) + for (let node of nodes) this.proxyOf.nodes.push(node) + } + + this.markDirty() + + return this + } + + cleanRaws(keepBetween) { + super.cleanRaws(keepBetween) + if (this.nodes) { + for (let node of this.nodes) node.cleanRaws(keepBetween) + } + } + + each(callback) { + if (!this.proxyOf.nodes) return undefined + let iterator = this.getIterator() + + let index, result + while (this.indexes[iterator] < this.proxyOf.nodes.length) { + index = this.indexes[iterator] + result = callback(this.proxyOf.nodes[index], index) + if (result === false) break + + this.indexes[iterator] += 1 + } + + delete this.indexes[iterator] + return result + } + + every(condition) { + return this.nodes.every(condition) + } + + getIterator() { + if (!this.lastEach) this.lastEach = 0 + if (!this.indexes) this.indexes = {} + + this.lastEach += 1 + let iterator = this.lastEach + this.indexes[iterator] = 0 + + return iterator + } + + getProxyProcessor() { + return { + get(node, prop) { + if (prop === 'proxyOf') { + return node + } else if (!node[prop]) { + return node[prop] + } else if ( + prop === 'each' || + (typeof prop === 'string' && prop.startsWith('walk')) + ) { + return (...args) => { + return node[prop]( + ...args.map(i => { + if (typeof i === 'function') { + return (child, index) => i(child.toProxy(), index) + } else { + return i + } + }) + ) + } + } else if (prop === 'every' || prop === 'some') { + return cb => { + return node[prop]((child, ...other) => + cb(child.toProxy(), ...other) + ) + } + } else if (prop === 'root') { + return () => node.root().toProxy() + } else if (prop === 'nodes') { + return node.nodes.map(i => i.toProxy()) + } else if (prop === 'first' || prop === 'last') { + return node[prop].toProxy() + } else { + return node[prop] + } + }, + + set(node, prop, value) { + if (node[prop] === value) return true + node[prop] = value + if (prop === 'name' || prop === 'params' || prop === 'selector') { + node.markDirty() + } + return true + } + } + } + + index(child) { + if (typeof child === 'number') return child + if (child.proxyOf) child = child.proxyOf + return this.proxyOf.nodes.indexOf(child) + } + + insertAfter(exist, add) { + let existIndex = this.index(exist) + let nodes = this.normalize(add, this.proxyOf.nodes[existIndex]).reverse() + existIndex = this.index(exist) + for (let node of nodes) 
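/* `nodes` was reversed by the normalize(...).reverse() call above, so splicing each one at existIndex + 1 re-inserts the new nodes in their original order */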
this.proxyOf.nodes.splice(existIndex + 1, 0, node) + + let index + for (let id in this.indexes) { + index = this.indexes[id] + if (existIndex < index) { + this.indexes[id] = index + nodes.length + } + } + + this.markDirty() + + return this + } + + insertBefore(exist, add) { + let existIndex = this.index(exist) + let type = existIndex === 0 ? 'prepend' : false + let nodes = this.normalize( + add, + this.proxyOf.nodes[existIndex], + type + ).reverse() + existIndex = this.index(exist) + for (let node of nodes) this.proxyOf.nodes.splice(existIndex, 0, node) + + let index + for (let id in this.indexes) { + index = this.indexes[id] + if (existIndex <= index) { + this.indexes[id] = index + nodes.length + } + } + + this.markDirty() + + return this + } + + normalize(nodes, sample) { + if (typeof nodes === 'string') { + nodes = cleanSource(parse(nodes).nodes) + } else if (typeof nodes === 'undefined') { + nodes = [] + } else if (Array.isArray(nodes)) { + nodes = nodes.slice(0) + for (let i of nodes) { + if (i.parent) i.parent.removeChild(i, 'ignore') + } + } else if (nodes.type === 'root' && this.type !== 'document') { + nodes = nodes.nodes.slice(0) + for (let i of nodes) { + if (i.parent) i.parent.removeChild(i, 'ignore') + } + } else if (nodes.type) { + nodes = [nodes] + } else if (nodes.prop) { + if (typeof nodes.value === 'undefined') { + throw new Error('Value field is missed in node creation') + } else if (typeof nodes.value !== 'string') { + nodes.value = String(nodes.value) + } + nodes = [new Declaration(nodes)] + } else if (nodes.selector || nodes.selectors) { + nodes = [new Rule(nodes)] + } else if (nodes.name) { + nodes = [new AtRule(nodes)] + } else if (nodes.text) { + nodes = [new Comment(nodes)] + } else { + throw new Error('Unknown node type in node creation') + } + + let processed = nodes.map(i => { + /* c8 ignore next */ + if (!i[my]) Container.rebuild(i) + i = i.proxyOf + if (i.parent) i.parent.removeChild(i) + if (i[isClean]) markTreeDirty(i) + + if (!i.raws) i.raws = {} + if (typeof i.raws.before === 'undefined') { + if (sample && typeof sample.raws.before !== 'undefined') { + i.raws.before = sample.raws.before.replace(/\S/g, '') + } + } + i.parent = this.proxyOf + return i + }) + + return processed + } + + prepend(...children) { + children = children.reverse() + for (let child of children) { + let nodes = this.normalize(child, this.first, 'prepend').reverse() + for (let node of nodes) this.proxyOf.nodes.unshift(node) + for (let id in this.indexes) { + this.indexes[id] = this.indexes[id] + nodes.length + } + } + + this.markDirty() + + return this + } + + push(child) { + child.parent = this + this.proxyOf.nodes.push(child) + return this + } + + removeAll() { + for (let node of this.proxyOf.nodes) node.parent = undefined + this.proxyOf.nodes = [] + + this.markDirty() + + return this + } + + removeChild(child) { + child = this.index(child) + this.proxyOf.nodes[child].parent = undefined + this.proxyOf.nodes.splice(child, 1) + + let index + for (let id in this.indexes) { + index = this.indexes[id] + if (index >= child) { + this.indexes[id] = index - 1 + } + } + + this.markDirty() + + return this + } + + replaceValues(pattern, opts, callback) { + if (!callback) { + callback = opts + opts = {} + } + + this.walkDecls(decl => { + if (opts.props && !opts.props.includes(decl.prop)) return + if (opts.fast && !decl.value.includes(opts.fast)) return + + decl.value = decl.value.replace(pattern, callback) + }) + + this.markDirty() + + return this + } + + some(condition) { + return 
this.nodes.some(condition) + } + + walk(callback) { + return this.each((child, i) => { + let result + try { + result = callback(child, i) + } catch (e) { + throw child.addToError(e) + } + if (result !== false && child.walk) { + result = child.walk(callback) + } + + return result + }) + } + + walkAtRules(name, callback) { + if (!callback) { + callback = name + return this.walk((child, i) => { + if (child.type === 'atrule') { + return callback(child, i) + } + }) + } + if (name instanceof RegExp) { + return this.walk((child, i) => { + if (child.type === 'atrule' && name.test(child.name)) { + return callback(child, i) + } + }) + } + return this.walk((child, i) => { + if (child.type === 'atrule' && child.name === name) { + return callback(child, i) + } + }) + } + + walkComments(callback) { + return this.walk((child, i) => { + if (child.type === 'comment') { + return callback(child, i) + } + }) + } + + walkDecls(prop, callback) { + if (!callback) { + callback = prop + return this.walk((child, i) => { + if (child.type === 'decl') { + return callback(child, i) + } + }) + } + if (prop instanceof RegExp) { + return this.walk((child, i) => { + if (child.type === 'decl' && prop.test(child.prop)) { + return callback(child, i) + } + }) + } + return this.walk((child, i) => { + if (child.type === 'decl' && child.prop === prop) { + return callback(child, i) + } + }) + } + + walkRules(selector, callback) { + if (!callback) { + callback = selector + + return this.walk((child, i) => { + if (child.type === 'rule') { + return callback(child, i) + } + }) + } + if (selector instanceof RegExp) { + return this.walk((child, i) => { + if (child.type === 'rule' && selector.test(child.selector)) { + return callback(child, i) + } + }) + } + return this.walk((child, i) => { + if (child.type === 'rule' && child.selector === selector) { + return callback(child, i) + } + }) + } + + get first() { + if (!this.proxyOf.nodes) return undefined + return this.proxyOf.nodes[0] + } + + get last() { + if (!this.proxyOf.nodes) return undefined + return this.proxyOf.nodes[this.proxyOf.nodes.length - 1] + } +} + +Container.registerParse = dependant => { + parse = dependant +} + +Container.registerRule = dependant => { + Rule = dependant +} + +Container.registerAtRule = dependant => { + AtRule = dependant +} + +Container.registerRoot = dependant => { + Root = dependant +} + +module.exports = Container +Container.default = Container + +/* c8 ignore start */ +Container.rebuild = node => { + if (node.type === 'atrule') { + Object.setPrototypeOf(node, AtRule.prototype) + } else if (node.type === 'rule') { + Object.setPrototypeOf(node, Rule.prototype) + } else if (node.type === 'decl') { + Object.setPrototypeOf(node, Declaration.prototype) + } else if (node.type === 'comment') { + Object.setPrototypeOf(node, Comment.prototype) + } else if (node.type === 'root') { + Object.setPrototypeOf(node, Root.prototype) + } + + node[my] = true + + if (node.nodes) { + node.nodes.forEach(child => { + Container.rebuild(child) + }) + } +} +/* c8 ignore stop */ diff --git a/node_modules/postcss/lib/css-syntax-error.d.ts b/node_modules/postcss/lib/css-syntax-error.d.ts new file mode 100644 index 0000000..e540d84 --- /dev/null +++ b/node_modules/postcss/lib/css-syntax-error.d.ts @@ -0,0 +1,248 @@ +import { FilePosition } from './input.js' + +declare namespace CssSyntaxError { + /** + * A position that is part of a range. + */ + export interface RangePosition { + /** + * The column number in the input. 
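 * PostCSS positions are 1-based, so the very first character of the input is
 * `line: 1`, `column: 1` (consistent with the `error.column //=> 1` example below).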
+ */ + column: number + + /** + * The line number in the input. + */ + line: number + } + + // eslint-disable-next-line @typescript-eslint/no-use-before-define + export { CssSyntaxError_ as default } +} + +/** + * The CSS parser throws this error for broken CSS. + * + * Custom parsers can throw this error for broken custom syntax using + * the `Node#error` method. + * + * PostCSS will use the input source map to detect the original error location. + * If you wrote a Sass file, compiled it to CSS and then parsed it with PostCSS, + * PostCSS will show the original position in the Sass file. + * + * If you need the position in the PostCSS input + * (e.g., to debug the previous compiler), use `error.input.file`. + * + * ```js + * // Raising error from plugin + * throw node.error('Unknown variable', { plugin: 'postcss-vars' }) + * ``` + * + * ```js + * // Catching and checking syntax error + * try { + * postcss.parse('a{') + * } catch (error) { + * if (error.name === 'CssSyntaxError') { + * error //=> CssSyntaxError + * } + * } + * ``` + */ +declare class CssSyntaxError_ extends Error { + /** + * Source column of the error. + * + * ```js + * error.column //=> 1 + * error.input.column //=> 4 + * ``` + * + * PostCSS will use the input source map to detect the original location. + * If you need the position in the PostCSS input, use `error.input.column`. + */ + column?: number + + /** + * Source column of the error's end, exclusive. Provided if the error pertains + * to a range. + * + * ```js + * error.endColumn //=> 1 + * error.input.endColumn //=> 4 + * ``` + * + * PostCSS will use the input source map to detect the original location. + * If you need the position in the PostCSS input, use `error.input.endColumn`. + */ + endColumn?: number + + /** + * Source line of the error's end, exclusive. Provided if the error pertains + * to a range. + * + * ```js + * error.endLine //=> 3 + * error.input.endLine //=> 4 + * ``` + * + * PostCSS will use the input source map to detect the original location. + * If you need the position in the PostCSS input, use `error.input.endLine`. + */ + endLine?: number + + /** + * Absolute path to the broken file. + * + * ```js + * error.file //=> 'a.sass' + * error.input.file //=> 'a.css' + * ``` + * + * PostCSS will use the input source map to detect the original location. + * If you need the position in the PostCSS input, use `error.input.file`. + */ + file?: string + + /** + * Input object with PostCSS internal information + * about input file. If input has source map + * from previous tool, PostCSS will use origin + * (for example, Sass) source. You can use this + * object to get PostCSS input source. + * + * ```js + * error.input.file //=> 'a.css' + * error.file //=> 'a.sass' + * ``` + */ + input?: FilePosition + + /** + * Source line of the error. + * + * ```js + * error.line //=> 2 + * error.input.line //=> 4 + * ``` + * + * PostCSS will use the input source map to detect the original location. + * If you need the position in the PostCSS input, use `error.input.line`. + */ + line?: number + + /** + * Full error text in the GNU error format + * with plugin, file, line and column. + * + * ```js + * error.message //=> 'a.css:1:1: Unclosed block' + * ``` + */ + message: string + + /** + * Always equal to `'CssSyntaxError'`. You should always check error type + * by `error.name === 'CssSyntaxError'` + * instead of `error instanceof CssSyntaxError`, + * because npm could have several PostCSS versions. 
+ * + * ```js + * if (error.name === 'CssSyntaxError') { + * error //=> CssSyntaxError + * } + * ``` + */ + name: 'CssSyntaxError' + + /** + * Plugin name, if error came from plugin. + * + * ```js + * error.plugin //=> 'postcss-vars' + * ``` + */ + plugin?: string + + /** + * Error message. + * + * ```js + * error.message //=> 'Unclosed block' + * ``` + */ + reason: string + + /** + * Source code of the broken file. + * + * ```js + * error.source //=> 'a { b {} }' + * error.input.source //=> 'a b { }' + * ``` + */ + source?: string + + stack: string + + /** + * Instantiates a CSS syntax error. Can be instantiated for a single position + * or for a range. + * @param message Error message. + * @param lineOrStartPos If for a single position, the line number, or if for + * a range, the inclusive start position of the error. + * @param columnOrEndPos If for a single position, the column number, or if for + * a range, the exclusive end position of the error. + * @param source Source code of the broken file. + * @param file Absolute path to the broken file. + * @param plugin PostCSS plugin name, if error came from plugin. + */ + constructor( + message: string, + lineOrStartPos?: CssSyntaxError.RangePosition | number, + columnOrEndPos?: CssSyntaxError.RangePosition | number, + source?: string, + file?: string, + plugin?: string + ) + + /** + * Returns a few lines of CSS source that caused the error. + * + * If the CSS has an input source map without `sourceContent`, + * this method will return an empty string. + * + * ```js + * error.showSourceCode() //=> " 4 | } + * // 5 | a { + * // > 6 | bad + * // | ^ + * // 7 | } + * // 8 | b {" + * ``` + * + * @param color Whether arrow will be colored red by terminal + * color codes. By default, PostCSS will detect + * color support by `process.stdout.isTTY` + * and `process.env.NODE_DISABLE_COLORS`. + * @return Few lines of CSS source that caused the error. + */ + showSourceCode(color?: boolean): string + + /** + * Returns error position, message and source code of the broken part. + * + * ```js + * error.toString() //=> "CssSyntaxError: app.css:1:1: Unclosed block + * // > 1 | a { + * // | ^" + * ``` + * + * @return Error position, message and source code. + */ + toString(): string +} + +declare class CssSyntaxError extends CssSyntaxError_ {} + +export = CssSyntaxError diff --git a/node_modules/postcss/lib/css-syntax-error.js b/node_modules/postcss/lib/css-syntax-error.js new file mode 100644 index 0000000..275a4f6 --- /dev/null +++ b/node_modules/postcss/lib/css-syntax-error.js @@ -0,0 +1,133 @@ +'use strict' + +let pico = require('picocolors') + +let terminalHighlight = require('./terminal-highlight') + +class CssSyntaxError extends Error { + constructor(message, line, column, source, file, plugin) { + super(message) + this.name = 'CssSyntaxError' + this.reason = message + + if (file) { + this.file = file + } + if (source) { + this.source = source + } + if (plugin) { + this.plugin = plugin + } + if (typeof line !== 'undefined' && typeof column !== 'undefined') { + if (typeof line === 'number') { + this.line = line + this.column = column + } else { + this.line = line.line + this.column = line.column + this.endLine = column.line + this.endColumn = column.column + } + } + + this.setMessage() + + if (Error.captureStackTrace) { + Error.captureStackTrace(this, CssSyntaxError) + } + } + + setMessage() { + this.message = this.plugin ? this.plugin + ': ' : '' + this.message += this.file ? 
this.file : '' + if (typeof this.line !== 'undefined') { + this.message += ':' + this.line + ':' + this.column + } + this.message += ': ' + this.reason + } + + showSourceCode(color) { + if (!this.source) return '' + + let css = this.source + if (color == null) color = pico.isColorSupported + + let aside = text => text + let mark = text => text + let highlight = text => text + if (color) { + let { bold, gray, red } = pico.createColors(true) + mark = text => bold(red(text)) + aside = text => gray(text) + if (terminalHighlight) { + highlight = text => terminalHighlight(text) + } + } + + let lines = css.split(/\r?\n/) + let start = Math.max(this.line - 3, 0) + let end = Math.min(this.line + 2, lines.length) + let maxWidth = String(end).length + + return lines + .slice(start, end) + .map((line, index) => { + let number = start + 1 + index + let gutter = ' ' + (' ' + number).slice(-maxWidth) + ' | ' + if (number === this.line) { + if (line.length > 160) { + let padding = 20 + let subLineStart = Math.max(0, this.column - padding) + let subLineEnd = Math.max( + this.column + padding, + this.endColumn + padding + ) + let subLine = line.slice(subLineStart, subLineEnd) + + let spacing = + aside(gutter.replace(/\d/g, ' ')) + + line + .slice(0, Math.min(this.column - 1, padding - 1)) + .replace(/[^\t]/g, ' ') + + return ( + mark('>') + + aside(gutter) + + highlight(subLine) + + '\n ' + + spacing + + mark('^') + ) + } + + let spacing = + aside(gutter.replace(/\d/g, ' ')) + + line.slice(0, this.column - 1).replace(/[^\t]/g, ' ') + + return ( + mark('>') + + aside(gutter) + + highlight(line) + + '\n ' + + spacing + + mark('^') + ) + } + + return ' ' + aside(gutter) + highlight(line) + }) + .join('\n') + } + + toString() { + let code = this.showSourceCode() + if (code) { + code = '\n\n' + code + '\n' + } + return this.name + ': ' + this.message + code + } +} + +module.exports = CssSyntaxError +CssSyntaxError.default = CssSyntaxError diff --git a/node_modules/postcss/lib/declaration.d.ts b/node_modules/postcss/lib/declaration.d.ts new file mode 100644 index 0000000..e707ad6 --- /dev/null +++ b/node_modules/postcss/lib/declaration.d.ts @@ -0,0 +1,151 @@ +import { ContainerWithChildren } from './container.js' +import Node from './node.js' + +declare namespace Declaration { + export interface DeclarationRaws extends Record { + /** + * The space symbols before the node. It also stores `*` + * and `_` symbols before the declaration (IE hack). + */ + before?: string + + /** + * The symbols between the property and value for declarations. + */ + between?: string + + /** + * The content of the important statement, if it is not just `!important`. + */ + important?: string + + /** + * Declaration value with comments. + */ + value?: { + raw: string + value: string + } + } + + export interface DeclarationProps { + /** Whether the declaration has an `!important` annotation. */ + important?: boolean + /** Name of the declaration. */ + prop: string + /** Information used to generate byte-to-byte equal node string as it was in the origin input. */ + raws?: DeclarationRaws + /** Value of the declaration. 
*/ + value: string + } + + // eslint-disable-next-line @typescript-eslint/no-use-before-define + export { Declaration_ as default } +} + +/** + * It represents a class that handles + * [CSS declarations](https://developer.mozilla.org/en-US/docs/Web/CSS/Syntax#css_declarations) + * + * ```js + * Once (root, { Declaration }) { + * const color = new Declaration({ prop: 'color', value: 'black' }) + * root.append(color) + * } + * ``` + * + * ```js + * const root = postcss.parse('a { color: black }') + * const decl = root.first?.first + * + * decl.type //=> 'decl' + * decl.toString() //=> ' color: black' + * ``` + */ +declare class Declaration_ extends Node { + parent: ContainerWithChildren | undefined + raws: Declaration.DeclarationRaws + + type: 'decl' + + constructor(defaults?: Declaration.DeclarationProps) + assign(overrides: Declaration.DeclarationProps | object): this + + clone(overrides?: Partial): this + + cloneAfter(overrides?: Partial): this + + cloneBefore(overrides?: Partial): this + /** + * It represents a specificity of the declaration. + * + * If true, the CSS declaration will have an + * [important](https://developer.mozilla.org/en-US/docs/Web/CSS/important) + * specifier. + * + * ```js + * const root = postcss.parse('a { color: black !important; color: red }') + * + * root.first.first.important //=> true + * root.first.last.important //=> undefined + * ``` + */ + get important(): boolean + + set important(value: boolean) + /** + * The property name for a CSS declaration. + * + * ```js + * const root = postcss.parse('a { color: black }') + * const decl = root.first.first + * + * decl.prop //=> 'color' + * ``` + */ + get prop(): string + + set prop(value: string) + /** + * The property value for a CSS declaration. + * + * Any CSS comments inside the value string will be filtered out. + * CSS comments present in the source value will be available in + * the `raws` property. + * + * Assigning new `value` would ignore the comments in `raws` + * property while compiling node to string. + * + * ```js + * const root = postcss.parse('a { color: black }') + * const decl = root.first.first + * + * decl.value //=> 'black' + * ``` + */ + get value(): string + set value(value: string) + /** + * It represents a getter that returns `true` if a declaration starts with + * `--` or `$`, which are used to declare variables in CSS and SASS/SCSS. 
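A small sketch of the Declaration accessors documented above, assuming the standard PostCSS parser; the input CSS is illustrative.

```js
// Illustrative: reading prop, value and important from a parsed declaration.
const postcss = require('postcss')

const root = postcss.parse('a { color: black !important }')
const decl = root.first.first

decl.type      //=> 'decl'
decl.prop      //=> 'color'
decl.value     //=> 'black'
decl.important //=> true
```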
+ * + * ```js + * const root = postcss.parse(':root { --one: 1 }') + * const one = root.first.first + * + * one.variable //=> true + * ``` + * + * ```js + * const root = postcss.parse('$one: 1') + * const one = root.first + * + * one.variable //=> true + * ``` + */ + get variable(): boolean +} + +declare class Declaration extends Declaration_ {} + +export = Declaration diff --git a/node_modules/postcss/lib/declaration.js b/node_modules/postcss/lib/declaration.js new file mode 100644 index 0000000..a04bdec --- /dev/null +++ b/node_modules/postcss/lib/declaration.js @@ -0,0 +1,24 @@ +'use strict' + +let Node = require('./node') + +class Declaration extends Node { + constructor(defaults) { + if ( + defaults && + typeof defaults.value !== 'undefined' && + typeof defaults.value !== 'string' + ) { + defaults = { ...defaults, value: String(defaults.value) } + } + super(defaults) + this.type = 'decl' + } + + get variable() { + return this.prop.startsWith('--') || this.prop[0] === '$' + } +} + +module.exports = Declaration +Declaration.default = Declaration diff --git a/node_modules/postcss/lib/document.d.ts b/node_modules/postcss/lib/document.d.ts new file mode 100644 index 0000000..f9e8063 --- /dev/null +++ b/node_modules/postcss/lib/document.d.ts @@ -0,0 +1,69 @@ +import Container, { ContainerProps } from './container.js' +import { ProcessOptions } from './postcss.js' +import Result from './result.js' +import Root from './root.js' + +declare namespace Document { + export interface DocumentProps extends ContainerProps { + nodes?: readonly Root[] + + /** + * Information to generate byte-to-byte equal node string as it was + * in the origin input. + * + * Every parser saves its own properties. + */ + raws?: Record + } + + // eslint-disable-next-line @typescript-eslint/no-use-before-define + export { Document_ as default } +} + +/** + * Represents a file and contains all its parsed nodes. + * + * **Experimental:** some aspects of this node could change within minor + * or patch version releases. + * + * ```js + * const document = htmlParser( + * '' + * ) + * document.type //=> 'document' + * document.nodes.length //=> 2 + * ``` + */ +declare class Document_ extends Container { + nodes: Root[] + parent: undefined + type: 'document' + + constructor(defaults?: Document.DocumentProps) + + assign(overrides: Document.DocumentProps | object): this + clone(overrides?: Partial): this + cloneAfter(overrides?: Partial): this + cloneBefore(overrides?: Partial): this + + /** + * Returns a `Result` instance representing the document’s CSS roots. + * + * ```js + * const root1 = postcss.parse(css1, { from: 'a.css' }) + * const root2 = postcss.parse(css2, { from: 'b.css' }) + * const document = postcss.document() + * document.append(root1) + * document.append(root2) + * const result = document.toResult({ to: 'all.css', map: true }) + * ``` + * + * @param opts Options. + * @return Result with current document’s CSS. 
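For reference alongside the `Declaration` constructor shown above: a sketch of the value coercion it performs when a non-string value is passed; the numeric value here is illustrative.

```js
// Illustrative: non-string values are coerced to strings by the constructor.
const postcss = require('postcss')

const decl = new postcss.Declaration({ prop: 'z-index', value: 1 })
typeof decl.value //=> 'string'
decl.value        //=> '1'
decl.variable     //=> false ('z-index' starts with neither '--' nor '$')
```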
+ */ + toResult(options?: ProcessOptions): Result +} + +declare class Document extends Document_ {} + +export = Document diff --git a/node_modules/postcss/lib/document.js b/node_modules/postcss/lib/document.js new file mode 100644 index 0000000..4468991 --- /dev/null +++ b/node_modules/postcss/lib/document.js @@ -0,0 +1,33 @@ +'use strict' + +let Container = require('./container') + +let LazyResult, Processor + +class Document extends Container { + constructor(defaults) { + // type needs to be passed to super, otherwise child roots won't be normalized correctly + super({ type: 'document', ...defaults }) + + if (!this.nodes) { + this.nodes = [] + } + } + + toResult(opts = {}) { + let lazy = new LazyResult(new Processor(), this, opts) + + return lazy.stringify() + } +} + +Document.registerLazyResult = dependant => { + LazyResult = dependant +} + +Document.registerProcessor = dependant => { + Processor = dependant +} + +module.exports = Document +Document.default = Document diff --git a/node_modules/postcss/lib/fromJSON.d.ts b/node_modules/postcss/lib/fromJSON.d.ts new file mode 100644 index 0000000..e1deedb --- /dev/null +++ b/node_modules/postcss/lib/fromJSON.d.ts @@ -0,0 +1,9 @@ +import { JSONHydrator } from './postcss.js' + +interface FromJSON extends JSONHydrator { + default: FromJSON +} + +declare const fromJSON: FromJSON + +export = fromJSON diff --git a/node_modules/postcss/lib/fromJSON.js b/node_modules/postcss/lib/fromJSON.js new file mode 100644 index 0000000..c9ac1a8 --- /dev/null +++ b/node_modules/postcss/lib/fromJSON.js @@ -0,0 +1,54 @@ +'use strict' + +let AtRule = require('./at-rule') +let Comment = require('./comment') +let Declaration = require('./declaration') +let Input = require('./input') +let PreviousMap = require('./previous-map') +let Root = require('./root') +let Rule = require('./rule') + +function fromJSON(json, inputs) { + if (Array.isArray(json)) return json.map(n => fromJSON(n)) + + let { inputs: ownInputs, ...defaults } = json + if (ownInputs) { + inputs = [] + for (let input of ownInputs) { + let inputHydrated = { ...input, __proto__: Input.prototype } + if (inputHydrated.map) { + inputHydrated.map = { + ...inputHydrated.map, + __proto__: PreviousMap.prototype + } + } + inputs.push(inputHydrated) + } + } + if (defaults.nodes) { + defaults.nodes = json.nodes.map(n => fromJSON(n, inputs)) + } + if (defaults.source) { + let { inputId, ...source } = defaults.source + defaults.source = source + if (inputId != null) { + defaults.source.input = inputs[inputId] + } + } + if (defaults.type === 'root') { + return new Root(defaults) + } else if (defaults.type === 'decl') { + return new Declaration(defaults) + } else if (defaults.type === 'rule') { + return new Rule(defaults) + } else if (defaults.type === 'comment') { + return new Comment(defaults) + } else if (defaults.type === 'atrule') { + return new AtRule(defaults) + } else { + throw new Error('Unknown node type: ' + json.type) + } +} + +module.exports = fromJSON +fromJSON.default = fromJSON diff --git a/node_modules/postcss/lib/input.d.ts b/node_modules/postcss/lib/input.d.ts new file mode 100644 index 0000000..46ded09 --- /dev/null +++ b/node_modules/postcss/lib/input.d.ts @@ -0,0 +1,197 @@ +import { CssSyntaxError, ProcessOptions } from './postcss.js' +import PreviousMap from './previous-map.js' + +declare namespace Input { + export interface FilePosition { + /** + * Column of inclusive start position in source file. + */ + column: number + + /** + * Column of exclusive end position in source file. 
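A sketch of the JSON round trip that `fromJSON` above supports, assuming the top-level `postcss.fromJSON` export and `Node#toJSON`; the CSS and file name are illustrative.

```js
// Illustrative: serialize a parsed tree to plain data and hydrate it again.
const postcss = require('postcss')

const root = postcss.parse('a { color: black }', { from: 'a.css' })
const json = JSON.stringify(root.toJSON())       // plain data, e.g. for caching
const restored = postcss.fromJSON(JSON.parse(json))

restored.toString()       //=> 'a { color: black }'
restored.first.first.prop //=> 'color'
```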
+ */ + endColumn?: number + + /** + * Line of exclusive end position in source file. + */ + endLine?: number + + /** + * Absolute path to the source file. + */ + file?: string + + /** + * Line of inclusive start position in source file. + */ + line: number + + /** + * Source code. + */ + source?: string + + /** + * URL for the source file. + */ + url: string + } + + // eslint-disable-next-line @typescript-eslint/no-use-before-define + export { Input_ as default } +} + +/** + * Represents the source CSS. + * + * ```js + * const root = postcss.parse(css, { from: file }) + * const input = root.source.input + * ``` + */ +declare class Input_ { + /** + * Input CSS source. + * + * ```js + * const input = postcss.parse('a{}', { from: file }).input + * input.css //=> "a{}" + * ``` + */ + css: string + + /** + * The absolute path to the CSS source file defined + * with the `from` option. + * + * ```js + * const root = postcss.parse(css, { from: 'a.css' }) + * root.source.input.file //=> '/home/ai/a.css' + * ``` + */ + file?: string + + /** + * The flag to indicate whether or not the source code has Unicode BOM. + */ + hasBOM: boolean + + /** + * The unique ID of the CSS source. It will be created if `from` option + * is not provided (because PostCSS does not know the file path). + * + * ```js + * const root = postcss.parse(css) + * root.source.input.file //=> undefined + * root.source.input.id //=> "" + * ``` + */ + id?: string + + /** + * The input source map passed from a compilation step before PostCSS + * (for example, from Sass compiler). + * + * ```js + * root.source.input.map.consumer().sources //=> ['a.sass'] + * ``` + */ + map: PreviousMap + + /** + * @param css Input CSS source. + * @param opts Process options. + */ + constructor(css: string, opts?: ProcessOptions) + + error( + message: string, + start: + | { + column: number + line: number + } + | { + offset: number + }, + end: + | { + column: number + line: number + } + | { + offset: number + }, + opts?: { plugin?: CssSyntaxError['plugin'] } + ): CssSyntaxError + + /** + * Returns `CssSyntaxError` with information about the error and its position. + */ + error( + message: string, + line: number, + column: number, + opts?: { plugin?: CssSyntaxError['plugin'] } + ): CssSyntaxError + + error( + message: string, + offset: number, + opts?: { plugin?: CssSyntaxError['plugin'] } + ): CssSyntaxError + + /** + * Converts source offset to line and column. + * + * @param offset Source offset. + */ + fromOffset(offset: number): { col: number; line: number } | null + /** + * Reads the input source map and returns a symbol position + * in the input source (e.g., in a Sass file that was compiled + * to CSS before being passed to PostCSS). Optionally takes an + * end position, exclusive. + * + * ```js + * root.source.input.origin(1, 1) //=> { file: 'a.css', line: 3, column: 1 } + * root.source.input.origin(1, 1, 1, 4) + * //=> { file: 'a.css', line: 3, column: 1, endLine: 3, endColumn: 4 } + * ``` + * + * @param line Line for inclusive start position in input CSS. + * @param column Column for inclusive start position in input CSS. + * @param endLine Line for exclusive end position in input CSS. + * @param endColumn Column for exclusive end position in input CSS. + * + * @return Position in input source. + */ + origin( + line: number, + column: number, + endLine?: number, + endColumn?: number + ): false | Input.FilePosition + /** Converts this to a JSON-friendly object representation. */ + toJSON(): object + + /** + * The CSS source identifier. 
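A short sketch of reaching the `Input` object for a parsed tree and using `fromOffset`, as documented above; the input CSS is illustrative.

```js
// Illustrative: Input lives on root.source and can convert offsets to positions.
const postcss = require('postcss')

const root = postcss.parse('a {\n  color: black\n}', { from: 'a.css' })
const input = root.source.input

input.css           //=> 'a {\n  color: black\n}'
input.hasBOM        //=> false
input.from          //=> absolute path ending in 'a.css'
input.fromOffset(4) //=> { col: 1, line: 2 } (offset 4 starts the second line)
```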
Contains `Input#file` if the user + * set the `from` option, or `Input#id` if they did not. + * + * ```js + * const root = postcss.parse(css, { from: 'a.css' }) + * root.source.input.from //=> "/home/ai/a.css" + * + * const root = postcss.parse(css) + * root.source.input.from //=> "" + * ``` + */ + get from(): string +} + +declare class Input extends Input_ {} + +export = Input diff --git a/node_modules/postcss/lib/input.js b/node_modules/postcss/lib/input.js new file mode 100644 index 0000000..685bce7 --- /dev/null +++ b/node_modules/postcss/lib/input.js @@ -0,0 +1,248 @@ +'use strict' + +let { nanoid } = require('nanoid/non-secure') +let { isAbsolute, resolve } = require('path') +let { SourceMapConsumer, SourceMapGenerator } = require('source-map-js') +let { fileURLToPath, pathToFileURL } = require('url') + +let CssSyntaxError = require('./css-syntax-error') +let PreviousMap = require('./previous-map') +let terminalHighlight = require('./terminal-highlight') + +let fromOffsetCache = Symbol('fromOffsetCache') + +let sourceMapAvailable = Boolean(SourceMapConsumer && SourceMapGenerator) +let pathAvailable = Boolean(resolve && isAbsolute) + +class Input { + constructor(css, opts = {}) { + if ( + css === null || + typeof css === 'undefined' || + (typeof css === 'object' && !css.toString) + ) { + throw new Error(`PostCSS received ${css} instead of CSS string`) + } + + this.css = css.toString() + + if (this.css[0] === '\uFEFF' || this.css[0] === '\uFFFE') { + this.hasBOM = true + this.css = this.css.slice(1) + } else { + this.hasBOM = false + } + + if (opts.from) { + if ( + !pathAvailable || + /^\w+:\/\//.test(opts.from) || + isAbsolute(opts.from) + ) { + this.file = opts.from + } else { + this.file = resolve(opts.from) + } + } + + if (pathAvailable && sourceMapAvailable) { + let map = new PreviousMap(this.css, opts) + if (map.text) { + this.map = map + let file = map.consumer().file + if (!this.file && file) this.file = this.mapResolve(file) + } + } + + if (!this.file) { + this.id = '' + } + if (this.map) this.map.file = this.from + } + + error(message, line, column, opts = {}) { + let endColumn, endLine, result + + if (line && typeof line === 'object') { + let start = line + let end = column + if (typeof start.offset === 'number') { + let pos = this.fromOffset(start.offset) + line = pos.line + column = pos.col + } else { + line = start.line + column = start.column + } + if (typeof end.offset === 'number') { + let pos = this.fromOffset(end.offset) + endLine = pos.line + endColumn = pos.col + } else { + endLine = end.line + endColumn = end.column + } + } else if (!column) { + let pos = this.fromOffset(line) + line = pos.line + column = pos.col + } + + let origin = this.origin(line, column, endLine, endColumn) + if (origin) { + result = new CssSyntaxError( + message, + origin.endLine === undefined + ? origin.line + : { column: origin.column, line: origin.line }, + origin.endLine === undefined + ? origin.column + : { column: origin.endColumn, line: origin.endLine }, + origin.source, + origin.file, + opts.plugin + ) + } else { + result = new CssSyntaxError( + message, + endLine === undefined ? line : { column, line }, + endLine === undefined ? 
column : { column: endColumn, line: endLine }, + this.css, + this.file, + opts.plugin + ) + } + + result.input = { column, endColumn, endLine, line, source: this.css } + if (this.file) { + if (pathToFileURL) { + result.input.url = pathToFileURL(this.file).toString() + } + result.input.file = this.file + } + + return result + } + + fromOffset(offset) { + let lastLine, lineToIndex + if (!this[fromOffsetCache]) { + let lines = this.css.split('\n') + lineToIndex = new Array(lines.length) + let prevIndex = 0 + + for (let i = 0, l = lines.length; i < l; i++) { + lineToIndex[i] = prevIndex + prevIndex += lines[i].length + 1 + } + + this[fromOffsetCache] = lineToIndex + } else { + lineToIndex = this[fromOffsetCache] + } + lastLine = lineToIndex[lineToIndex.length - 1] + + let min = 0 + if (offset >= lastLine) { + min = lineToIndex.length - 1 + } else { + let max = lineToIndex.length - 2 + let mid + while (min < max) { + mid = min + ((max - min) >> 1) + if (offset < lineToIndex[mid]) { + max = mid - 1 + } else if (offset >= lineToIndex[mid + 1]) { + min = mid + 1 + } else { + min = mid + break + } + } + } + return { + col: offset - lineToIndex[min] + 1, + line: min + 1 + } + } + + mapResolve(file) { + if (/^\w+:\/\//.test(file)) { + return file + } + return resolve(this.map.consumer().sourceRoot || this.map.root || '.', file) + } + + origin(line, column, endLine, endColumn) { + if (!this.map) return false + let consumer = this.map.consumer() + + let from = consumer.originalPositionFor({ column, line }) + if (!from.source) return false + + let to + if (typeof endLine === 'number') { + to = consumer.originalPositionFor({ column: endColumn, line: endLine }) + } + + let fromUrl + + if (isAbsolute(from.source)) { + fromUrl = pathToFileURL(from.source) + } else { + fromUrl = new URL( + from.source, + this.map.consumer().sourceRoot || pathToFileURL(this.map.mapFile) + ) + } + + let result = { + column: from.column, + endColumn: to && to.column, + endLine: to && to.line, + line: from.line, + url: fromUrl.toString() + } + + if (fromUrl.protocol === 'file:') { + if (fileURLToPath) { + result.file = fileURLToPath(fromUrl) + } else { + /* c8 ignore next 2 */ + throw new Error(`file: protocol is not available in this PostCSS build`) + } + } + + let source = consumer.sourceContentFor(from.source) + if (source) result.source = source + + return result + } + + toJSON() { + let json = {} + for (let name of ['hasBOM', 'css', 'file', 'id']) { + if (this[name] != null) { + json[name] = this[name] + } + } + if (this.map) { + json.map = { ...this.map } + if (json.map.consumerCache) { + json.map.consumerCache = undefined + } + } + return json + } + + get from() { + return this.file || this.id + } +} + +module.exports = Input +Input.default = Input + +if (terminalHighlight && terminalHighlight.registerInput) { + terminalHighlight.registerInput(Input) +} diff --git a/node_modules/postcss/lib/lazy-result.d.ts b/node_modules/postcss/lib/lazy-result.d.ts new file mode 100644 index 0000000..dd291aa --- /dev/null +++ b/node_modules/postcss/lib/lazy-result.d.ts @@ -0,0 +1,190 @@ +import Document from './document.js' +import { SourceMap } from './postcss.js' +import Processor from './processor.js' +import Result, { Message, ResultOptions } from './result.js' +import Root from './root.js' +import Warning from './warning.js' + +declare namespace LazyResult { + // eslint-disable-next-line @typescript-eslint/no-use-before-define + export { LazyResult_ as default } +} + +/** + * A Promise proxy for the result of PostCSS 
transformations. + * + * A `LazyResult` instance is returned by `Processor#process`. + * + * ```js + * const lazy = postcss([autoprefixer]).process(css) + * ``` + */ +declare class LazyResult_ + implements PromiseLike> +{ + /** + * Processes input CSS through synchronous and asynchronous plugins + * and calls onRejected for each error thrown in any plugin. + * + * It implements standard Promise API. + * + * ```js + * postcss([autoprefixer]).process(css).then(result => { + * console.log(result.css) + * }).catch(error => { + * console.error(error) + * }) + * ``` + */ + catch: Promise>['catch'] + + /** + * Processes input CSS through synchronous and asynchronous plugins + * and calls onFinally on any error or when all plugins will finish work. + * + * It implements standard Promise API. + * + * ```js + * postcss([autoprefixer]).process(css).finally(() => { + * console.log('processing ended') + * }) + * ``` + */ + finally: Promise>['finally'] + + /** + * Processes input CSS through synchronous and asynchronous plugins + * and calls `onFulfilled` with a Result instance. If a plugin throws + * an error, the `onRejected` callback will be executed. + * + * It implements standard Promise API. + * + * ```js + * postcss([autoprefixer]).process(css, { from: cssPath }).then(result => { + * console.log(result.css) + * }) + * ``` + */ + then: Promise>['then'] + + /** + * @param processor Processor used for this transformation. + * @param css CSS to parse and transform. + * @param opts Options from the `Processor#process` or `Root#toResult`. + */ + constructor(processor: Processor, css: string, opts: ResultOptions) + + /** + * Run plugin in async way and return `Result`. + * + * @return Result with output content. + */ + async(): Promise> + + /** + * Run plugin in sync way and return `Result`. + * + * @return Result with output content. + */ + sync(): Result + + /** + * Alias for the `LazyResult#css` property. + * + * ```js + * lazy + '' === lazy.css + * ``` + * + * @return Output CSS. + */ + toString(): string + + /** + * Processes input CSS through synchronous plugins + * and calls `Result#warnings`. + * + * @return Warnings from plugins. + */ + warnings(): Warning[] + + /** + * An alias for the `css` property. Use it with syntaxes + * that generate non-CSS output. + * + * This property will only work with synchronous plugins. + * If the processor contains any asynchronous plugins + * it will throw an error. + * + * PostCSS runners should always use `LazyResult#then`. + */ + get content(): string + + /** + * Processes input CSS through synchronous plugins, converts `Root` + * to a CSS string and returns `Result#css`. + * + * This property will only work with synchronous plugins. + * If the processor contains any asynchronous plugins + * it will throw an error. + * + * PostCSS runners should always use `LazyResult#then`. + */ + get css(): string + + /** + * Processes input CSS through synchronous plugins + * and returns `Result#map`. + * + * This property will only work with synchronous plugins. + * If the processor contains any asynchronous plugins + * it will throw an error. + * + * PostCSS runners should always use `LazyResult#then`. + */ + get map(): SourceMap + + /** + * Processes input CSS through synchronous plugins + * and returns `Result#messages`. + * + * This property will only work with synchronous plugins. If the processor + * contains any asynchronous plugins it will throw an error. + * + * PostCSS runners should always use `LazyResult#then`. 
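A sketch of the async and sync entry points documented above; the no-op plugin is illustrative, standing in for any PostCSS plugin.

```js
// Illustrative: async vs. sync access to a LazyResult.
const postcss = require('postcss')

// A trivial synchronous plugin, just to have something in the pipeline.
const noopPlugin = { postcssPlugin: 'noop-example', Once() {} }

const lazy = postcss([noopPlugin]).process('a { color: black }', { from: 'app.css' })

// Preferred: wait for (possibly asynchronous) plugins to finish.
lazy.then(result => {
  console.log(result.css)        //=> 'a { color: black }'
  console.log(result.warnings()) //=> []
})

// lazy.sync() is only valid when every plugin is synchronous;
// with asynchronous plugins it throws instead.
```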
+ */ + get messages(): Message[] + + /** + * Options from the `Processor#process` call. + */ + get opts(): ResultOptions + + /** + * Returns a `Processor` instance, which will be used + * for CSS transformations. + */ + get processor(): Processor + + /** + * Processes input CSS through synchronous plugins + * and returns `Result#root`. + * + * This property will only work with synchronous plugins. If the processor + * contains any asynchronous plugins it will throw an error. + * + * PostCSS runners should always use `LazyResult#then`. + */ + get root(): RootNode + + /** + * Returns the default string description of an object. + * Required to implement the Promise interface. + */ + get [Symbol.toStringTag](): string +} + +declare class LazyResult< + RootNode = Document | Root +> extends LazyResult_ {} + +export = LazyResult diff --git a/node_modules/postcss/lib/lazy-result.js b/node_modules/postcss/lib/lazy-result.js new file mode 100644 index 0000000..e27701d --- /dev/null +++ b/node_modules/postcss/lib/lazy-result.js @@ -0,0 +1,550 @@ +'use strict' + +let Container = require('./container') +let Document = require('./document') +let MapGenerator = require('./map-generator') +let parse = require('./parse') +let Result = require('./result') +let Root = require('./root') +let stringify = require('./stringify') +let { isClean, my } = require('./symbols') +let warnOnce = require('./warn-once') + +const TYPE_TO_CLASS_NAME = { + atrule: 'AtRule', + comment: 'Comment', + decl: 'Declaration', + document: 'Document', + root: 'Root', + rule: 'Rule' +} + +const PLUGIN_PROPS = { + AtRule: true, + AtRuleExit: true, + Comment: true, + CommentExit: true, + Declaration: true, + DeclarationExit: true, + Document: true, + DocumentExit: true, + Once: true, + OnceExit: true, + postcssPlugin: true, + prepare: true, + Root: true, + RootExit: true, + Rule: true, + RuleExit: true +} + +const NOT_VISITORS = { + Once: true, + postcssPlugin: true, + prepare: true +} + +const CHILDREN = 0 + +function isPromise(obj) { + return typeof obj === 'object' && typeof obj.then === 'function' +} + +function getEvents(node) { + let key = false + let type = TYPE_TO_CLASS_NAME[node.type] + if (node.type === 'decl') { + key = node.prop.toLowerCase() + } else if (node.type === 'atrule') { + key = node.name.toLowerCase() + } + + if (key && node.append) { + return [ + type, + type + '-' + key, + CHILDREN, + type + 'Exit', + type + 'Exit-' + key + ] + } else if (key) { + return [type, type + '-' + key, type + 'Exit', type + 'Exit-' + key] + } else if (node.append) { + return [type, CHILDREN, type + 'Exit'] + } else { + return [type, type + 'Exit'] + } +} + +function toStack(node) { + let events + if (node.type === 'document') { + events = ['Document', CHILDREN, 'DocumentExit'] + } else if (node.type === 'root') { + events = ['Root', CHILDREN, 'RootExit'] + } else { + events = getEvents(node) + } + + return { + eventIndex: 0, + events, + iterator: 0, + node, + visitorIndex: 0, + visitors: [] + } +} + +function cleanMarks(node) { + node[isClean] = false + if (node.nodes) node.nodes.forEach(i => cleanMarks(i)) + return node +} + +let postcss = {} + +class LazyResult { + constructor(processor, css, opts) { + this.stringified = false + this.processed = false + + let root + if ( + typeof css === 'object' && + css !== null && + (css.type === 'root' || css.type === 'document') + ) { + root = cleanMarks(css) + } else if (css instanceof LazyResult || css instanceof Result) { + root = cleanMarks(css.root) + if (css.map) { + if (typeof opts.map 
=== 'undefined') opts.map = {} + if (!opts.map.inline) opts.map.inline = false + opts.map.prev = css.map + } + } else { + let parser = parse + if (opts.syntax) parser = opts.syntax.parse + if (opts.parser) parser = opts.parser + if (parser.parse) parser = parser.parse + + try { + root = parser(css, opts) + } catch (error) { + this.processed = true + this.error = error + } + + if (root && !root[my]) { + /* c8 ignore next 2 */ + Container.rebuild(root) + } + } + + this.result = new Result(processor, root, opts) + this.helpers = { ...postcss, postcss, result: this.result } + this.plugins = this.processor.plugins.map(plugin => { + if (typeof plugin === 'object' && plugin.prepare) { + return { ...plugin, ...plugin.prepare(this.result) } + } else { + return plugin + } + }) + } + + async() { + if (this.error) return Promise.reject(this.error) + if (this.processed) return Promise.resolve(this.result) + if (!this.processing) { + this.processing = this.runAsync() + } + return this.processing + } + + catch(onRejected) { + return this.async().catch(onRejected) + } + + finally(onFinally) { + return this.async().then(onFinally, onFinally) + } + + getAsyncError() { + throw new Error('Use process(css).then(cb) to work with async plugins') + } + + handleError(error, node) { + let plugin = this.result.lastPlugin + try { + if (node) node.addToError(error) + this.error = error + if (error.name === 'CssSyntaxError' && !error.plugin) { + error.plugin = plugin.postcssPlugin + error.setMessage() + } else if (plugin.postcssVersion) { + if (process.env.NODE_ENV !== 'production') { + let pluginName = plugin.postcssPlugin + let pluginVer = plugin.postcssVersion + let runtimeVer = this.result.processor.version + let a = pluginVer.split('.') + let b = runtimeVer.split('.') + + if (a[0] !== b[0] || parseInt(a[1]) > parseInt(b[1])) { + // eslint-disable-next-line no-console + console.error( + 'Unknown error from PostCSS plugin. Your current PostCSS ' + + 'version is ' + + runtimeVer + + ', but ' + + pluginName + + ' uses ' + + pluginVer + + '. Perhaps this is the source of the error below.' + ) + } + } + } + } catch (err) { + /* c8 ignore next 3 */ + // eslint-disable-next-line no-console + if (console && console.error) console.error(err) + } + return error + } + + prepareVisitors() { + this.listeners = {} + let add = (plugin, type, cb) => { + if (!this.listeners[type]) this.listeners[type] = [] + this.listeners[type].push([plugin, cb]) + } + for (let plugin of this.plugins) { + if (typeof plugin === 'object') { + for (let event in plugin) { + if (!PLUGIN_PROPS[event] && /^[A-Z]/.test(event)) { + throw new Error( + `Unknown event ${event} in ${plugin.postcssPlugin}. 
` + + `Try to update PostCSS (${this.processor.version} now).` + ) + } + if (!NOT_VISITORS[event]) { + if (typeof plugin[event] === 'object') { + for (let filter in plugin[event]) { + if (filter === '*') { + add(plugin, event, plugin[event][filter]) + } else { + add( + plugin, + event + '-' + filter.toLowerCase(), + plugin[event][filter] + ) + } + } + } else if (typeof plugin[event] === 'function') { + add(plugin, event, plugin[event]) + } + } + } + } + } + this.hasListener = Object.keys(this.listeners).length > 0 + } + + async runAsync() { + this.plugin = 0 + for (let i = 0; i < this.plugins.length; i++) { + let plugin = this.plugins[i] + let promise = this.runOnRoot(plugin) + if (isPromise(promise)) { + try { + await promise + } catch (error) { + throw this.handleError(error) + } + } + } + + this.prepareVisitors() + if (this.hasListener) { + let root = this.result.root + while (!root[isClean]) { + root[isClean] = true + let stack = [toStack(root)] + while (stack.length > 0) { + let promise = this.visitTick(stack) + if (isPromise(promise)) { + try { + await promise + } catch (e) { + let node = stack[stack.length - 1].node + throw this.handleError(e, node) + } + } + } + } + + if (this.listeners.OnceExit) { + for (let [plugin, visitor] of this.listeners.OnceExit) { + this.result.lastPlugin = plugin + try { + if (root.type === 'document') { + let roots = root.nodes.map(subRoot => + visitor(subRoot, this.helpers) + ) + + await Promise.all(roots) + } else { + await visitor(root, this.helpers) + } + } catch (e) { + throw this.handleError(e) + } + } + } + } + + this.processed = true + return this.stringify() + } + + runOnRoot(plugin) { + this.result.lastPlugin = plugin + try { + if (typeof plugin === 'object' && plugin.Once) { + if (this.result.root.type === 'document') { + let roots = this.result.root.nodes.map(root => + plugin.Once(root, this.helpers) + ) + + if (isPromise(roots[0])) { + return Promise.all(roots) + } + + return roots + } + + return plugin.Once(this.result.root, this.helpers) + } else if (typeof plugin === 'function') { + return plugin(this.result.root, this.result) + } + } catch (error) { + throw this.handleError(error) + } + } + + stringify() { + if (this.error) throw this.error + if (this.stringified) return this.result + this.stringified = true + + this.sync() + + let opts = this.result.opts + let str = stringify + if (opts.syntax) str = opts.syntax.stringify + if (opts.stringifier) str = opts.stringifier + if (str.stringify) str = str.stringify + + let map = new MapGenerator(str, this.result.root, this.result.opts) + let data = map.generate() + this.result.css = data[0] + this.result.map = data[1] + + return this.result + } + + sync() { + if (this.error) throw this.error + if (this.processed) return this.result + this.processed = true + + if (this.processing) { + throw this.getAsyncError() + } + + for (let plugin of this.plugins) { + let promise = this.runOnRoot(plugin) + if (isPromise(promise)) { + throw this.getAsyncError() + } + } + + this.prepareVisitors() + if (this.hasListener) { + let root = this.result.root + while (!root[isClean]) { + root[isClean] = true + this.walkSync(root) + } + if (this.listeners.OnceExit) { + if (root.type === 'document') { + for (let subRoot of root.nodes) { + this.visitSync(this.listeners.OnceExit, subRoot) + } + } else { + this.visitSync(this.listeners.OnceExit, root) + } + } + } + + return this.result + } + + then(onFulfilled, onRejected) { + if (process.env.NODE_ENV !== 'production') { + if (!('from' in this.opts)) { + warnOnce( + 
'Without `from` option PostCSS could generate wrong source map ' + + 'and will not find Browserslist config. Set it to CSS file path ' + + 'or to `undefined` to prevent this warning.' + ) + } + } + return this.async().then(onFulfilled, onRejected) + } + + toString() { + return this.css + } + + visitSync(visitors, node) { + for (let [plugin, visitor] of visitors) { + this.result.lastPlugin = plugin + let promise + try { + promise = visitor(node, this.helpers) + } catch (e) { + throw this.handleError(e, node.proxyOf) + } + if (node.type !== 'root' && node.type !== 'document' && !node.parent) { + return true + } + if (isPromise(promise)) { + throw this.getAsyncError() + } + } + } + + visitTick(stack) { + let visit = stack[stack.length - 1] + let { node, visitors } = visit + + if (node.type !== 'root' && node.type !== 'document' && !node.parent) { + stack.pop() + return + } + + if (visitors.length > 0 && visit.visitorIndex < visitors.length) { + let [plugin, visitor] = visitors[visit.visitorIndex] + visit.visitorIndex += 1 + if (visit.visitorIndex === visitors.length) { + visit.visitors = [] + visit.visitorIndex = 0 + } + this.result.lastPlugin = plugin + try { + return visitor(node.toProxy(), this.helpers) + } catch (e) { + throw this.handleError(e, node) + } + } + + if (visit.iterator !== 0) { + let iterator = visit.iterator + let child + while ((child = node.nodes[node.indexes[iterator]])) { + node.indexes[iterator] += 1 + if (!child[isClean]) { + child[isClean] = true + stack.push(toStack(child)) + return + } + } + visit.iterator = 0 + delete node.indexes[iterator] + } + + let events = visit.events + while (visit.eventIndex < events.length) { + let event = events[visit.eventIndex] + visit.eventIndex += 1 + if (event === CHILDREN) { + if (node.nodes && node.nodes.length) { + node[isClean] = true + visit.iterator = node.getIterator() + } + return + } else if (this.listeners[event]) { + visit.visitors = this.listeners[event] + return + } + } + stack.pop() + } + + walkSync(node) { + node[isClean] = true + let events = getEvents(node) + for (let event of events) { + if (event === CHILDREN) { + if (node.nodes) { + node.each(child => { + if (!child[isClean]) this.walkSync(child) + }) + } + } else { + let visitors = this.listeners[event] + if (visitors) { + if (this.visitSync(visitors, node.toProxy())) return + } + } + } + } + + warnings() { + return this.sync().warnings() + } + + get content() { + return this.stringify().content + } + + get css() { + return this.stringify().css + } + + get map() { + return this.stringify().map + } + + get messages() { + return this.sync().messages + } + + get opts() { + return this.result.opts + } + + get processor() { + return this.result.processor + } + + get root() { + return this.sync().root + } + + get [Symbol.toStringTag]() { + return 'LazyResult' + } +} + +LazyResult.registerPostcss = dependant => { + postcss = dependant +} + +module.exports = LazyResult +LazyResult.default = LazyResult + +Root.registerLazyResult(LazyResult) +Document.registerLazyResult(LazyResult) diff --git a/node_modules/postcss/lib/list.d.ts b/node_modules/postcss/lib/list.d.ts new file mode 100644 index 0000000..e262ad3 --- /dev/null +++ b/node_modules/postcss/lib/list.d.ts @@ -0,0 +1,60 @@ +declare namespace list { + type List = { + /** + * Safely splits comma-separated values (such as those for `transition-*` + * and `background` properties). 
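For reference alongside the `LazyResult` internals above: a sketch of the visitor shape that `prepareVisitors` registers and `getEvents` dispatches (a nested `color` listener is stored under the `Declaration-color` event); the plugin name and CSS are illustrative.

```js
// Illustrative: a visitor plugin whose nested listener only runs for `color` declarations.
const blackenColors = {
  postcssPlugin: 'blacken-colors-example',
  Declaration: {
    color(decl) {
      decl.value = 'black'
    }
  }
}

// postcss([blackenColors]).process('a { color: red }', { from: undefined })
//   .then(result => result.css) //=> 'a { color: black }'
```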
+ * + * ```js + * Once (root, { list }) { + * list.comma('black, linear-gradient(white, black)') + * //=> ['black', 'linear-gradient(white, black)'] + * } + * ``` + * + * @param str Comma-separated values. + * @return Split values. + */ + comma(str: string): string[] + + default: List + + /** + * Safely splits space-separated values (such as those for `background`, + * `border-radius`, and other shorthand properties). + * + * ```js + * Once (root, { list }) { + * list.space('1px calc(10% + 1px)') //=> ['1px', 'calc(10% + 1px)'] + * } + * ``` + * + * @param str Space-separated values. + * @return Split values. + */ + space(str: string): string[] + + /** + * Safely splits values. + * + * ```js + * Once (root, { list }) { + * list.split('1px calc(10% + 1px)', [' ', '\n', '\t']) //=> ['1px', 'calc(10% + 1px)'] + * } + * ``` + * + * @param string separated values. + * @param separators array of separators. + * @param last boolean indicator. + * @return Split values. + */ + split( + string: string, + separators: readonly string[], + last: boolean + ): string[] + } +} + +declare const list: list.List + +export = list diff --git a/node_modules/postcss/lib/list.js b/node_modules/postcss/lib/list.js new file mode 100644 index 0000000..1b31f98 --- /dev/null +++ b/node_modules/postcss/lib/list.js @@ -0,0 +1,58 @@ +'use strict' + +let list = { + comma(string) { + return list.split(string, [','], true) + }, + + space(string) { + let spaces = [' ', '\n', '\t'] + return list.split(string, spaces) + }, + + split(string, separators, last) { + let array = [] + let current = '' + let split = false + + let func = 0 + let inQuote = false + let prevQuote = '' + let escape = false + + for (let letter of string) { + if (escape) { + escape = false + } else if (letter === '\\') { + escape = true + } else if (inQuote) { + if (letter === prevQuote) { + inQuote = false + } + } else if (letter === '"' || letter === "'") { + inQuote = true + prevQuote = letter + } else if (letter === '(') { + func += 1 + } else if (letter === ')') { + if (func > 0) func -= 1 + } else if (func === 0) { + if (separators.includes(letter)) split = true + } + + if (split) { + if (current !== '') array.push(current.trim()) + current = '' + split = false + } else { + current += letter + } + } + + if (last || current !== '') array.push(current.trim()) + return array + } +} + +module.exports = list +list.default = list diff --git a/node_modules/postcss/lib/map-generator.js b/node_modules/postcss/lib/map-generator.js new file mode 100644 index 0000000..89069d3 --- /dev/null +++ b/node_modules/postcss/lib/map-generator.js @@ -0,0 +1,368 @@ +'use strict' + +let { dirname, relative, resolve, sep } = require('path') +let { SourceMapConsumer, SourceMapGenerator } = require('source-map-js') +let { pathToFileURL } = require('url') + +let Input = require('./input') + +let sourceMapAvailable = Boolean(SourceMapConsumer && SourceMapGenerator) +let pathAvailable = Boolean(dirname && resolve && relative && sep) + +class MapGenerator { + constructor(stringify, root, opts, cssString) { + this.stringify = stringify + this.mapOpts = opts.map || {} + this.root = root + this.opts = opts + this.css = cssString + this.originalCSS = cssString + this.usesFileUrls = !this.mapOpts.from && this.mapOpts.absolute + + this.memoizedFileURLs = new Map() + this.memoizedPaths = new Map() + this.memoizedURLs = new Map() + } + + addAnnotation() { + let content + + if (this.isInline()) { + content = + 'data:application/json;base64,' + this.toBase64(this.map.toString()) + } else if 
(typeof this.mapOpts.annotation === 'string') { + content = this.mapOpts.annotation + } else if (typeof this.mapOpts.annotation === 'function') { + content = this.mapOpts.annotation(this.opts.to, this.root) + } else { + content = this.outputFile() + '.map' + } + let eol = '\n' + if (this.css.includes('\r\n')) eol = '\r\n' + + this.css += eol + '/*# sourceMappingURL=' + content + ' */' + } + + applyPrevMaps() { + for (let prev of this.previous()) { + let from = this.toUrl(this.path(prev.file)) + let root = prev.root || dirname(prev.file) + let map + + if (this.mapOpts.sourcesContent === false) { + map = new SourceMapConsumer(prev.text) + if (map.sourcesContent) { + map.sourcesContent = null + } + } else { + map = prev.consumer() + } + + this.map.applySourceMap(map, from, this.toUrl(this.path(root))) + } + } + + clearAnnotation() { + if (this.mapOpts.annotation === false) return + + if (this.root) { + let node + for (let i = this.root.nodes.length - 1; i >= 0; i--) { + node = this.root.nodes[i] + if (node.type !== 'comment') continue + if (node.text.startsWith('# sourceMappingURL=')) { + this.root.removeChild(i) + } + } + } else if (this.css) { + this.css = this.css.replace(/\n*\/\*#[\S\s]*?\*\/$/gm, '') + } + } + + generate() { + this.clearAnnotation() + if (pathAvailable && sourceMapAvailable && this.isMap()) { + return this.generateMap() + } else { + let result = '' + this.stringify(this.root, i => { + result += i + }) + return [result] + } + } + + generateMap() { + if (this.root) { + this.generateString() + } else if (this.previous().length === 1) { + let prev = this.previous()[0].consumer() + prev.file = this.outputFile() + this.map = SourceMapGenerator.fromSourceMap(prev, { + ignoreInvalidMapping: true + }) + } else { + this.map = new SourceMapGenerator({ + file: this.outputFile(), + ignoreInvalidMapping: true + }) + this.map.addMapping({ + generated: { column: 0, line: 1 }, + original: { column: 0, line: 1 }, + source: this.opts.from + ? 
this.toUrl(this.path(this.opts.from)) + : '' + }) + } + + if (this.isSourcesContent()) this.setSourcesContent() + if (this.root && this.previous().length > 0) this.applyPrevMaps() + if (this.isAnnotation()) this.addAnnotation() + + if (this.isInline()) { + return [this.css] + } else { + return [this.css, this.map] + } + } + + generateString() { + this.css = '' + this.map = new SourceMapGenerator({ + file: this.outputFile(), + ignoreInvalidMapping: true + }) + + let line = 1 + let column = 1 + + let noSource = '' + let mapping = { + generated: { column: 0, line: 0 }, + original: { column: 0, line: 0 }, + source: '' + } + + let last, lines + this.stringify(this.root, (str, node, type) => { + this.css += str + + if (node && type !== 'end') { + mapping.generated.line = line + mapping.generated.column = column - 1 + if (node.source && node.source.start) { + mapping.source = this.sourcePath(node) + mapping.original.line = node.source.start.line + mapping.original.column = node.source.start.column - 1 + this.map.addMapping(mapping) + } else { + mapping.source = noSource + mapping.original.line = 1 + mapping.original.column = 0 + this.map.addMapping(mapping) + } + } + + lines = str.match(/\n/g) + if (lines) { + line += lines.length + last = str.lastIndexOf('\n') + column = str.length - last + } else { + column += str.length + } + + if (node && type !== 'start') { + let p = node.parent || { raws: {} } + let childless = + node.type === 'decl' || (node.type === 'atrule' && !node.nodes) + if (!childless || node !== p.last || p.raws.semicolon) { + if (node.source && node.source.end) { + mapping.source = this.sourcePath(node) + mapping.original.line = node.source.end.line + mapping.original.column = node.source.end.column - 1 + mapping.generated.line = line + mapping.generated.column = column - 2 + this.map.addMapping(mapping) + } else { + mapping.source = noSource + mapping.original.line = 1 + mapping.original.column = 0 + mapping.generated.line = line + mapping.generated.column = column - 1 + this.map.addMapping(mapping) + } + } + } + }) + } + + isAnnotation() { + if (this.isInline()) { + return true + } + if (typeof this.mapOpts.annotation !== 'undefined') { + return this.mapOpts.annotation + } + if (this.previous().length) { + return this.previous().some(i => i.annotation) + } + return true + } + + isInline() { + if (typeof this.mapOpts.inline !== 'undefined') { + return this.mapOpts.inline + } + + let annotation = this.mapOpts.annotation + if (typeof annotation !== 'undefined' && annotation !== true) { + return false + } + + if (this.previous().length) { + return this.previous().some(i => i.inline) + } + return true + } + + isMap() { + if (typeof this.opts.map !== 'undefined') { + return !!this.opts.map + } + return this.previous().length > 0 + } + + isSourcesContent() { + if (typeof this.mapOpts.sourcesContent !== 'undefined') { + return this.mapOpts.sourcesContent + } + if (this.previous().length) { + return this.previous().some(i => i.withContent()) + } + return true + } + + outputFile() { + if (this.opts.to) { + return this.path(this.opts.to) + } else if (this.opts.from) { + return this.path(this.opts.from) + } else { + return 'to.css' + } + } + + path(file) { + if (this.mapOpts.absolute) return file + if (file.charCodeAt(0) === 60 /* `<` */) return file + if (/^\w+:\/\//.test(file)) return file + let cached = this.memoizedPaths.get(file) + if (cached) return cached + + let from = this.opts.to ? dirname(this.opts.to) : '.' 
+ + if (typeof this.mapOpts.annotation === 'string') { + from = dirname(resolve(from, this.mapOpts.annotation)) + } + + let path = relative(from, file) + this.memoizedPaths.set(file, path) + + return path + } + + previous() { + if (!this.previousMaps) { + this.previousMaps = [] + if (this.root) { + this.root.walk(node => { + if (node.source && node.source.input.map) { + let map = node.source.input.map + if (!this.previousMaps.includes(map)) { + this.previousMaps.push(map) + } + } + }) + } else { + let input = new Input(this.originalCSS, this.opts) + if (input.map) this.previousMaps.push(input.map) + } + } + + return this.previousMaps + } + + setSourcesContent() { + let already = {} + if (this.root) { + this.root.walk(node => { + if (node.source) { + let from = node.source.input.from + if (from && !already[from]) { + already[from] = true + let fromUrl = this.usesFileUrls + ? this.toFileUrl(from) + : this.toUrl(this.path(from)) + this.map.setSourceContent(fromUrl, node.source.input.css) + } + } + }) + } else if (this.css) { + let from = this.opts.from + ? this.toUrl(this.path(this.opts.from)) + : '' + this.map.setSourceContent(from, this.css) + } + } + + sourcePath(node) { + if (this.mapOpts.from) { + return this.toUrl(this.mapOpts.from) + } else if (this.usesFileUrls) { + return this.toFileUrl(node.source.input.from) + } else { + return this.toUrl(this.path(node.source.input.from)) + } + } + + toBase64(str) { + if (Buffer) { + return Buffer.from(str).toString('base64') + } else { + return window.btoa(unescape(encodeURIComponent(str))) + } + } + + toFileUrl(path) { + let cached = this.memoizedFileURLs.get(path) + if (cached) return cached + + if (pathToFileURL) { + let fileURL = pathToFileURL(path).toString() + this.memoizedFileURLs.set(path, fileURL) + + return fileURL + } else { + throw new Error( + '`map.absolute` option is not available in this PostCSS build' + ) + } + } + + toUrl(path) { + let cached = this.memoizedURLs.get(path) + if (cached) return cached + + if (sep === '\\') { + path = path.replace(/\\/g, '/') + } + + let url = encodeURI(path).replace(/[#?]/g, encodeURIComponent) + this.memoizedURLs.set(path, url) + + return url + } +} + +module.exports = MapGenerator diff --git a/node_modules/postcss/lib/no-work-result.d.ts b/node_modules/postcss/lib/no-work-result.d.ts new file mode 100644 index 0000000..8039076 --- /dev/null +++ b/node_modules/postcss/lib/no-work-result.d.ts @@ -0,0 +1,46 @@ +import LazyResult from './lazy-result.js' +import { SourceMap } from './postcss.js' +import Processor from './processor.js' +import Result, { Message, ResultOptions } from './result.js' +import Root from './root.js' +import Warning from './warning.js' + +declare namespace NoWorkResult { + // eslint-disable-next-line @typescript-eslint/no-use-before-define + export { NoWorkResult_ as default } +} + +/** + * A Promise proxy for the result of PostCSS transformations. + * This lazy result instance doesn't parse css unless `NoWorkResult#root` or `Result#root` + * are accessed. See the example below for details. + * A `NoWork` instance is returned by `Processor#process` ONLY when no plugins defined. + * + * ```js + * const noWorkResult = postcss().process(css) // No plugins are defined. 
+ * // CSS is not parsed + * let root = noWorkResult.root // now css is parsed because we accessed the root + * ``` + */ +declare class NoWorkResult_ implements LazyResult { + catch: Promise>['catch'] + finally: Promise>['finally'] + then: Promise>['then'] + constructor(processor: Processor, css: string, opts: ResultOptions) + async(): Promise> + sync(): Result + toString(): string + warnings(): Warning[] + get content(): string + get css(): string + get map(): SourceMap + get messages(): Message[] + get opts(): ResultOptions + get processor(): Processor + get root(): Root + get [Symbol.toStringTag](): string +} + +declare class NoWorkResult extends NoWorkResult_ {} + +export = NoWorkResult diff --git a/node_modules/postcss/lib/no-work-result.js b/node_modules/postcss/lib/no-work-result.js new file mode 100644 index 0000000..a84d843 --- /dev/null +++ b/node_modules/postcss/lib/no-work-result.js @@ -0,0 +1,138 @@ +'use strict' + +let MapGenerator = require('./map-generator') +let parse = require('./parse') +const Result = require('./result') +let stringify = require('./stringify') +let warnOnce = require('./warn-once') + +class NoWorkResult { + constructor(processor, css, opts) { + css = css.toString() + this.stringified = false + + this._processor = processor + this._css = css + this._opts = opts + this._map = undefined + let root + + let str = stringify + this.result = new Result(this._processor, root, this._opts) + this.result.css = css + + let self = this + Object.defineProperty(this.result, 'root', { + get() { + return self.root + } + }) + + let map = new MapGenerator(str, root, this._opts, css) + if (map.isMap()) { + let [generatedCSS, generatedMap] = map.generate() + if (generatedCSS) { + this.result.css = generatedCSS + } + if (generatedMap) { + this.result.map = generatedMap + } + } else { + map.clearAnnotation() + this.result.css = map.css + } + } + + async() { + if (this.error) return Promise.reject(this.error) + return Promise.resolve(this.result) + } + + catch(onRejected) { + return this.async().catch(onRejected) + } + + finally(onFinally) { + return this.async().then(onFinally, onFinally) + } + + sync() { + if (this.error) throw this.error + return this.result + } + + then(onFulfilled, onRejected) { + if (process.env.NODE_ENV !== 'production') { + if (!('from' in this._opts)) { + warnOnce( + 'Without `from` option PostCSS could generate wrong source map ' + + 'and will not find Browserslist config. Set it to CSS file path ' + + 'or to `undefined` to prevent this warning.' 
+ ) + } + } + + return this.async().then(onFulfilled, onRejected) + } + + toString() { + return this._css + } + + warnings() { + return [] + } + + get content() { + return this.result.css + } + + get css() { + return this.result.css + } + + get map() { + return this.result.map + } + + get messages() { + return [] + } + + get opts() { + return this.result.opts + } + + get processor() { + return this.result.processor + } + + get root() { + if (this._root) { + return this._root + } + + let root + let parser = parse + + try { + root = parser(this._css, this._opts) + } catch (error) { + this.error = error + } + + if (this.error) { + throw this.error + } else { + this._root = root + return root + } + } + + get [Symbol.toStringTag]() { + return 'NoWorkResult' + } +} + +module.exports = NoWorkResult +NoWorkResult.default = NoWorkResult diff --git a/node_modules/postcss/lib/node.d.ts b/node_modules/postcss/lib/node.d.ts new file mode 100644 index 0000000..3597670 --- /dev/null +++ b/node_modules/postcss/lib/node.d.ts @@ -0,0 +1,541 @@ +import AtRule = require('./at-rule.js') + +import { AtRuleProps } from './at-rule.js' +import Comment, { CommentProps } from './comment.js' +import Container, { NewChild } from './container.js' +import CssSyntaxError from './css-syntax-error.js' +import Declaration, { DeclarationProps } from './declaration.js' +import Document from './document.js' +import Input from './input.js' +import { Stringifier, Syntax } from './postcss.js' +import Result from './result.js' +import Root from './root.js' +import Rule, { RuleProps } from './rule.js' +import Warning, { WarningOptions } from './warning.js' + +declare namespace Node { + export type ChildNode = AtRule.default | Comment | Declaration | Rule + + export type AnyNode = + | AtRule.default + | Comment + | Declaration + | Document + | Root + | Rule + + export type ChildProps = + | AtRuleProps + | CommentProps + | DeclarationProps + | RuleProps + + export interface Position { + /** + * Source line in file. In contrast to `offset` it starts from 1. + */ + column: number + + /** + * Source column in file. + */ + line: number + + /** + * Source offset in file. It starts from 0. + */ + offset: number + } + + export interface Range { + /** + * End position, exclusive. + */ + end: Position + + /** + * Start position, inclusive. + */ + start: Position + } + + /** + * Source represents an interface for the {@link Node.source} property. + */ + export interface Source { + /** + * The inclusive ending position for the source + * code of a node. + */ + end?: Position + + /** + * The source file from where a node has originated. + */ + input: Input + + /** + * The inclusive starting position for the source + * code of a node. + */ + start?: Position + } + + /** + * Interface represents an interface for an object received + * as parameter by Node class constructor. + */ + export interface NodeProps { + source?: Source + } + + export interface NodeErrorOptions { + /** + * An ending index inside a node's string that should be highlighted as + * source of error. + */ + endIndex?: number + /** + * An index inside a node's string that should be highlighted as source + * of error. + */ + index?: number + /** + * Plugin name that created this error. PostCSS will set it automatically. + */ + plugin?: string + /** + * A word inside a node's string, that should be highlighted as source + * of error. 
+ */ + word?: string + } + + // eslint-disable-next-line @typescript-eslint/no-shadow + class Node extends Node_ {} + export { Node as default } +} + +/** + * It represents an abstract class that handles common + * methods for other CSS abstract syntax tree nodes. + * + * Any node that represents CSS selector or value should + * not extend the `Node` class. + */ +declare abstract class Node_ { + /** + * It represents parent of the current node. + * + * ```js + * root.nodes[0].parent === root //=> true + * ``` + */ + parent: Container | Document | undefined + + /** + * It represents unnecessary whitespace and characters present + * in the css source code. + * + * Information to generate byte-to-byte equal node string as it was + * in the origin input. + * + * The properties of the raws object are decided by parser, + * the default parser uses the following properties: + * + * * `before`: the space symbols before the node. It also stores `*` + * and `_` symbols before the declaration (IE hack). + * * `after`: the space symbols after the last child of the node + * to the end of the node. + * * `between`: the symbols between the property and value + * for declarations, selector and `{` for rules, or last parameter + * and `{` for at-rules. + * * `semicolon`: contains true if the last child has + * an (optional) semicolon. + * * `afterName`: the space between the at-rule name and its parameters. + * * `left`: the space symbols between `/*` and the comment’s text. + * * `right`: the space symbols between the comment’s text + * and */. + * - `important`: the content of the important statement, + * if it is not just `!important`. + * + * PostCSS filters out the comments inside selectors, declaration values + * and at-rule parameters but it stores the origin content in raws. + * + * ```js + * const root = postcss.parse('a {\n color:black\n}') + * root.first.first.raws //=> { before: '\n ', between: ':' } + * ``` + */ + raws: any + + /** + * It represents information related to origin of a node and is required + * for generating source maps. + * + * The nodes that are created manually using the public APIs + * provided by PostCSS will have `source` undefined and + * will be absent in the source map. + * + * For this reason, the plugin developer should consider + * duplicating nodes as the duplicate node will have the + * same source as the original node by default or assign + * source to a node created manually. + * + * ```js + * decl.source.input.from //=> '/home/ai/source.css' + * decl.source.start //=> { line: 10, column: 2 } + * decl.source.end //=> { line: 10, column: 12 } + * ``` + * + * ```js + * // Incorrect method, source not specified! + * const prefixed = postcss.decl({ + * prop: '-moz-' + decl.prop, + * value: decl.value + * }) + * + * // Correct method, source is inherited when duplicating. + * const prefixed = decl.clone({ + * prop: '-moz-' + decl.prop + * }) + * ``` + * + * ```js + * if (atrule.name === 'add-link') { + * const rule = postcss.rule({ + * selector: 'a', + * source: atrule.source + * }) + * + * atrule.parent.insertBefore(atrule, rule) + * } + * ``` + */ + source?: Node.Source + + /** + * It represents type of a node in + * an abstract syntax tree. + * + * A type of node helps in identification of a node + * and perform operation based on it's type. 
+ * + * ```js + * const declaration = new Declaration({ + * prop: 'color', + * value: 'black' + * }) + * + * declaration.type //=> 'decl' + * ``` + */ + type: string + + constructor(defaults?: object) + + /** + * If this node isn't already dirty, marks it and its ancestors as such. This + * indicates to the LazyResult processor that the {@link Root} has been + * modified by the current plugin and may need to be processed again by other + * plugins. + */ + protected markDirty(): void + + /** + * Insert new node after current node to current node’s parent. + * + * Just alias for `node.parent.insertAfter(node, add)`. + * + * ```js + * decl.after('color: black') + * ``` + * + * @param newNode New node. + * @return This node for methods chain. + */ + after( + newNode: Node | Node.ChildProps | readonly Node[] | string | undefined + ): this + + /** + * It assigns properties to an existing node instance. + * + * ```js + * decl.assign({ prop: 'word-wrap', value: 'break-word' }) + * ``` + * + * @param overrides New properties to override the node. + * + * @return `this` for method chaining. + */ + assign(overrides: object): this + + /** + * Insert new node before current node to current node’s parent. + * + * Just alias for `node.parent.insertBefore(node, add)`. + * + * ```js + * decl.before('content: ""') + * ``` + * + * @param newNode New node. + * @return This node for methods chain. + */ + before( + newNode: Node | Node.ChildProps | readonly Node[] | string | undefined + ): this + + /** + * Clear the code style properties for the node and its children. + * + * ```js + * node.raws.before //=> ' ' + * node.cleanRaws() + * node.raws.before //=> undefined + * ``` + * + * @param keepBetween Keep the `raws.between` symbols. + */ + cleanRaws(keepBetween?: boolean): void + + /** + * It creates clone of an existing node, which includes all the properties + * and their values, that includes `raws` but not `type`. + * + * ```js + * decl.raws.before //=> "\n " + * const cloned = decl.clone({ prop: '-moz-' + decl.prop }) + * cloned.raws.before //=> "\n " + * cloned.toString() //=> -moz-transform: scale(0) + * ``` + * + * @param overrides New properties to override in the clone. + * + * @return Duplicate of the node instance. + */ + clone(overrides?: object): this + + /** + * Shortcut to clone the node and insert the resulting cloned node + * after the current node. + * + * @param overrides New properties to override in the clone. + * @return New node. + */ + cloneAfter(overrides?: object): this + + /** + * Shortcut to clone the node and insert the resulting cloned node + * before the current node. + * + * ```js + * decl.cloneBefore({ prop: '-moz-' + decl.prop }) + * ``` + * + * @param overrides Mew properties to override in the clone. + * + * @return New node + */ + cloneBefore(overrides?: object): this + + /** + * It creates an instance of the class `CssSyntaxError` and parameters passed + * to this method are assigned to the error instance. + * + * The error instance will have description for the + * error, original position of the node in the + * source, showing line and column number. + * + * If any previous map is present, it would be used + * to get original position of the source. + * + * The Previous Map here is referred to the source map + * generated by previous compilation, example: Less, + * Stylus and Sass. + * + * This method returns the error instance instead of + * throwing it. 
+ * + * ```js + * if (!variables[name]) { + * throw decl.error(`Unknown variable ${name}`, { word: name }) + * // CssSyntaxError: postcss-vars:a.sass:4:3: Unknown variable $black + * // color: $black + * // a + * // ^ + * // background: white + * } + * ``` + * + * @param message Description for the error instance. + * @param options Options for the error instance. + * + * @return Error instance is returned. + */ + error(message: string, options?: Node.NodeErrorOptions): CssSyntaxError + + /** + * Returns the next child of the node’s parent. + * Returns `undefined` if the current node is the last child. + * + * ```js + * if (comment.text === 'delete next') { + * const next = comment.next() + * if (next) { + * next.remove() + * } + * } + * ``` + * + * @return Next node. + */ + next(): Node.ChildNode | undefined + + /** + * Get the position for a word or an index inside the node. + * + * @param opts Options. + * @return Position. + */ + positionBy(opts?: Pick): Node.Position + + /** + * Convert string index to line/column. + * + * @param index The symbol number in the node’s string. + * @return Symbol position in file. + */ + positionInside(index: number): Node.Position + + /** + * Returns the previous child of the node’s parent. + * Returns `undefined` if the current node is the first child. + * + * ```js + * const annotation = decl.prev() + * if (annotation.type === 'comment') { + * readAnnotation(annotation.text) + * } + * ``` + * + * @return Previous node. + */ + prev(): Node.ChildNode | undefined + + /** + * Get the range for a word or start and end index inside the node. + * The start index is inclusive; the end index is exclusive. + * + * @param opts Options. + * @return Range. + */ + rangeBy( + opts?: Pick + ): Node.Range + + /** + * Returns a `raws` value. If the node is missing + * the code style property (because the node was manually built or cloned), + * PostCSS will try to autodetect the code style property by looking + * at other nodes in the tree. + * + * ```js + * const root = postcss.parse('a { background: white }') + * root.nodes[0].append({ prop: 'color', value: 'black' }) + * root.nodes[0].nodes[1].raws.before //=> undefined + * root.nodes[0].nodes[1].raw('before') //=> ' ' + * ``` + * + * @param prop Name of code style property. + * @param defaultType Name of default value, it can be missed + * if the value is the same as prop. + * @return {string} Code style value. + */ + raw(prop: string, defaultType?: string): string + + /** + * It removes the node from its parent and deletes its parent property. + * + * ```js + * if (decl.prop.match(/^-webkit-/)) { + * decl.remove() + * } + * ``` + * + * @return `this` for method chaining. + */ + remove(): this + + /** + * Inserts node(s) before the current node and removes the current node. + * + * ```js + * AtRule: { + * mixin: atrule => { + * atrule.replaceWith(mixinRules[atrule.params]) + * } + * } + * ``` + * + * @param nodes Mode(s) to replace current one. + * @return Current node to methods chain. + */ + replaceWith(...nodes: NewChild[]): this + + /** + * Finds the Root instance of the node’s tree. + * + * ```js + * root.nodes[0].nodes[0].root() === root + * ``` + * + * @return Root parent. + */ + root(): Root + + /** + * Fix circular links on `JSON.stringify()`. + * + * @return Cleaned object. + */ + toJSON(): object + + /** + * It compiles the node to browser readable cascading style sheets string + * depending on it's type. 
+ * + * ```js + * new Rule({ selector: 'a' }).toString() //=> "a {}" + * ``` + * + * @param stringifier A syntax to use in string generation. + * @return CSS string of this node. + */ + toString(stringifier?: Stringifier | Syntax): string + + /** + * It is a wrapper for {@link Result#warn}, providing convenient + * way of generating warnings. + * + * ```js + * Declaration: { + * bad: (decl, { result }) => { + * decl.warn(result, 'Deprecated property: bad') + * } + * } + * ``` + * + * @param result The `Result` instance that will receive the warning. + * @param message Description for the warning. + * @param options Options for the warning. + * + * @return `Warning` instance is returned + */ + warn(result: Result, message: string, options?: WarningOptions): Warning +} + +declare class Node extends Node_ {} + +export = Node diff --git a/node_modules/postcss/lib/node.js b/node_modules/postcss/lib/node.js new file mode 100644 index 0000000..88f382a --- /dev/null +++ b/node_modules/postcss/lib/node.js @@ -0,0 +1,389 @@ +'use strict' + +let CssSyntaxError = require('./css-syntax-error') +let Stringifier = require('./stringifier') +let stringify = require('./stringify') +let { isClean, my } = require('./symbols') + +function cloneNode(obj, parent) { + let cloned = new obj.constructor() + + for (let i in obj) { + if (!Object.prototype.hasOwnProperty.call(obj, i)) { + /* c8 ignore next 2 */ + continue + } + if (i === 'proxyCache') continue + let value = obj[i] + let type = typeof value + + if (i === 'parent' && type === 'object') { + if (parent) cloned[i] = parent + } else if (i === 'source') { + cloned[i] = value + } else if (Array.isArray(value)) { + cloned[i] = value.map(j => cloneNode(j, cloned)) + } else { + if (type === 'object' && value !== null) value = cloneNode(value) + cloned[i] = value + } + } + + return cloned +} + +class Node { + constructor(defaults = {}) { + this.raws = {} + this[isClean] = false + this[my] = true + + for (let name in defaults) { + if (name === 'nodes') { + this.nodes = [] + for (let node of defaults[name]) { + if (typeof node.clone === 'function') { + this.append(node.clone()) + } else { + this.append(node) + } + } + } else { + this[name] = defaults[name] + } + } + } + + addToError(error) { + error.postcssNode = this + if (error.stack && this.source && /\n\s{4}at /.test(error.stack)) { + let s = this.source + error.stack = error.stack.replace( + /\n\s{4}at /, + `$&${s.input.from}:${s.start.line}:${s.start.column}$&` + ) + } + return error + } + + after(add) { + this.parent.insertAfter(this, add) + return this + } + + assign(overrides = {}) { + for (let name in overrides) { + this[name] = overrides[name] + } + return this + } + + before(add) { + this.parent.insertBefore(this, add) + return this + } + + cleanRaws(keepBetween) { + delete this.raws.before + delete this.raws.after + if (!keepBetween) delete this.raws.between + } + + clone(overrides = {}) { + let cloned = cloneNode(this) + for (let name in overrides) { + cloned[name] = overrides[name] + } + return cloned + } + + cloneAfter(overrides = {}) { + let cloned = this.clone(overrides) + this.parent.insertAfter(this, cloned) + return cloned + } + + cloneBefore(overrides = {}) { + let cloned = this.clone(overrides) + this.parent.insertBefore(this, cloned) + return cloned + } + + error(message, opts = {}) { + if (this.source) { + let { end, start } = this.rangeBy(opts) + return this.source.input.error( + message, + { column: start.column, line: start.line }, + { column: end.column, line: end.line }, + opts + ) + } 
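+ // Nodes without `source` (e.g. built manually through the public API) can
+ // only produce a CssSyntaxError without file or position information.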
+ return new CssSyntaxError(message) + } + + getProxyProcessor() { + return { + get(node, prop) { + if (prop === 'proxyOf') { + return node + } else if (prop === 'root') { + return () => node.root().toProxy() + } else { + return node[prop] + } + }, + + set(node, prop, value) { + if (node[prop] === value) return true + node[prop] = value + if ( + prop === 'prop' || + prop === 'value' || + prop === 'name' || + prop === 'params' || + prop === 'important' || + /* c8 ignore next */ + prop === 'text' + ) { + node.markDirty() + } + return true + } + } + } + + /* c8 ignore next 3 */ + markClean() { + this[isClean] = true + } + + markDirty() { + if (this[isClean]) { + this[isClean] = false + let next = this + while ((next = next.parent)) { + next[isClean] = false + } + } + } + + next() { + if (!this.parent) return undefined + let index = this.parent.index(this) + return this.parent.nodes[index + 1] + } + + positionBy(opts, stringRepresentation) { + let pos = this.source.start + if (opts.index) { + pos = this.positionInside(opts.index, stringRepresentation) + } else if (opts.word) { + stringRepresentation = this.toString() + let index = stringRepresentation.indexOf(opts.word) + if (index !== -1) pos = this.positionInside(index, stringRepresentation) + } + return pos + } + + positionInside(index, stringRepresentation) { + let string = stringRepresentation || this.toString() + let column = this.source.start.column + let line = this.source.start.line + + for (let i = 0; i < index; i++) { + if (string[i] === '\n') { + column = 1 + line += 1 + } else { + column += 1 + } + } + + return { column, line } + } + + prev() { + if (!this.parent) return undefined + let index = this.parent.index(this) + return this.parent.nodes[index - 1] + } + + rangeBy(opts) { + let start = { + column: this.source.start.column, + line: this.source.start.line + } + let end = this.source.end + ? 
{ + column: this.source.end.column + 1, + line: this.source.end.line + } + : { + column: start.column + 1, + line: start.line + } + + if (opts.word) { + let stringRepresentation = this.toString() + let index = stringRepresentation.indexOf(opts.word) + if (index !== -1) { + start = this.positionInside(index, stringRepresentation) + end = this.positionInside( + index + opts.word.length, + stringRepresentation + ) + } + } else { + if (opts.start) { + start = { + column: opts.start.column, + line: opts.start.line + } + } else if (opts.index) { + start = this.positionInside(opts.index) + } + + if (opts.end) { + end = { + column: opts.end.column, + line: opts.end.line + } + } else if (typeof opts.endIndex === 'number') { + end = this.positionInside(opts.endIndex) + } else if (opts.index) { + end = this.positionInside(opts.index + 1) + } + } + + if ( + end.line < start.line || + (end.line === start.line && end.column <= start.column) + ) { + end = { column: start.column + 1, line: start.line } + } + + return { end, start } + } + + raw(prop, defaultType) { + let str = new Stringifier() + return str.raw(this, prop, defaultType) + } + + remove() { + if (this.parent) { + this.parent.removeChild(this) + } + this.parent = undefined + return this + } + + replaceWith(...nodes) { + if (this.parent) { + let bookmark = this + let foundSelf = false + for (let node of nodes) { + if (node === this) { + foundSelf = true + } else if (foundSelf) { + this.parent.insertAfter(bookmark, node) + bookmark = node + } else { + this.parent.insertBefore(bookmark, node) + } + } + + if (!foundSelf) { + this.remove() + } + } + + return this + } + + root() { + let result = this + while (result.parent && result.parent.type !== 'document') { + result = result.parent + } + return result + } + + toJSON(_, inputs) { + let fixed = {} + let emitInputs = inputs == null + inputs = inputs || new Map() + let inputsNextIndex = 0 + + for (let name in this) { + if (!Object.prototype.hasOwnProperty.call(this, name)) { + /* c8 ignore next 2 */ + continue + } + if (name === 'parent' || name === 'proxyCache') continue + let value = this[name] + + if (Array.isArray(value)) { + fixed[name] = value.map(i => { + if (typeof i === 'object' && i.toJSON) { + return i.toJSON(null, inputs) + } else { + return i + } + }) + } else if (typeof value === 'object' && value.toJSON) { + fixed[name] = value.toJSON(null, inputs) + } else if (name === 'source') { + let inputId = inputs.get(value.input) + if (inputId == null) { + inputId = inputsNextIndex + inputs.set(value.input, inputsNextIndex) + inputsNextIndex++ + } + fixed[name] = { + end: value.end, + inputId, + start: value.start + } + } else { + fixed[name] = value + } + } + + if (emitInputs) { + fixed.inputs = [...inputs.keys()].map(input => input.toJSON()) + } + + return fixed + } + + toProxy() { + if (!this.proxyCache) { + this.proxyCache = new Proxy(this, this.getProxyProcessor()) + } + return this.proxyCache + } + + toString(stringifier = stringify) { + if (stringifier.stringify) stringifier = stringifier.stringify + let result = '' + stringifier(this, i => { + result += i + }) + return result + } + + warn(result, text, opts) { + let data = { node: this } + for (let i in opts) data[i] = opts[i] + return result.warn(text, data) + } + + get proxyOf() { + return this + } +} + +module.exports = Node +Node.default = Node diff --git a/node_modules/postcss/lib/parse.d.ts b/node_modules/postcss/lib/parse.d.ts new file mode 100644 index 0000000..4c943a4 --- /dev/null +++ b/node_modules/postcss/lib/parse.d.ts @@ 
-0,0 +1,9 @@ +import { Parser } from './postcss.js' + +interface Parse extends Parser { + default: Parse +} + +declare const parse: Parse + +export = parse diff --git a/node_modules/postcss/lib/parse.js b/node_modules/postcss/lib/parse.js new file mode 100644 index 0000000..00a1037 --- /dev/null +++ b/node_modules/postcss/lib/parse.js @@ -0,0 +1,42 @@ +'use strict' + +let Container = require('./container') +let Input = require('./input') +let Parser = require('./parser') + +function parse(css, opts) { + let input = new Input(css, opts) + let parser = new Parser(input) + try { + parser.parse() + } catch (e) { + if (process.env.NODE_ENV !== 'production') { + if (e.name === 'CssSyntaxError' && opts && opts.from) { + if (/\.scss$/i.test(opts.from)) { + e.message += + '\nYou tried to parse SCSS with ' + + 'the standard CSS parser; ' + + 'try again with the postcss-scss parser' + } else if (/\.sass/i.test(opts.from)) { + e.message += + '\nYou tried to parse Sass with ' + + 'the standard CSS parser; ' + + 'try again with the postcss-sass parser' + } else if (/\.less$/i.test(opts.from)) { + e.message += + '\nYou tried to parse Less with ' + + 'the standard CSS parser; ' + + 'try again with the postcss-less parser' + } + } + } + throw e + } + + return parser.root +} + +module.exports = parse +parse.default = parse + +Container.registerParse(parse) diff --git a/node_modules/postcss/lib/parser.js b/node_modules/postcss/lib/parser.js new file mode 100644 index 0000000..8a04411 --- /dev/null +++ b/node_modules/postcss/lib/parser.js @@ -0,0 +1,609 @@ +'use strict' + +let AtRule = require('./at-rule') +let Comment = require('./comment') +let Declaration = require('./declaration') +let Root = require('./root') +let Rule = require('./rule') +let tokenizer = require('./tokenize') + +const SAFE_COMMENT_NEIGHBOR = { + empty: true, + space: true +} + +function findLastWithPosition(tokens) { + for (let i = tokens.length - 1; i >= 0; i--) { + let token = tokens[i] + let pos = token[3] || token[2] + if (pos) return pos + } +} + +class Parser { + constructor(input) { + this.input = input + + this.root = new Root() + this.current = this.root + this.spaces = '' + this.semicolon = false + + this.createTokenizer() + this.root.source = { input, start: { column: 1, line: 1, offset: 0 } } + } + + atrule(token) { + let node = new AtRule() + node.name = token[1].slice(1) + if (node.name === '') { + this.unnamedAtrule(node, token) + } + this.init(node, token[2]) + + let type + let prev + let shift + let last = false + let open = false + let params = [] + let brackets = [] + + while (!this.tokenizer.endOfFile()) { + token = this.tokenizer.nextToken() + type = token[0] + + if (type === '(' || type === '[') { + brackets.push(type === '(' ? 
')' : ']') + } else if (type === '{' && brackets.length > 0) { + brackets.push('}') + } else if (type === brackets[brackets.length - 1]) { + brackets.pop() + } + + if (brackets.length === 0) { + if (type === ';') { + node.source.end = this.getPosition(token[2]) + node.source.end.offset++ + this.semicolon = true + break + } else if (type === '{') { + open = true + break + } else if (type === '}') { + if (params.length > 0) { + shift = params.length - 1 + prev = params[shift] + while (prev && prev[0] === 'space') { + prev = params[--shift] + } + if (prev) { + node.source.end = this.getPosition(prev[3] || prev[2]) + node.source.end.offset++ + } + } + this.end(token) + break + } else { + params.push(token) + } + } else { + params.push(token) + } + + if (this.tokenizer.endOfFile()) { + last = true + break + } + } + + node.raws.between = this.spacesAndCommentsFromEnd(params) + if (params.length) { + node.raws.afterName = this.spacesAndCommentsFromStart(params) + this.raw(node, 'params', params) + if (last) { + token = params[params.length - 1] + node.source.end = this.getPosition(token[3] || token[2]) + node.source.end.offset++ + this.spaces = node.raws.between + node.raws.between = '' + } + } else { + node.raws.afterName = '' + node.params = '' + } + + if (open) { + node.nodes = [] + this.current = node + } + } + + checkMissedSemicolon(tokens) { + let colon = this.colon(tokens) + if (colon === false) return + + let founded = 0 + let token + for (let j = colon - 1; j >= 0; j--) { + token = tokens[j] + if (token[0] !== 'space') { + founded += 1 + if (founded === 2) break + } + } + // If the token is a word, e.g. `!important`, `red` or any other valid property's value. + // Then we need to return the colon after that word token. [3] is the "end" colon of that word. + // And because we need it after that one we do +1 to get the next one. + throw this.input.error( + 'Missed semicolon', + token[0] === 'word' ? 
token[3] + 1 : token[2] + ) + } + + colon(tokens) { + let brackets = 0 + let prev, token, type + for (let [i, element] of tokens.entries()) { + token = element + type = token[0] + + if (type === '(') { + brackets += 1 + } + if (type === ')') { + brackets -= 1 + } + if (brackets === 0 && type === ':') { + if (!prev) { + this.doubleColon(token) + } else if (prev[0] === 'word' && prev[1] === 'progid') { + continue + } else { + return i + } + } + + prev = token + } + return false + } + + comment(token) { + let node = new Comment() + this.init(node, token[2]) + node.source.end = this.getPosition(token[3] || token[2]) + node.source.end.offset++ + + let text = token[1].slice(2, -2) + if (/^\s*$/.test(text)) { + node.text = '' + node.raws.left = text + node.raws.right = '' + } else { + let match = text.match(/^(\s*)([^]*\S)(\s*)$/) + node.text = match[2] + node.raws.left = match[1] + node.raws.right = match[3] + } + } + + createTokenizer() { + this.tokenizer = tokenizer(this.input) + } + + decl(tokens, customProperty) { + let node = new Declaration() + this.init(node, tokens[0][2]) + + let last = tokens[tokens.length - 1] + if (last[0] === ';') { + this.semicolon = true + tokens.pop() + } + + node.source.end = this.getPosition( + last[3] || last[2] || findLastWithPosition(tokens) + ) + node.source.end.offset++ + + while (tokens[0][0] !== 'word') { + if (tokens.length === 1) this.unknownWord(tokens) + node.raws.before += tokens.shift()[1] + } + node.source.start = this.getPosition(tokens[0][2]) + + node.prop = '' + while (tokens.length) { + let type = tokens[0][0] + if (type === ':' || type === 'space' || type === 'comment') { + break + } + node.prop += tokens.shift()[1] + } + + node.raws.between = '' + + let token + while (tokens.length) { + token = tokens.shift() + + if (token[0] === ':') { + node.raws.between += token[1] + break + } else { + if (token[0] === 'word' && /\w/.test(token[1])) { + this.unknownWord([token]) + } + node.raws.between += token[1] + } + } + + if (node.prop[0] === '_' || node.prop[0] === '*') { + node.raws.before += node.prop[0] + node.prop = node.prop.slice(1) + } + + let firstSpaces = [] + let next + while (tokens.length) { + next = tokens[0][0] + if (next !== 'space' && next !== 'comment') break + firstSpaces.push(tokens.shift()) + } + + this.precheckMissedSemicolon(tokens) + + for (let i = tokens.length - 1; i >= 0; i--) { + token = tokens[i] + if (token[1].toLowerCase() === '!important') { + node.important = true + let string = this.stringFrom(tokens, i) + string = this.spacesFromEnd(tokens) + string + if (string !== ' !important') node.raws.important = string + break + } else if (token[1].toLowerCase() === 'important') { + let cache = tokens.slice(0) + let str = '' + for (let j = i; j > 0; j--) { + let type = cache[j][0] + if (str.trim().startsWith('!') && type !== 'space') { + break + } + str = cache.pop()[1] + str + } + if (str.trim().startsWith('!')) { + node.important = true + node.raws.important = str + tokens = cache + } + } + + if (token[0] !== 'space' && token[0] !== 'comment') { + break + } + } + + let hasWord = tokens.some(i => i[0] !== 'space' && i[0] !== 'comment') + + if (hasWord) { + node.raws.between += firstSpaces.map(i => i[1]).join('') + firstSpaces = [] + } + this.raw(node, 'value', firstSpaces.concat(tokens), customProperty) + + if (node.value.includes(':') && !customProperty) { + this.checkMissedSemicolon(tokens) + } + } + + doubleColon(token) { + throw this.input.error( + 'Double colon', + { offset: token[2] }, + { offset: token[2] + 
token[1].length } + ) + } + + emptyRule(token) { + let node = new Rule() + this.init(node, token[2]) + node.selector = '' + node.raws.between = '' + this.current = node + } + + end(token) { + if (this.current.nodes && this.current.nodes.length) { + this.current.raws.semicolon = this.semicolon + } + this.semicolon = false + + this.current.raws.after = (this.current.raws.after || '') + this.spaces + this.spaces = '' + + if (this.current.parent) { + this.current.source.end = this.getPosition(token[2]) + this.current.source.end.offset++ + this.current = this.current.parent + } else { + this.unexpectedClose(token) + } + } + + endFile() { + if (this.current.parent) this.unclosedBlock() + if (this.current.nodes && this.current.nodes.length) { + this.current.raws.semicolon = this.semicolon + } + this.current.raws.after = (this.current.raws.after || '') + this.spaces + this.root.source.end = this.getPosition(this.tokenizer.position()) + } + + freeSemicolon(token) { + this.spaces += token[1] + if (this.current.nodes) { + let prev = this.current.nodes[this.current.nodes.length - 1] + if (prev && prev.type === 'rule' && !prev.raws.ownSemicolon) { + prev.raws.ownSemicolon = this.spaces + this.spaces = '' + } + } + } + + // Helpers + + getPosition(offset) { + let pos = this.input.fromOffset(offset) + return { + column: pos.col, + line: pos.line, + offset + } + } + + init(node, offset) { + this.current.push(node) + node.source = { + input: this.input, + start: this.getPosition(offset) + } + node.raws.before = this.spaces + this.spaces = '' + if (node.type !== 'comment') this.semicolon = false + } + + other(start) { + let end = false + let type = null + let colon = false + let bracket = null + let brackets = [] + let customProperty = start[1].startsWith('--') + + let tokens = [] + let token = start + while (token) { + type = token[0] + tokens.push(token) + + if (type === '(' || type === '[') { + if (!bracket) bracket = token + brackets.push(type === '(' ? 
')' : ']') + } else if (customProperty && colon && type === '{') { + if (!bracket) bracket = token + brackets.push('}') + } else if (brackets.length === 0) { + if (type === ';') { + if (colon) { + this.decl(tokens, customProperty) + return + } else { + break + } + } else if (type === '{') { + this.rule(tokens) + return + } else if (type === '}') { + this.tokenizer.back(tokens.pop()) + end = true + break + } else if (type === ':') { + colon = true + } + } else if (type === brackets[brackets.length - 1]) { + brackets.pop() + if (brackets.length === 0) bracket = null + } + + token = this.tokenizer.nextToken() + } + + if (this.tokenizer.endOfFile()) end = true + if (brackets.length > 0) this.unclosedBracket(bracket) + + if (end && colon) { + if (!customProperty) { + while (tokens.length) { + token = tokens[tokens.length - 1][0] + if (token !== 'space' && token !== 'comment') break + this.tokenizer.back(tokens.pop()) + } + } + this.decl(tokens, customProperty) + } else { + this.unknownWord(tokens) + } + } + + parse() { + let token + while (!this.tokenizer.endOfFile()) { + token = this.tokenizer.nextToken() + + switch (token[0]) { + case 'space': + this.spaces += token[1] + break + + case ';': + this.freeSemicolon(token) + break + + case '}': + this.end(token) + break + + case 'comment': + this.comment(token) + break + + case 'at-word': + this.atrule(token) + break + + case '{': + this.emptyRule(token) + break + + default: + this.other(token) + break + } + } + this.endFile() + } + + precheckMissedSemicolon(/* tokens */) { + // Hook for Safe Parser + } + + raw(node, prop, tokens, customProperty) { + let token, type + let length = tokens.length + let value = '' + let clean = true + let next, prev + + for (let i = 0; i < length; i += 1) { + token = tokens[i] + type = token[0] + if (type === 'space' && i === length - 1 && !customProperty) { + clean = false + } else if (type === 'comment') { + prev = tokens[i - 1] ? tokens[i - 1][0] : 'empty' + next = tokens[i + 1] ? 
tokens[i + 1][0] : 'empty' + if (!SAFE_COMMENT_NEIGHBOR[prev] && !SAFE_COMMENT_NEIGHBOR[next]) { + if (value.slice(-1) === ',') { + clean = false + } else { + value += token[1] + } + } else { + clean = false + } + } else { + value += token[1] + } + } + if (!clean) { + let raw = tokens.reduce((all, i) => all + i[1], '') + node.raws[prop] = { raw, value } + } + node[prop] = value + } + + rule(tokens) { + tokens.pop() + + let node = new Rule() + this.init(node, tokens[0][2]) + + node.raws.between = this.spacesAndCommentsFromEnd(tokens) + this.raw(node, 'selector', tokens) + this.current = node + } + + spacesAndCommentsFromEnd(tokens) { + let lastTokenType + let spaces = '' + while (tokens.length) { + lastTokenType = tokens[tokens.length - 1][0] + if (lastTokenType !== 'space' && lastTokenType !== 'comment') break + spaces = tokens.pop()[1] + spaces + } + return spaces + } + + // Errors + + spacesAndCommentsFromStart(tokens) { + let next + let spaces = '' + while (tokens.length) { + next = tokens[0][0] + if (next !== 'space' && next !== 'comment') break + spaces += tokens.shift()[1] + } + return spaces + } + + spacesFromEnd(tokens) { + let lastTokenType + let spaces = '' + while (tokens.length) { + lastTokenType = tokens[tokens.length - 1][0] + if (lastTokenType !== 'space') break + spaces = tokens.pop()[1] + spaces + } + return spaces + } + + stringFrom(tokens, from) { + let result = '' + for (let i = from; i < tokens.length; i++) { + result += tokens[i][1] + } + tokens.splice(from, tokens.length - from) + return result + } + + unclosedBlock() { + let pos = this.current.source.start + throw this.input.error('Unclosed block', pos.line, pos.column) + } + + unclosedBracket(bracket) { + throw this.input.error( + 'Unclosed bracket', + { offset: bracket[2] }, + { offset: bracket[2] + 1 } + ) + } + + unexpectedClose(token) { + throw this.input.error( + 'Unexpected }', + { offset: token[2] }, + { offset: token[2] + 1 } + ) + } + + unknownWord(tokens) { + throw this.input.error( + 'Unknown word', + { offset: tokens[0][2] }, + { offset: tokens[0][2] + tokens[0][1].length } + ) + } + + unnamedAtrule(node, token) { + throw this.input.error( + 'At-rule without name', + { offset: token[2] }, + { offset: token[2] + token[1].length } + ) + } +} + +module.exports = Parser diff --git a/node_modules/postcss/lib/postcss.d.mts b/node_modules/postcss/lib/postcss.d.mts new file mode 100644 index 0000000..4cf5b49 --- /dev/null +++ b/node_modules/postcss/lib/postcss.d.mts @@ -0,0 +1,69 @@ +export { + // postcss function / namespace + default, + + // Value exports from postcss.mjs + stringify, + fromJSON, + // @ts-expect-error This value exists, but it’s untyped. + plugin, + parse, + list, + document, + comment, + atRule, + rule, + decl, + root, + CssSyntaxError, + Declaration, + Container, + Processor, + Document, + Comment, + Warning, + AtRule, + Result, + Input, + Rule, + Root, + Node, + + // Type-only exports + AcceptedPlugin, + AnyNode, + AtRuleProps, + Builder, + ChildNode, + ChildProps, + CommentProps, + ContainerProps, + DeclarationProps, + DocumentProps, + FilePosition, + Helpers, + JSONHydrator, + Message, + NodeErrorOptions, + NodeProps, + OldPlugin, + Parser, + Plugin, + PluginCreator, + Position, + Postcss, + ProcessOptions, + RootProps, + RuleProps, + Source, + SourceMap, + SourceMapOptions, + Stringifier, + Syntax, + TransformCallback, + Transformer, + WarningOptions, + + // This is a class, but it’s not re-exported. That’s why it’s exported as type-only here. 
+ type LazyResult +} from './postcss.js' diff --git a/node_modules/postcss/lib/postcss.d.ts b/node_modules/postcss/lib/postcss.d.ts new file mode 100644 index 0000000..d0b8b53 --- /dev/null +++ b/node_modules/postcss/lib/postcss.d.ts @@ -0,0 +1,453 @@ +import { RawSourceMap, SourceMapGenerator } from 'source-map-js' + +import AtRule, { AtRuleProps } from './at-rule.js' +import Comment, { CommentProps } from './comment.js' +import Container, { ContainerProps, NewChild } from './container.js' +import CssSyntaxError from './css-syntax-error.js' +import Declaration, { DeclarationProps } from './declaration.js' +import Document, { DocumentProps } from './document.js' +import Input, { FilePosition } from './input.js' +import LazyResult from './lazy-result.js' +import list from './list.js' +import Node, { + AnyNode, + ChildNode, + ChildProps, + NodeErrorOptions, + NodeProps, + Position, + Source +} from './node.js' +import Processor from './processor.js' +import Result, { Message } from './result.js' +import Root, { RootProps } from './root.js' +import Rule, { RuleProps } from './rule.js' +import Warning, { WarningOptions } from './warning.js' + +type DocumentProcessor = ( + document: Document, + helper: postcss.Helpers +) => Promise | void +type RootProcessor = ( + root: Root, + helper: postcss.Helpers +) => Promise | void +type DeclarationProcessor = ( + decl: Declaration, + helper: postcss.Helpers +) => Promise | void +type RuleProcessor = ( + rule: Rule, + helper: postcss.Helpers +) => Promise | void +type AtRuleProcessor = ( + atRule: AtRule, + helper: postcss.Helpers +) => Promise | void +type CommentProcessor = ( + comment: Comment, + helper: postcss.Helpers +) => Promise | void + +interface Processors { + /** + * Will be called on all`AtRule` nodes. + * + * Will be called again on node or children changes. + */ + AtRule?: { [name: string]: AtRuleProcessor } | AtRuleProcessor + + /** + * Will be called on all `AtRule` nodes, when all children will be processed. + * + * Will be called again on node or children changes. + */ + AtRuleExit?: { [name: string]: AtRuleProcessor } | AtRuleProcessor + + /** + * Will be called on all `Comment` nodes. + * + * Will be called again on node or children changes. + */ + Comment?: CommentProcessor + + /** + * Will be called on all `Comment` nodes after listeners + * for `Comment` event. + * + * Will be called again on node or children changes. + */ + CommentExit?: CommentProcessor + + /** + * Will be called on all `Declaration` nodes after listeners + * for `Declaration` event. + * + * Will be called again on node or children changes. + */ + Declaration?: { [prop: string]: DeclarationProcessor } | DeclarationProcessor + + /** + * Will be called on all `Declaration` nodes. + * + * Will be called again on node or children changes. + */ + DeclarationExit?: + | { [prop: string]: DeclarationProcessor } + | DeclarationProcessor + + /** + * Will be called on `Document` node. + * + * Will be called again on children changes. + */ + Document?: DocumentProcessor + + /** + * Will be called on `Document` node, when all children will be processed. + * + * Will be called again on children changes. + */ + DocumentExit?: DocumentProcessor + + /** + * Will be called on `Root` node once. + */ + Once?: RootProcessor + + /** + * Will be called on `Root` node once, when all children will be processed. + */ + OnceExit?: RootProcessor + + /** + * Will be called on `Root` node. + * + * Will be called again on children changes. 
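+ *
+ * A minimal visitor sketch (hypothetical plugin name, shown for illustration only):
+ *
+ * ```js
+ * const plugin = {
+ *   postcssPlugin: 'example-log-selectors',
+ *   Root (root) {
+ *     root.walkRules(rule => console.log(rule.selector))
+ *   }
+ * }
+ * ```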
+ */ + Root?: RootProcessor + + /** + * Will be called on `Root` node, when all children will be processed. + * + * Will be called again on children changes. + */ + RootExit?: RootProcessor + + /** + * Will be called on all `Rule` nodes. + * + * Will be called again on node or children changes. + */ + Rule?: RuleProcessor + + /** + * Will be called on all `Rule` nodes, when all children will be processed. + * + * Will be called again on node or children changes. + */ + RuleExit?: RuleProcessor +} + +declare namespace postcss { + export { + AnyNode, + AtRule, + AtRuleProps, + ChildNode, + ChildProps, + Comment, + CommentProps, + Container, + ContainerProps, + CssSyntaxError, + Declaration, + DeclarationProps, + Document, + DocumentProps, + FilePosition, + Input, + LazyResult, + list, + Message, + NewChild, + Node, + NodeErrorOptions, + NodeProps, + Position, + Processor, + Result, + Root, + RootProps, + Rule, + RuleProps, + Source, + Warning, + WarningOptions + } + + export type SourceMap = { + toJSON(): RawSourceMap + } & SourceMapGenerator + + export type Helpers = { postcss: Postcss; result: Result } & Postcss + + export interface Plugin extends Processors { + postcssPlugin: string + prepare?: (result: Result) => Processors + } + + export interface PluginCreator { + (opts?: PluginOptions): Plugin | Processor + postcss: true + } + + export interface Transformer extends TransformCallback { + postcssPlugin: string + postcssVersion: string + } + + export interface TransformCallback { + (root: Root, result: Result): Promise | void + } + + export interface OldPlugin extends Transformer { + (opts?: T): Transformer + postcss: Transformer + } + + export type AcceptedPlugin = + | { + postcss: Processor | TransformCallback + } + | OldPlugin + | Plugin + | PluginCreator + | Processor + | TransformCallback + + export interface Parser { + ( + css: { toString(): string } | string, + opts?: Pick + ): RootNode + } + + export interface Builder { + (part: string, node?: AnyNode, type?: 'end' | 'start'): void + } + + export interface Stringifier { + (node: AnyNode, builder: Builder): void + } + + export interface JSONHydrator { + (data: object): Node + (data: object[]): Node[] + } + + export interface Syntax { + /** + * Function to generate AST by string. + */ + parse?: Parser + + /** + * Class to generate string by AST. + */ + stringify?: Stringifier + } + + export interface SourceMapOptions { + /** + * Use absolute path in generated source map. + */ + absolute?: boolean + + /** + * Indicates that PostCSS should add annotation comments to the CSS. + * By default, PostCSS will always add a comment with a path + * to the source map. PostCSS will not add annotations to CSS files + * that do not contain any comments. + * + * By default, PostCSS presumes that you want to save the source map as + * `opts.to + '.map'` and will use this path in the annotation comment. + * A different path can be set by providing a string value for annotation. + * + * If you have set `inline: true`, annotation cannot be disabled. + */ + annotation?: ((file: string, root: Root) => string) | boolean | string + + /** + * Override `from` in map’s sources. + */ + from?: string + + /** + * Indicates that the source map should be embedded in the output CSS + * as a Base64-encoded comment. By default, it is `true`. + * But if all previous maps are external, not inline, PostCSS will not embed + * the map even if you do not set this option. 
+ * + * If you have an inline source map, the result.map property will be empty, + * as the source map will be contained within the text of `result.css`. + */ + inline?: boolean + + /** + * Source map content from a previous processing step (e.g., Sass). + * + * PostCSS will try to read the previous source map + * automatically (based on comments within the source CSS), but you can use + * this option to identify it manually. + * + * If desired, you can omit the previous map with prev: `false`. + */ + prev?: ((file: string) => string) | boolean | object | string + + /** + * Indicates that PostCSS should set the origin content (e.g., Sass source) + * of the source map. By default, it is true. But if all previous maps do not + * contain sources content, PostCSS will also leave it out even if you + * do not set this option. + */ + sourcesContent?: boolean + } + + export interface ProcessOptions { + /** + * The path of the CSS source file. You should always set `from`, + * because it is used in source map generation and syntax error messages. + */ + from?: string | undefined + + /** + * Source map options + */ + map?: boolean | SourceMapOptions + + /** + * Function to generate AST by string. + */ + parser?: Parser | Syntax + + /** + * Class to generate string by AST. + */ + stringifier?: Stringifier | Syntax + + /** + * Object with parse and stringify. + */ + syntax?: Syntax + + /** + * The path where you'll put the output CSS file. You should always set `to` + * to generate correct source maps. + */ + to?: string + } + + export type Postcss = typeof postcss + + /** + * Default function to convert a node tree into a CSS string. + */ + export let stringify: Stringifier + + /** + * Parses source css and returns a new `Root` or `Document` node, + * which contains the source CSS nodes. + * + * ```js + * // Simple CSS concatenation with source map support + * const root1 = postcss.parse(css1, { from: file1 }) + * const root2 = postcss.parse(css2, { from: file2 }) + * root1.append(root2).toResult().css + * ``` + */ + export let parse: Parser + + /** + * Rehydrate a JSON AST (from `Node#toJSON`) back into the AST classes. + * + * ```js + * const json = root.toJSON() + * // save to file, send by network, etc + * const root2 = postcss.fromJSON(json) + * ``` + */ + export let fromJSON: JSONHydrator + + /** + * Creates a new `Comment` node. + * + * @param defaults Properties for the new node. + * @return New comment node + */ + export function comment(defaults?: CommentProps): Comment + + /** + * Creates a new `AtRule` node. + * + * @param defaults Properties for the new node. + * @return New at-rule node. + */ + export function atRule(defaults?: AtRuleProps): AtRule + + /** + * Creates a new `Declaration` node. + * + * @param defaults Properties for the new node. + * @return New declaration node. + */ + export function decl(defaults?: DeclarationProps): Declaration + + /** + * Creates a new `Rule` node. + * + * @param default Properties for the new node. + * @return New rule node. + */ + export function rule(defaults?: RuleProps): Rule + + /** + * Creates a new `Root` node. + * + * @param defaults Properties for the new node. + * @return New root node. + */ + export function root(defaults?: RootProps): Root + + /** + * Creates a new `Document` node. + * + * @param defaults Properties for the new node. + * @return New document node. 
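+ *
+ * A brief sketch (assuming a single empty root is enough for illustration):
+ *
+ * ```js
+ * const doc = postcss.document({ nodes: [postcss.root()] })
+ * ```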
+ */ + export function document(defaults?: DocumentProps): Document + + export { postcss as default } +} + +/** + * Create a new `Processor` instance that will apply `plugins` + * as CSS processors. + * + * ```js + * let postcss = require('postcss') + * + * postcss(plugins).process(css, { from, to }).then(result => { + * console.log(result.css) + * }) + * ``` + * + * @param plugins PostCSS plugins. + * @return Processor to process multiple CSS. + */ +declare function postcss( + plugins?: readonly postcss.AcceptedPlugin[] +): Processor +declare function postcss(...plugins: postcss.AcceptedPlugin[]): Processor + +export = postcss diff --git a/node_modules/postcss/lib/postcss.js b/node_modules/postcss/lib/postcss.js new file mode 100644 index 0000000..8f0773b --- /dev/null +++ b/node_modules/postcss/lib/postcss.js @@ -0,0 +1,101 @@ +'use strict' + +let AtRule = require('./at-rule') +let Comment = require('./comment') +let Container = require('./container') +let CssSyntaxError = require('./css-syntax-error') +let Declaration = require('./declaration') +let Document = require('./document') +let fromJSON = require('./fromJSON') +let Input = require('./input') +let LazyResult = require('./lazy-result') +let list = require('./list') +let Node = require('./node') +let parse = require('./parse') +let Processor = require('./processor') +let Result = require('./result.js') +let Root = require('./root') +let Rule = require('./rule') +let stringify = require('./stringify') +let Warning = require('./warning') + +function postcss(...plugins) { + if (plugins.length === 1 && Array.isArray(plugins[0])) { + plugins = plugins[0] + } + return new Processor(plugins) +} + +postcss.plugin = function plugin(name, initializer) { + let warningPrinted = false + function creator(...args) { + // eslint-disable-next-line no-console + if (console && console.warn && !warningPrinted) { + warningPrinted = true + // eslint-disable-next-line no-console + console.warn( + name + + ': postcss.plugin was deprecated. Migration guide:\n' + + 'https://evilmartians.com/chronicles/postcss-8-plugin-migration' + ) + if (process.env.LANG && process.env.LANG.startsWith('cn')) { + /* c8 ignore next 7 */ + // eslint-disable-next-line no-console + console.warn( + name + + ': 里面 postcss.plugin 被弃用. 
迁移指南:\n' + + 'https://www.w3ctech.com/topic/2226' + ) + } + } + let transformer = initializer(...args) + transformer.postcssPlugin = name + transformer.postcssVersion = new Processor().version + return transformer + } + + let cache + Object.defineProperty(creator, 'postcss', { + get() { + if (!cache) cache = creator() + return cache + } + }) + + creator.process = function (css, processOpts, pluginOpts) { + return postcss([creator(pluginOpts)]).process(css, processOpts) + } + + return creator +} + +postcss.stringify = stringify +postcss.parse = parse +postcss.fromJSON = fromJSON +postcss.list = list + +postcss.comment = defaults => new Comment(defaults) +postcss.atRule = defaults => new AtRule(defaults) +postcss.decl = defaults => new Declaration(defaults) +postcss.rule = defaults => new Rule(defaults) +postcss.root = defaults => new Root(defaults) +postcss.document = defaults => new Document(defaults) + +postcss.CssSyntaxError = CssSyntaxError +postcss.Declaration = Declaration +postcss.Container = Container +postcss.Processor = Processor +postcss.Document = Document +postcss.Comment = Comment +postcss.Warning = Warning +postcss.AtRule = AtRule +postcss.Result = Result +postcss.Input = Input +postcss.Rule = Rule +postcss.Root = Root +postcss.Node = Node + +LazyResult.registerPostcss(postcss) + +module.exports = postcss +postcss.default = postcss diff --git a/node_modules/postcss/lib/postcss.mjs b/node_modules/postcss/lib/postcss.mjs new file mode 100644 index 0000000..3507598 --- /dev/null +++ b/node_modules/postcss/lib/postcss.mjs @@ -0,0 +1,30 @@ +import postcss from './postcss.js' + +export default postcss + +export const stringify = postcss.stringify +export const fromJSON = postcss.fromJSON +export const plugin = postcss.plugin +export const parse = postcss.parse +export const list = postcss.list + +export const document = postcss.document +export const comment = postcss.comment +export const atRule = postcss.atRule +export const rule = postcss.rule +export const decl = postcss.decl +export const root = postcss.root + +export const CssSyntaxError = postcss.CssSyntaxError +export const Declaration = postcss.Declaration +export const Container = postcss.Container +export const Processor = postcss.Processor +export const Document = postcss.Document +export const Comment = postcss.Comment +export const Warning = postcss.Warning +export const AtRule = postcss.AtRule +export const Result = postcss.Result +export const Input = postcss.Input +export const Rule = postcss.Rule +export const Root = postcss.Root +export const Node = postcss.Node diff --git a/node_modules/postcss/lib/previous-map.d.ts b/node_modules/postcss/lib/previous-map.d.ts new file mode 100644 index 0000000..23edeb5 --- /dev/null +++ b/node_modules/postcss/lib/previous-map.d.ts @@ -0,0 +1,81 @@ +import { SourceMapConsumer } from 'source-map-js' + +import { ProcessOptions } from './postcss.js' + +declare namespace PreviousMap { + // eslint-disable-next-line @typescript-eslint/no-use-before-define + export { PreviousMap_ as default } +} + +/** + * Source map information from input CSS. + * For example, source map after Sass compiler. + * + * This class will automatically find source map in input CSS or in file system + * near input file (according `from` option). + * + * ```js + * const root = parse(css, { from: 'a.sass.css' }) + * root.input.map //=> PreviousMap + * ``` + */ +declare class PreviousMap_ { + /** + * `sourceMappingURL` content. + */ + annotation?: string + + /** + * The CSS source identifier. 
Contains `Input#file` if the user + * set the `from` option, or `Input#id` if they did not. + */ + file?: string + + /** + * Was source map inlined by data-uri to input CSS. + */ + inline: boolean + + /** + * Path to source map file. + */ + mapFile?: string + + /** + * The directory with source map file, if source map is in separated file. + */ + root?: string + + /** + * Source map file content. + */ + text?: string + + /** + * @param css Input CSS source. + * @param opts Process options. + */ + constructor(css: string, opts?: ProcessOptions) + + /** + * Create a instance of `SourceMapGenerator` class + * from the `source-map` library to work with source map information. + * + * It is lazy method, so it will create object only on first call + * and then it will use cache. + * + * @return Object with source map information. + */ + consumer(): SourceMapConsumer + + /** + * Does source map contains `sourcesContent` with input source text. + * + * @return Is `sourcesContent` present. + */ + withContent(): boolean +} + +declare class PreviousMap extends PreviousMap_ {} + +export = PreviousMap diff --git a/node_modules/postcss/lib/previous-map.js b/node_modules/postcss/lib/previous-map.js new file mode 100644 index 0000000..b123dcd --- /dev/null +++ b/node_modules/postcss/lib/previous-map.js @@ -0,0 +1,144 @@ +'use strict' + +let { existsSync, readFileSync } = require('fs') +let { dirname, join } = require('path') +let { SourceMapConsumer, SourceMapGenerator } = require('source-map-js') + +function fromBase64(str) { + if (Buffer) { + return Buffer.from(str, 'base64').toString() + } else { + /* c8 ignore next 2 */ + return window.atob(str) + } +} + +class PreviousMap { + constructor(css, opts) { + if (opts.map === false) return + this.loadAnnotation(css) + this.inline = this.startWith(this.annotation, 'data:') + + let prev = opts.map ? opts.map.prev : undefined + let text = this.loadMap(opts.from, prev) + if (!this.mapFile && opts.from) { + this.mapFile = opts.from + } + if (this.mapFile) this.root = dirname(this.mapFile) + if (text) this.text = text + } + + consumer() { + if (!this.consumerCache) { + this.consumerCache = new SourceMapConsumer(this.text) + } + return this.consumerCache + } + + decodeInline(text) { + let baseCharsetUri = /^data:application\/json;charset=utf-?8;base64,/ + let baseUri = /^data:application\/json;base64,/ + let charsetUri = /^data:application\/json;charset=utf-?8,/ + let uri = /^data:application\/json,/ + + let uriMatch = text.match(charsetUri) || text.match(uri) + if (uriMatch) { + return decodeURIComponent(text.substr(uriMatch[0].length)) + } + + let baseUriMatch = text.match(baseCharsetUri) || text.match(baseUri) + if (baseUriMatch) { + return fromBase64(text.substr(baseUriMatch[0].length)) + } + + let encoding = text.match(/data:application\/json;([^,]+),/)[1] + throw new Error('Unsupported source map encoding ' + encoding) + } + + getAnnotationURL(sourceMapString) { + return sourceMapString.replace(/^\/\*\s*# sourceMappingURL=/, '').trim() + } + + isMap(map) { + if (typeof map !== 'object') return false + return ( + typeof map.mappings === 'string' || + typeof map._mappings === 'string' || + Array.isArray(map.sections) + ) + } + + loadAnnotation(css) { + let comments = css.match(/\/\*\s*# sourceMappingURL=/g) + if (!comments) return + + // sourceMappingURLs from comments, strings, etc. 
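+ // `comments.pop()` keeps only the last sourceMappingURL match, and
+ // `lastIndexOf` locates where that annotation starts in the CSS text.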
+ let start = css.lastIndexOf(comments.pop()) + let end = css.indexOf('*/', start) + + if (start > -1 && end > -1) { + // Locate the last sourceMappingURL to avoid pickin + this.annotation = this.getAnnotationURL(css.substring(start, end)) + } + } + + loadFile(path) { + this.root = dirname(path) + if (existsSync(path)) { + this.mapFile = path + return readFileSync(path, 'utf-8').toString().trim() + } + } + + loadMap(file, prev) { + if (prev === false) return false + + if (prev) { + if (typeof prev === 'string') { + return prev + } else if (typeof prev === 'function') { + let prevPath = prev(file) + if (prevPath) { + let map = this.loadFile(prevPath) + if (!map) { + throw new Error( + 'Unable to load previous source map: ' + prevPath.toString() + ) + } + return map + } + } else if (prev instanceof SourceMapConsumer) { + return SourceMapGenerator.fromSourceMap(prev).toString() + } else if (prev instanceof SourceMapGenerator) { + return prev.toString() + } else if (this.isMap(prev)) { + return JSON.stringify(prev) + } else { + throw new Error( + 'Unsupported previous source map format: ' + prev.toString() + ) + } + } else if (this.inline) { + return this.decodeInline(this.annotation) + } else if (this.annotation) { + let map = this.annotation + if (file) map = join(dirname(file), map) + return this.loadFile(map) + } + } + + startWith(string, start) { + if (!string) return false + return string.substr(0, start.length) === start + } + + withContent() { + return !!( + this.consumer().sourcesContent && + this.consumer().sourcesContent.length > 0 + ) + } +} + +module.exports = PreviousMap +PreviousMap.default = PreviousMap diff --git a/node_modules/postcss/lib/processor.d.ts b/node_modules/postcss/lib/processor.d.ts new file mode 100644 index 0000000..aa2942c --- /dev/null +++ b/node_modules/postcss/lib/processor.d.ts @@ -0,0 +1,115 @@ +import Document from './document.js' +import LazyResult from './lazy-result.js' +import NoWorkResult from './no-work-result.js' +import { + AcceptedPlugin, + Plugin, + ProcessOptions, + TransformCallback, + Transformer +} from './postcss.js' +import Result from './result.js' +import Root from './root.js' + +declare namespace Processor { + // eslint-disable-next-line @typescript-eslint/no-use-before-define + export { Processor_ as default } +} + +/** + * Contains plugins to process CSS. Create one `Processor` instance, + * initialize its plugins, and then use that instance on numerous CSS files. + * + * ```js + * const processor = postcss([autoprefixer, postcssNested]) + * processor.process(css1).then(result => console.log(result.css)) + * processor.process(css2).then(result => console.log(result.css)) + * ``` + */ +declare class Processor_ { + /** + * Plugins added to this processor. + * + * ```js + * const processor = postcss([autoprefixer, postcssNested]) + * processor.plugins.length //=> 2 + * ``` + */ + plugins: (Plugin | TransformCallback | Transformer)[] + + /** + * Current PostCSS version. + * + * ```js + * if (result.processor.version.split('.')[0] !== '6') { + * throw new Error('This plugin works only with PostCSS 6') + * } + * ``` + */ + version: string + + /** + * @param plugins PostCSS plugins + */ + constructor(plugins?: readonly AcceptedPlugin[]) + + /** + * Parses source CSS and returns a `LazyResult` Promise proxy. + * Because some plugins can be asynchronous it doesn’t make + * any transformations. Transformations will be applied + * in the `LazyResult` methods. 
+ * + * ```js + * processor.process(css, { from: 'a.css', to: 'a.out.css' }) + * .then(result => { + * console.log(result.css) + * }) + * ``` + * + * @param css String with input CSS or any object with a `toString()` method, + * like a Buffer. Optionally, send a `Result` instance + * and the processor will take the `Root` from it. + * @param opts Options. + * @return Promise proxy. + */ + process( + css: { toString(): string } | LazyResult | Result | Root | string + ): LazyResult | NoWorkResult + process( + css: { toString(): string } | LazyResult | Result | Root | string, + options: ProcessOptions + ): LazyResult + + /** + * Adds a plugin to be used as a CSS processor. + * + * PostCSS plugin can be in 4 formats: + * * A plugin in `Plugin` format. + * * A plugin creator function with `pluginCreator.postcss = true`. + * PostCSS will call this function without argument to get plugin. + * * A function. PostCSS will pass the function a {@link Root} + * as the first argument and current `Result` instance + * as the second. + * * Another `Processor` instance. PostCSS will copy plugins + * from that instance into this one. + * + * Plugins can also be added by passing them as arguments when creating + * a `postcss` instance (see [`postcss(plugins)`]). + * + * Asynchronous plugins should return a `Promise` instance. + * + * ```js + * const processor = postcss() + * .use(autoprefixer) + * .use(postcssNested) + * ``` + * + * @param plugin PostCSS plugin or `Processor` with plugins. + * @return Current processor to make methods chain. + */ + use(plugin: AcceptedPlugin): this +} + +declare class Processor extends Processor_ {} + +export = Processor diff --git a/node_modules/postcss/lib/processor.js b/node_modules/postcss/lib/processor.js new file mode 100644 index 0000000..729f725 --- /dev/null +++ b/node_modules/postcss/lib/processor.js @@ -0,0 +1,67 @@ +'use strict' + +let Document = require('./document') +let LazyResult = require('./lazy-result') +let NoWorkResult = require('./no-work-result') +let Root = require('./root') + +class Processor { + constructor(plugins = []) { + this.version = '8.4.47' + this.plugins = this.normalize(plugins) + } + + normalize(plugins) { + let normalized = [] + for (let i of plugins) { + if (i.postcss === true) { + i = i() + } else if (i.postcss) { + i = i.postcss + } + + if (typeof i === 'object' && Array.isArray(i.plugins)) { + normalized = normalized.concat(i.plugins) + } else if (typeof i === 'object' && i.postcssPlugin) { + normalized.push(i) + } else if (typeof i === 'function') { + normalized.push(i) + } else if (typeof i === 'object' && (i.parse || i.stringify)) { + if (process.env.NODE_ENV !== 'production') { + throw new Error( + 'PostCSS syntaxes cannot be used as plugins. Instead, please use ' + + 'one of the syntax/parser/stringifier options as outlined ' + + 'in your PostCSS runner documentation.' 
+ ) + } + } else { + throw new Error(i + ' is not a PostCSS plugin') + } + } + return normalized + } + + process(css, opts = {}) { + if ( + !this.plugins.length && + !opts.parser && + !opts.stringifier && + !opts.syntax + ) { + return new NoWorkResult(this, css, opts) + } else { + return new LazyResult(this, css, opts) + } + } + + use(plugin) { + this.plugins = this.plugins.concat(this.normalize([plugin])) + return this + } +} + +module.exports = Processor +Processor.default = Processor + +Root.registerProcessor(Processor) +Document.registerProcessor(Processor) diff --git a/node_modules/postcss/lib/result.d.ts b/node_modules/postcss/lib/result.d.ts new file mode 100644 index 0000000..40e086f --- /dev/null +++ b/node_modules/postcss/lib/result.d.ts @@ -0,0 +1,205 @@ +import { + Document, + Node, + Plugin, + ProcessOptions, + Root, + SourceMap, + TransformCallback, + Warning, + WarningOptions +} from './postcss.js' +import Processor from './processor.js' + +declare namespace Result { + export interface Message { + [others: string]: any + + /** + * Source PostCSS plugin name. + */ + plugin?: string + + /** + * Message type. + */ + type: string + } + + export interface ResultOptions extends ProcessOptions { + /** + * The CSS node that was the source of the warning. + */ + node?: Node + + /** + * Name of plugin that created this warning. `Result#warn` will fill it + * automatically with `Plugin#postcssPlugin` value. + */ + plugin?: string + } + + // eslint-disable-next-line @typescript-eslint/no-use-before-define + export { Result_ as default } +} + +/** + * Provides the result of the PostCSS transformations. + * + * A Result instance is returned by `LazyResult#then` + * or `Root#toResult` methods. + * + * ```js + * postcss([autoprefixer]).process(css).then(result => { + * console.log(result.css) + * }) + * ``` + * + * ```js + * const result2 = postcss.parse(css).toResult() + * ``` + */ +declare class Result_ { + /** + * A CSS string representing of `Result#root`. + * + * ```js + * postcss.parse('a{}').toResult().css //=> "a{}" + * ``` + */ + css: string + + /** + * Last runned PostCSS plugin. + */ + lastPlugin: Plugin | TransformCallback + + /** + * An instance of `SourceMapGenerator` class from the `source-map` library, + * representing changes to the `Result#root` instance. + * + * ```js + * result.map.toJSON() //=> { version: 3, file: 'a.css', … } + * ``` + * + * ```js + * if (result.map) { + * fs.writeFileSync(result.opts.to + '.map', result.map.toString()) + * } + * ``` + */ + map: SourceMap + + /** + * Contains messages from plugins (e.g., warnings or custom messages). + * Each message should have type and plugin properties. + * + * ```js + * AtRule: { + * import: (atRule, { result }) { + * const importedFile = parseImport(atRule) + * result.messages.push({ + * type: 'dependency', + * plugin: 'postcss-import', + * file: importedFile, + * parent: result.opts.from + * }) + * } + * } + * ``` + */ + messages: Result.Message[] + + /** + * Options from the `Processor#process` or `Root#toResult` call + * that produced this Result instance.] + * + * ```js + * root.toResult(opts).opts === opts + * ``` + */ + opts: Result.ResultOptions + + /** + * The Processor instance used for this transformation. + * + * ```js + * for (const plugin of result.processor.plugins) { + * if (plugin.postcssPlugin === 'postcss-bad') { + * throw 'postcss-good is incompatible with postcss-bad' + * } + * }) + * ``` + */ + processor: Processor + + /** + * Root node after all transformations. 
+ * + * ```js + * root.toResult().root === root + * ``` + */ + root: RootNode + + /** + * @param processor Processor used for this transformation. + * @param root Root node after all transformations. + * @param opts Options from the `Processor#process` or `Root#toResult`. + */ + constructor(processor: Processor, root: RootNode, opts: Result.ResultOptions) + + /** + * Returns for `Result#css` content. + * + * ```js + * result + '' === result.css + * ``` + * + * @return String representing of `Result#root`. + */ + toString(): string + + /** + * Creates an instance of `Warning` and adds it to `Result#messages`. + * + * ```js + * if (decl.important) { + * result.warn('Avoid !important', { node: decl, word: '!important' }) + * } + * ``` + * + * @param text Warning message. + * @param opts Warning options. + * @return Created warning. + */ + warn(message: string, options?: WarningOptions): Warning + + /** + * Returns warnings from plugins. Filters `Warning` instances + * from `Result#messages`. + * + * ```js + * result.warnings().forEach(warn => { + * console.warn(warn.toString()) + * }) + * ``` + * + * @return Warnings from plugins. + */ + warnings(): Warning[] + + /** + * An alias for the `Result#css` property. + * Use it with syntaxes that generate non-CSS output. + * + * ```js + * result.css === result.content + * ``` + */ + get content(): string +} + +declare class Result extends Result_ {} + +export = Result diff --git a/node_modules/postcss/lib/result.js b/node_modules/postcss/lib/result.js new file mode 100644 index 0000000..a39751d --- /dev/null +++ b/node_modules/postcss/lib/result.js @@ -0,0 +1,42 @@ +'use strict' + +let Warning = require('./warning') + +class Result { + constructor(processor, root, opts) { + this.processor = processor + this.messages = [] + this.root = root + this.opts = opts + this.css = undefined + this.map = undefined + } + + toString() { + return this.css + } + + warn(text, opts = {}) { + if (!opts.plugin) { + if (this.lastPlugin && this.lastPlugin.postcssPlugin) { + opts.plugin = this.lastPlugin.postcssPlugin + } + } + + let warning = new Warning(text, opts) + this.messages.push(warning) + + return warning + } + + warnings() { + return this.messages.filter(i => i.type === 'warning') + } + + get content() { + return this.css + } +} + +module.exports = Result +Result.default = Result diff --git a/node_modules/postcss/lib/root.d.ts b/node_modules/postcss/lib/root.d.ts new file mode 100644 index 0000000..5c91139 --- /dev/null +++ b/node_modules/postcss/lib/root.d.ts @@ -0,0 +1,87 @@ +import Container, { ContainerProps } from './container.js' +import Document from './document.js' +import { ProcessOptions } from './postcss.js' +import Result from './result.js' + +declare namespace Root { + export interface RootRaws extends Record { + /** + * The space symbols after the last child to the end of file. + */ + after?: string + + /** + * Non-CSS code after `Root`, when `Root` is inside `Document`. + * + * **Experimental:** some aspects of this node could change within minor + * or patch version releases. + */ + codeAfter?: string + + /** + * Non-CSS code before `Root`, when `Root` is inside `Document`. + * + * **Experimental:** some aspects of this node could change within minor + * or patch version releases. + */ + codeBefore?: string + + /** + * Is the last child has an (optional) semicolon. 
+ */ + semicolon?: boolean + } + + export interface RootProps extends ContainerProps { + /** + * Information used to generate byte-to-byte equal node string + * as it was in the origin input. + * */ + raws?: RootRaws + } + + // eslint-disable-next-line @typescript-eslint/no-use-before-define + export { Root_ as default } +} + +/** + * Represents a CSS file and contains all its parsed nodes. + * + * ```js + * const root = postcss.parse('a{color:black} b{z-index:2}') + * root.type //=> 'root' + * root.nodes.length //=> 2 + * ``` + */ +declare class Root_ extends Container { + nodes: NonNullable + parent: Document | undefined + raws: Root.RootRaws + type: 'root' + + constructor(defaults?: Root.RootProps) + + assign(overrides: object | Root.RootProps): this + clone(overrides?: Partial): this + cloneAfter(overrides?: Partial): this + cloneBefore(overrides?: Partial): this + + /** + * Returns a `Result` instance representing the root’s CSS. + * + * ```js + * const root1 = postcss.parse(css1, { from: 'a.css' }) + * const root2 = postcss.parse(css2, { from: 'b.css' }) + * root1.append(root2) + * const result = root1.toResult({ to: 'all.css', map: true }) + * ``` + * + * @param options Options. + * @return Result with current root’s CSS. + */ + toResult(options?: ProcessOptions): Result +} + +declare class Root extends Root_ {} + +export = Root diff --git a/node_modules/postcss/lib/root.js b/node_modules/postcss/lib/root.js new file mode 100644 index 0000000..ea574ed --- /dev/null +++ b/node_modules/postcss/lib/root.js @@ -0,0 +1,61 @@ +'use strict' + +let Container = require('./container') + +let LazyResult, Processor + +class Root extends Container { + constructor(defaults) { + super(defaults) + this.type = 'root' + if (!this.nodes) this.nodes = [] + } + + normalize(child, sample, type) { + let nodes = super.normalize(child) + + if (sample) { + if (type === 'prepend') { + if (this.nodes.length > 1) { + sample.raws.before = this.nodes[1].raws.before + } else { + delete sample.raws.before + } + } else if (this.first !== sample) { + for (let node of nodes) { + node.raws.before = sample.raws.before + } + } + } + + return nodes + } + + removeChild(child, ignore) { + let index = this.index(child) + + if (!ignore && index === 0 && this.nodes.length > 1) { + this.nodes[1].raws.before = this.nodes[index].raws.before + } + + return super.removeChild(child) + } + + toResult(opts = {}) { + let lazy = new LazyResult(new Processor(), this, opts) + return lazy.stringify() + } +} + +Root.registerLazyResult = dependant => { + LazyResult = dependant +} + +Root.registerProcessor = dependant => { + Processor = dependant +} + +module.exports = Root +Root.default = Root + +Container.registerRoot(Root) diff --git a/node_modules/postcss/lib/rule.d.ts b/node_modules/postcss/lib/rule.d.ts new file mode 100644 index 0000000..da8aae7 --- /dev/null +++ b/node_modules/postcss/lib/rule.d.ts @@ -0,0 +1,126 @@ +import Container, { + ContainerProps, + ContainerWithChildren +} from './container.js' + +declare namespace Rule { + export interface RuleRaws extends Record { + /** + * The space symbols after the last child of the node to the end of the node. + */ + after?: string + + /** + * The space symbols before the node. It also stores `*` + * and `_` symbols before the declaration (IE hack). + */ + before?: string + + /** + * The symbols between the selector and `{` for rules. + */ + between?: string + + /** + * Contains `true` if there is semicolon after rule. 
+ */ + ownSemicolon?: string + + /** + * The rule’s selector with comments. + */ + selector?: { + raw: string + value: string + } + + /** + * Contains `true` if the last child has an (optional) semicolon. + */ + semicolon?: boolean + } + + export type RuleProps = { + /** Information used to generate byte-to-byte equal node string as it was in the origin input. */ + raws?: RuleRaws + } & ( + | { + /** Selector or selectors of the rule. */ + selector: string + selectors?: never + } + | { + selector?: never + /** Selectors of the rule represented as an array of strings. */ + selectors: readonly string[] + } + ) & ContainerProps + + // eslint-disable-next-line @typescript-eslint/no-use-before-define + export { Rule_ as default } +} + +/** + * Represents a CSS rule: a selector followed by a declaration block. + * + * ```js + * Once (root, { Rule }) { + * let a = new Rule({ selector: 'a' }) + * a.append(…) + * root.append(a) + * } + * ``` + * + * ```js + * const root = postcss.parse('a{}') + * const rule = root.first + * rule.type //=> 'rule' + * rule.toString() //=> 'a{}' + * ``` + */ +declare class Rule_ extends Container { + nodes: NonNullable + parent: ContainerWithChildren | undefined + raws: Rule.RuleRaws + type: 'rule' + constructor(defaults?: Rule.RuleProps) + + assign(overrides: object | Rule.RuleProps): this + clone(overrides?: Partial): this + + cloneAfter(overrides?: Partial): this + + cloneBefore(overrides?: Partial): this + /** + * The rule’s full selector represented as a string. + * + * ```js + * const root = postcss.parse('a, b { }') + * const rule = root.first + * rule.selector //=> 'a, b' + * ``` + */ + get selector(): string + set selector(value: string) + /** + * An array containing the rule’s individual selectors. + * Groups of selectors are split at commas. + * + * ```js + * const root = postcss.parse('a, b { }') + * const rule = root.first + * + * rule.selector //=> 'a, b' + * rule.selectors //=> ['a', 'b'] + * + * rule.selectors = ['a', 'strong'] + * rule.selector //=> 'a, strong' + * ``` + */ + get selectors(): string[] + set selectors(values: string[]) +} + +declare class Rule extends Rule_ {} + +export = Rule diff --git a/node_modules/postcss/lib/rule.js b/node_modules/postcss/lib/rule.js new file mode 100644 index 0000000..a93ab25 --- /dev/null +++ b/node_modules/postcss/lib/rule.js @@ -0,0 +1,27 @@ +'use strict' + +let Container = require('./container') +let list = require('./list') + +class Rule extends Container { + constructor(defaults) { + super(defaults) + this.type = 'rule' + if (!this.nodes) this.nodes = [] + } + + get selectors() { + return list.comma(this.selector) + } + + set selectors(values) { + let match = this.selector ? this.selector.match(/,\s*/) : null + let sep = match ? 
match[0] : ',' + this.raw('between', 'beforeOpen') + this.selector = values.join(sep) + } +} + +module.exports = Rule +Rule.default = Rule + +Container.registerRule(Rule) diff --git a/node_modules/postcss/lib/stringifier.d.ts b/node_modules/postcss/lib/stringifier.d.ts new file mode 100644 index 0000000..f707a6a --- /dev/null +++ b/node_modules/postcss/lib/stringifier.d.ts @@ -0,0 +1,46 @@ +import { + AnyNode, + AtRule, + Builder, + Comment, + Container, + Declaration, + Document, + Root, + Rule +} from './postcss.js' + +declare namespace Stringifier { + // eslint-disable-next-line @typescript-eslint/no-use-before-define + export { Stringifier_ as default } +} + +declare class Stringifier_ { + builder: Builder + constructor(builder: Builder) + atrule(node: AtRule, semicolon?: boolean): void + beforeAfter(node: AnyNode, detect: 'after' | 'before'): string + block(node: AnyNode, start: string): void + body(node: Container): void + comment(node: Comment): void + decl(node: Declaration, semicolon?: boolean): void + document(node: Document): void + raw(node: AnyNode, own: null | string, detect?: string): string + rawBeforeClose(root: Root): string | undefined + rawBeforeComment(root: Root, node: Comment): string | undefined + rawBeforeDecl(root: Root, node: Declaration): string | undefined + rawBeforeOpen(root: Root): string | undefined + rawBeforeRule(root: Root): string | undefined + rawColon(root: Root): string | undefined + rawEmptyBody(root: Root): string | undefined + rawIndent(root: Root): string | undefined + rawSemicolon(root: Root): boolean | undefined + rawValue(node: AnyNode, prop: string): string + root(node: Root): void + rule(node: Rule): void + stringify(node: AnyNode, semicolon?: boolean): void +} + +declare class Stringifier extends Stringifier_ {} + +export = Stringifier diff --git a/node_modules/postcss/lib/stringifier.js b/node_modules/postcss/lib/stringifier.js new file mode 100644 index 0000000..e07ad12 --- /dev/null +++ b/node_modules/postcss/lib/stringifier.js @@ -0,0 +1,353 @@ +'use strict' + +const DEFAULT_RAW = { + after: '\n', + beforeClose: '\n', + beforeComment: '\n', + beforeDecl: '\n', + beforeOpen: ' ', + beforeRule: '\n', + colon: ': ', + commentLeft: ' ', + commentRight: ' ', + emptyBody: '', + indent: ' ', + semicolon: false +} + +function capitalize(str) { + return str[0].toUpperCase() + str.slice(1) +} + +class Stringifier { + constructor(builder) { + this.builder = builder + } + + atrule(node, semicolon) { + let name = '@' + node.name + let params = node.params ? this.rawValue(node, 'params') : '' + + if (typeof node.raws.afterName !== 'undefined') { + name += node.raws.afterName + } else if (params) { + name += ' ' + } + + if (node.nodes) { + this.block(node, name + params) + } else { + let end = (node.raws.between || '') + (semicolon ? 
';' : '') + this.builder(name + params + end, node) + } + } + + beforeAfter(node, detect) { + let value + if (node.type === 'decl') { + value = this.raw(node, null, 'beforeDecl') + } else if (node.type === 'comment') { + value = this.raw(node, null, 'beforeComment') + } else if (detect === 'before') { + value = this.raw(node, null, 'beforeRule') + } else { + value = this.raw(node, null, 'beforeClose') + } + + let buf = node.parent + let depth = 0 + while (buf && buf.type !== 'root') { + depth += 1 + buf = buf.parent + } + + if (value.includes('\n')) { + let indent = this.raw(node, null, 'indent') + if (indent.length) { + for (let step = 0; step < depth; step++) value += indent + } + } + + return value + } + + block(node, start) { + let between = this.raw(node, 'between', 'beforeOpen') + this.builder(start + between + '{', node, 'start') + + let after + if (node.nodes && node.nodes.length) { + this.body(node) + after = this.raw(node, 'after') + } else { + after = this.raw(node, 'after', 'emptyBody') + } + + if (after) this.builder(after) + this.builder('}', node, 'end') + } + + body(node) { + let last = node.nodes.length - 1 + while (last > 0) { + if (node.nodes[last].type !== 'comment') break + last -= 1 + } + + let semicolon = this.raw(node, 'semicolon') + for (let i = 0; i < node.nodes.length; i++) { + let child = node.nodes[i] + let before = this.raw(child, 'before') + if (before) this.builder(before) + this.stringify(child, last !== i || semicolon) + } + } + + comment(node) { + let left = this.raw(node, 'left', 'commentLeft') + let right = this.raw(node, 'right', 'commentRight') + this.builder('/*' + left + node.text + right + '*/', node) + } + + decl(node, semicolon) { + let between = this.raw(node, 'between', 'colon') + let string = node.prop + between + this.rawValue(node, 'value') + + if (node.important) { + string += node.raws.important || ' !important' + } + + if (semicolon) string += ';' + this.builder(string, node) + } + + document(node) { + this.body(node) + } + + raw(node, own, detect) { + let value + if (!detect) detect = own + + // Already had + if (own) { + value = node.raws[own] + if (typeof value !== 'undefined') return value + } + + let parent = node.parent + + if (detect === 'before') { + // Hack for first rule in CSS + if (!parent || (parent.type === 'root' && parent.first === node)) { + return '' + } + + // `root` nodes in `document` should use only their own raws + if (parent && parent.type === 'document') { + return '' + } + } + + // Floating child without parent + if (!parent) return DEFAULT_RAW[detect] + + // Detect style by other nodes + let root = node.root() + if (!root.rawCache) root.rawCache = {} + if (typeof root.rawCache[detect] !== 'undefined') { + return root.rawCache[detect] + } + + if (detect === 'before' || detect === 'after') { + return this.beforeAfter(node, detect) + } else { + let method = 'raw' + capitalize(detect) + if (this[method]) { + value = this[method](root, node) + } else { + root.walk(i => { + value = i.raws[own] + if (typeof value !== 'undefined') return false + }) + } + } + + if (typeof value === 'undefined') value = DEFAULT_RAW[detect] + + root.rawCache[detect] = value + return value + } + + rawBeforeClose(root) { + let value + root.walk(i => { + if (i.nodes && i.nodes.length > 0) { + if (typeof i.raws.after !== 'undefined') { + value = i.raws.after + if (value.includes('\n')) { + value = value.replace(/[^\n]+$/, '') + } + return false + } + } + }) + if (value) value = value.replace(/\S/g, '') + return value + } + + 
rawBeforeComment(root, node) { + let value + root.walkComments(i => { + if (typeof i.raws.before !== 'undefined') { + value = i.raws.before + if (value.includes('\n')) { + value = value.replace(/[^\n]+$/, '') + } + return false + } + }) + if (typeof value === 'undefined') { + value = this.raw(node, null, 'beforeDecl') + } else if (value) { + value = value.replace(/\S/g, '') + } + return value + } + + rawBeforeDecl(root, node) { + let value + root.walkDecls(i => { + if (typeof i.raws.before !== 'undefined') { + value = i.raws.before + if (value.includes('\n')) { + value = value.replace(/[^\n]+$/, '') + } + return false + } + }) + if (typeof value === 'undefined') { + value = this.raw(node, null, 'beforeRule') + } else if (value) { + value = value.replace(/\S/g, '') + } + return value + } + + rawBeforeOpen(root) { + let value + root.walk(i => { + if (i.type !== 'decl') { + value = i.raws.between + if (typeof value !== 'undefined') return false + } + }) + return value + } + + rawBeforeRule(root) { + let value + root.walk(i => { + if (i.nodes && (i.parent !== root || root.first !== i)) { + if (typeof i.raws.before !== 'undefined') { + value = i.raws.before + if (value.includes('\n')) { + value = value.replace(/[^\n]+$/, '') + } + return false + } + } + }) + if (value) value = value.replace(/\S/g, '') + return value + } + + rawColon(root) { + let value + root.walkDecls(i => { + if (typeof i.raws.between !== 'undefined') { + value = i.raws.between.replace(/[^\s:]/g, '') + return false + } + }) + return value + } + + rawEmptyBody(root) { + let value + root.walk(i => { + if (i.nodes && i.nodes.length === 0) { + value = i.raws.after + if (typeof value !== 'undefined') return false + } + }) + return value + } + + rawIndent(root) { + if (root.raws.indent) return root.raws.indent + let value + root.walk(i => { + let p = i.parent + if (p && p !== root && p.parent && p.parent === root) { + if (typeof i.raws.before !== 'undefined') { + let parts = i.raws.before.split('\n') + value = parts[parts.length - 1] + value = value.replace(/\S/g, '') + return false + } + } + }) + return value + } + + rawSemicolon(root) { + let value + root.walk(i => { + if (i.nodes && i.nodes.length && i.last.type === 'decl') { + value = i.raws.semicolon + if (typeof value !== 'undefined') return false + } + }) + return value + } + + rawValue(node, prop) { + let value = node[prop] + let raw = node.raws[prop] + if (raw && raw.value === value) { + return raw.raw + } + + return value + } + + root(node) { + this.body(node) + if (node.raws.after) this.builder(node.raws.after) + } + + rule(node) { + this.block(node, this.rawValue(node, 'selector')) + if (node.raws.ownSemicolon) { + this.builder(node.raws.ownSemicolon, node, 'end') + } + } + + stringify(node, semicolon) { + /* c8 ignore start */ + if (!this[node.type]) { + throw new Error( + 'Unknown AST node type ' + + node.type + + '. ' + + 'Maybe you need to change PostCSS stringifier.' 
+ ) + } + /* c8 ignore stop */ + this[node.type](node, semicolon) + } +} + +module.exports = Stringifier +Stringifier.default = Stringifier diff --git a/node_modules/postcss/lib/stringify.d.ts b/node_modules/postcss/lib/stringify.d.ts new file mode 100644 index 0000000..06ad0b4 --- /dev/null +++ b/node_modules/postcss/lib/stringify.d.ts @@ -0,0 +1,9 @@ +import { Stringifier } from './postcss.js' + +interface Stringify extends Stringifier { + default: Stringify +} + +declare const stringify: Stringify + +export = stringify diff --git a/node_modules/postcss/lib/stringify.js b/node_modules/postcss/lib/stringify.js new file mode 100644 index 0000000..77bd017 --- /dev/null +++ b/node_modules/postcss/lib/stringify.js @@ -0,0 +1,11 @@ +'use strict' + +let Stringifier = require('./stringifier') + +function stringify(node, builder) { + let str = new Stringifier(builder) + str.stringify(node) +} + +module.exports = stringify +stringify.default = stringify diff --git a/node_modules/postcss/lib/symbols.js b/node_modules/postcss/lib/symbols.js new file mode 100644 index 0000000..a142c26 --- /dev/null +++ b/node_modules/postcss/lib/symbols.js @@ -0,0 +1,5 @@ +'use strict' + +module.exports.isClean = Symbol('isClean') + +module.exports.my = Symbol('my') diff --git a/node_modules/postcss/lib/terminal-highlight.js b/node_modules/postcss/lib/terminal-highlight.js new file mode 100644 index 0000000..6196c9d --- /dev/null +++ b/node_modules/postcss/lib/terminal-highlight.js @@ -0,0 +1,70 @@ +'use strict' + +let pico = require('picocolors') + +let tokenizer = require('./tokenize') + +let Input + +function registerInput(dependant) { + Input = dependant +} + +const HIGHLIGHT_THEME = { + ';': pico.yellow, + ':': pico.yellow, + '(': pico.cyan, + ')': pico.cyan, + '[': pico.yellow, + ']': pico.yellow, + '{': pico.yellow, + '}': pico.yellow, + 'at-word': pico.cyan, + 'brackets': pico.cyan, + 'call': pico.cyan, + 'class': pico.yellow, + 'comment': pico.gray, + 'hash': pico.magenta, + 'string': pico.green +} + +function getTokenType([type, value], processor) { + if (type === 'word') { + if (value[0] === '.') { + return 'class' + } + if (value[0] === '#') { + return 'hash' + } + } + + if (!processor.endOfFile()) { + let next = processor.nextToken() + processor.back(next) + if (next[0] === 'brackets' || next[0] === '(') return 'call' + } + + return type +} + +function terminalHighlight(css) { + let processor = tokenizer(new Input(css), { ignoreErrors: true }) + let result = '' + while (!processor.endOfFile()) { + let token = processor.nextToken() + let color = HIGHLIGHT_THEME[getTokenType(token, processor)] + if (color) { + result += token[1] + .split(/\r?\n/) + .map(i => color(i)) + .join('\n') + } else { + result += token[1] + } + } + return result +} + +terminalHighlight.registerInput = registerInput + +module.exports = terminalHighlight diff --git a/node_modules/postcss/lib/tokenize.js b/node_modules/postcss/lib/tokenize.js new file mode 100644 index 0000000..1d41284 --- /dev/null +++ b/node_modules/postcss/lib/tokenize.js @@ -0,0 +1,266 @@ +'use strict' + +const SINGLE_QUOTE = "'".charCodeAt(0) +const DOUBLE_QUOTE = '"'.charCodeAt(0) +const BACKSLASH = '\\'.charCodeAt(0) +const SLASH = '/'.charCodeAt(0) +const NEWLINE = '\n'.charCodeAt(0) +const SPACE = ' '.charCodeAt(0) +const FEED = '\f'.charCodeAt(0) +const TAB = '\t'.charCodeAt(0) +const CR = '\r'.charCodeAt(0) +const OPEN_SQUARE = '['.charCodeAt(0) +const CLOSE_SQUARE = ']'.charCodeAt(0) +const OPEN_PARENTHESES = '('.charCodeAt(0) +const CLOSE_PARENTHESES = 
')'.charCodeAt(0) +const OPEN_CURLY = '{'.charCodeAt(0) +const CLOSE_CURLY = '}'.charCodeAt(0) +const SEMICOLON = ';'.charCodeAt(0) +const ASTERISK = '*'.charCodeAt(0) +const COLON = ':'.charCodeAt(0) +const AT = '@'.charCodeAt(0) + +const RE_AT_END = /[\t\n\f\r "#'()/;[\\\]{}]/g +const RE_WORD_END = /[\t\n\f\r !"#'():;@[\\\]{}]|\/(?=\*)/g +const RE_BAD_BRACKET = /.[\r\n"'(/\\]/ +const RE_HEX_ESCAPE = /[\da-f]/i + +module.exports = function tokenizer(input, options = {}) { + let css = input.css.valueOf() + let ignore = options.ignoreErrors + + let code, content, escape, next, quote + let currentToken, escaped, escapePos, n, prev + + let length = css.length + let pos = 0 + let buffer = [] + let returned = [] + + function position() { + return pos + } + + function unclosed(what) { + throw input.error('Unclosed ' + what, pos) + } + + function endOfFile() { + return returned.length === 0 && pos >= length + } + + function nextToken(opts) { + if (returned.length) return returned.pop() + if (pos >= length) return + + let ignoreUnclosed = opts ? opts.ignoreUnclosed : false + + code = css.charCodeAt(pos) + + switch (code) { + case NEWLINE: + case SPACE: + case TAB: + case CR: + case FEED: { + next = pos + do { + next += 1 + code = css.charCodeAt(next) + } while ( + code === SPACE || + code === NEWLINE || + code === TAB || + code === CR || + code === FEED + ) + + currentToken = ['space', css.slice(pos, next)] + pos = next - 1 + break + } + + case OPEN_SQUARE: + case CLOSE_SQUARE: + case OPEN_CURLY: + case CLOSE_CURLY: + case COLON: + case SEMICOLON: + case CLOSE_PARENTHESES: { + let controlChar = String.fromCharCode(code) + currentToken = [controlChar, controlChar, pos] + break + } + + case OPEN_PARENTHESES: { + prev = buffer.length ? buffer.pop()[1] : '' + n = css.charCodeAt(pos + 1) + if ( + prev === 'url' && + n !== SINGLE_QUOTE && + n !== DOUBLE_QUOTE && + n !== SPACE && + n !== NEWLINE && + n !== TAB && + n !== FEED && + n !== CR + ) { + next = pos + do { + escaped = false + next = css.indexOf(')', next + 1) + if (next === -1) { + if (ignore || ignoreUnclosed) { + next = pos + break + } else { + unclosed('bracket') + } + } + escapePos = next + while (css.charCodeAt(escapePos - 1) === BACKSLASH) { + escapePos -= 1 + escaped = !escaped + } + } while (escaped) + + currentToken = ['brackets', css.slice(pos, next + 1), pos, next] + + pos = next + } else { + next = css.indexOf(')', pos + 1) + content = css.slice(pos, next + 1) + + if (next === -1 || RE_BAD_BRACKET.test(content)) { + currentToken = ['(', '(', pos] + } else { + currentToken = ['brackets', content, pos, next] + pos = next + } + } + + break + } + + case SINGLE_QUOTE: + case DOUBLE_QUOTE: { + quote = code === SINGLE_QUOTE ? 
"'" : '"' + next = pos + do { + escaped = false + next = css.indexOf(quote, next + 1) + if (next === -1) { + if (ignore || ignoreUnclosed) { + next = pos + 1 + break + } else { + unclosed('string') + } + } + escapePos = next + while (css.charCodeAt(escapePos - 1) === BACKSLASH) { + escapePos -= 1 + escaped = !escaped + } + } while (escaped) + + currentToken = ['string', css.slice(pos, next + 1), pos, next] + pos = next + break + } + + case AT: { + RE_AT_END.lastIndex = pos + 1 + RE_AT_END.test(css) + if (RE_AT_END.lastIndex === 0) { + next = css.length - 1 + } else { + next = RE_AT_END.lastIndex - 2 + } + + currentToken = ['at-word', css.slice(pos, next + 1), pos, next] + + pos = next + break + } + + case BACKSLASH: { + next = pos + escape = true + while (css.charCodeAt(next + 1) === BACKSLASH) { + next += 1 + escape = !escape + } + code = css.charCodeAt(next + 1) + if ( + escape && + code !== SLASH && + code !== SPACE && + code !== NEWLINE && + code !== TAB && + code !== CR && + code !== FEED + ) { + next += 1 + if (RE_HEX_ESCAPE.test(css.charAt(next))) { + while (RE_HEX_ESCAPE.test(css.charAt(next + 1))) { + next += 1 + } + if (css.charCodeAt(next + 1) === SPACE) { + next += 1 + } + } + } + + currentToken = ['word', css.slice(pos, next + 1), pos, next] + + pos = next + break + } + + default: { + if (code === SLASH && css.charCodeAt(pos + 1) === ASTERISK) { + next = css.indexOf('*/', pos + 2) + 1 + if (next === 0) { + if (ignore || ignoreUnclosed) { + next = css.length + } else { + unclosed('comment') + } + } + + currentToken = ['comment', css.slice(pos, next + 1), pos, next] + pos = next + } else { + RE_WORD_END.lastIndex = pos + 1 + RE_WORD_END.test(css) + if (RE_WORD_END.lastIndex === 0) { + next = css.length - 1 + } else { + next = RE_WORD_END.lastIndex - 2 + } + + currentToken = ['word', css.slice(pos, next + 1), pos, next] + buffer.push(currentToken) + pos = next + } + + break + } + } + + pos++ + return currentToken + } + + function back(token) { + returned.push(token) + } + + return { + back, + endOfFile, + nextToken, + position + } +} diff --git a/node_modules/postcss/lib/warn-once.js b/node_modules/postcss/lib/warn-once.js new file mode 100644 index 0000000..316e1cf --- /dev/null +++ b/node_modules/postcss/lib/warn-once.js @@ -0,0 +1,13 @@ +/* eslint-disable no-console */ +'use strict' + +let printed = {} + +module.exports = function warnOnce(message) { + if (printed[message]) return + printed[message] = true + + if (typeof console !== 'undefined' && console.warn) { + console.warn(message) + } +} diff --git a/node_modules/postcss/lib/warning.d.ts b/node_modules/postcss/lib/warning.d.ts new file mode 100644 index 0000000..b25bba8 --- /dev/null +++ b/node_modules/postcss/lib/warning.d.ts @@ -0,0 +1,147 @@ +import { RangePosition } from './css-syntax-error.js' +import Node from './node.js' + +declare namespace Warning { + export interface WarningOptions { + /** + * End position, exclusive, in CSS node string that caused the warning. + */ + end?: RangePosition + + /** + * End index, exclusive, in CSS node string that caused the warning. + */ + endIndex?: number + + /** + * Start index, inclusive, in CSS node string that caused the warning. + */ + index?: number + + /** + * CSS node that caused the warning. + */ + node?: Node + + /** + * Name of the plugin that created this warning. `Result#warn` fills + * this property automatically. + */ + plugin?: string + + /** + * Start position, inclusive, in CSS node string that caused the warning. 
+ */ + start?: RangePosition + + /** + * Word in CSS source that caused the warning. + */ + word?: string + } + + // eslint-disable-next-line @typescript-eslint/no-use-before-define + export { Warning_ as default } +} + +/** + * Represents a plugin’s warning. It can be created using `Node#warn`. + * + * ```js + * if (decl.important) { + * decl.warn(result, 'Avoid !important', { word: '!important' }) + * } + * ``` + */ +declare class Warning_ { + /** + * Column for inclusive start position in the input file with this warning’s source. + * + * ```js + * warning.column //=> 6 + * ``` + */ + column: number + + /** + * Column for exclusive end position in the input file with this warning’s source. + * + * ```js + * warning.endColumn //=> 4 + * ``` + */ + endColumn?: number + + /** + * Line for exclusive end position in the input file with this warning’s source. + * + * ```js + * warning.endLine //=> 6 + * ``` + */ + endLine?: number + + /** + * Line for inclusive start position in the input file with this warning’s source. + * + * ```js + * warning.line //=> 5 + * ``` + */ + line: number + + /** + * Contains the CSS node that caused the warning. + * + * ```js + * warning.node.toString() //=> 'color: white !important' + * ``` + */ + node: Node + + /** + * The name of the plugin that created this warning. + * When you call `Node#warn` it will fill this property automatically. + * + * ```js + * warning.plugin //=> 'postcss-important' + * ``` + */ + plugin: string + + /** + * The warning message. + * + * ```js + * warning.text //=> 'Try to avoid !important' + * ``` + */ + text: string + + /** + * Type to filter warnings from `Result#messages`. + * Always equal to `"warning"`. + */ + type: 'warning' + + /** + * @param text Warning message. + * @param opts Warning options. + */ + constructor(text: string, opts?: Warning.WarningOptions) + + /** + * Returns a warning position and message. + * + * ```js + * warning.toString() //=> 'postcss-lint:a.css:10:14: Avoid !important' + * ``` + * + * @return Warning position and message. 
+ */ + toString(): string +} + +declare class Warning extends Warning_ {} + +export = Warning diff --git a/node_modules/postcss/lib/warning.js b/node_modules/postcss/lib/warning.js new file mode 100644 index 0000000..3a3d79c --- /dev/null +++ b/node_modules/postcss/lib/warning.js @@ -0,0 +1,37 @@ +'use strict' + +class Warning { + constructor(text, opts = {}) { + this.type = 'warning' + this.text = text + + if (opts.node && opts.node.source) { + let range = opts.node.rangeBy(opts) + this.line = range.start.line + this.column = range.start.column + this.endLine = range.end.line + this.endColumn = range.end.column + } + + for (let opt in opts) this[opt] = opts[opt] + } + + toString() { + if (this.node) { + return this.node.error(this.text, { + index: this.index, + plugin: this.plugin, + word: this.word + }).message + } + + if (this.plugin) { + return this.plugin + ': ' + this.text + } + + return this.text + } +} + +module.exports = Warning +Warning.default = Warning diff --git a/node_modules/postcss/node_modules/.bin/nanoid b/node_modules/postcss/node_modules/.bin/nanoid new file mode 120000 index 0000000..7b4e5ad --- /dev/null +++ b/node_modules/postcss/node_modules/.bin/nanoid @@ -0,0 +1 @@ +../../../nanoid/bin/nanoid.cjs \ No newline at end of file diff --git a/node_modules/postcss/package.json b/node_modules/postcss/package.json new file mode 100755 index 0000000..bd8992a --- /dev/null +++ b/node_modules/postcss/package.json @@ -0,0 +1,88 @@ +{ + "name": "postcss", + "version": "8.4.47", + "description": "Tool for transforming styles with JS plugins", + "engines": { + "node": "^10 || ^12 || >=14" + }, + "exports": { + ".": { + "require": "./lib/postcss.js", + "import": "./lib/postcss.mjs" + }, + "./lib/at-rule": "./lib/at-rule.js", + "./lib/comment": "./lib/comment.js", + "./lib/container": "./lib/container.js", + "./lib/css-syntax-error": "./lib/css-syntax-error.js", + "./lib/declaration": "./lib/declaration.js", + "./lib/fromJSON": "./lib/fromJSON.js", + "./lib/input": "./lib/input.js", + "./lib/lazy-result": "./lib/lazy-result.js", + "./lib/no-work-result": "./lib/no-work-result.js", + "./lib/list": "./lib/list.js", + "./lib/map-generator": "./lib/map-generator.js", + "./lib/node": "./lib/node.js", + "./lib/parse": "./lib/parse.js", + "./lib/parser": "./lib/parser.js", + "./lib/postcss": "./lib/postcss.js", + "./lib/previous-map": "./lib/previous-map.js", + "./lib/processor": "./lib/processor.js", + "./lib/result": "./lib/result.js", + "./lib/root": "./lib/root.js", + "./lib/rule": "./lib/rule.js", + "./lib/stringifier": "./lib/stringifier.js", + "./lib/stringify": "./lib/stringify.js", + "./lib/symbols": "./lib/symbols.js", + "./lib/terminal-highlight": "./lib/terminal-highlight.js", + "./lib/tokenize": "./lib/tokenize.js", + "./lib/warn-once": "./lib/warn-once.js", + "./lib/warning": "./lib/warning.js", + "./package.json": "./package.json" + }, + "main": "./lib/postcss.js", + "types": "./lib/postcss.d.ts", + "keywords": [ + "css", + "postcss", + "rework", + "preprocessor", + "parser", + "source map", + "transform", + "manipulation", + "transpiler" + ], + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/postcss" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "author": "Andrey Sitnik ", + "license": "MIT", + "homepage": "https://postcss.org/", + "repository": "postcss/postcss", + "bugs": { + "url": 
"https://github.com/postcss/postcss/issues" + }, + "dependencies": { + "nanoid": "^3.3.7", + "picocolors": "^1.1.0", + "source-map-js": "^1.2.1" + }, + "browser": { + "./lib/terminal-highlight": false, + "source-map-js": false, + "path": false, + "url": false, + "fs": false + } +} diff --git a/node_modules/rollup/LICENSE.md b/node_modules/rollup/LICENSE.md new file mode 100644 index 0000000..ce44fa2 --- /dev/null +++ b/node_modules/rollup/LICENSE.md @@ -0,0 +1,653 @@ +# Rollup core license +Rollup is released under the MIT license: + +The MIT License (MIT) + +Copyright (c) 2017 [these people](https://github.com/rollup/rollup/graphs/contributors) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +# Licenses of bundled dependencies +The published Rollup artifact additionally contains code with the following licenses: +MIT, ISC + +# Bundled dependencies: +## @jridgewell/sourcemap-codec +License: MIT +By: Rich Harris +Repository: git+https://github.com/jridgewell/sourcemap-codec.git + +> The MIT License +> +> Copyright (c) 2015 Rich Harris +> +> Permission is hereby granted, free of charge, to any person obtaining a copy +> of this software and associated documentation files (the "Software"), to deal +> in the Software without restriction, including without limitation the rights +> to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +> copies of the Software, and to permit persons to whom the Software is +> furnished to do so, subject to the following conditions: +> +> The above copyright notice and this permission notice shall be included in +> all copies or substantial portions of the Software. +> +> THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +> IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +> FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +> AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +> LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +> OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +> THE SOFTWARE. 
+ +--------------------------------------- + +## @rollup/pluginutils +License: MIT +By: Rich Harris +Repository: rollup/plugins + +> The MIT License (MIT) +> +> Copyright (c) 2019 RollupJS Plugin Contributors (https://github.com/rollup/plugins/graphs/contributors) +> +> Permission is hereby granted, free of charge, to any person obtaining a copy +> of this software and associated documentation files (the "Software"), to deal +> in the Software without restriction, including without limitation the rights +> to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +> copies of the Software, and to permit persons to whom the Software is +> furnished to do so, subject to the following conditions: +> +> The above copyright notice and this permission notice shall be included in +> all copies or substantial portions of the Software. +> +> THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +> IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +> FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +> AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +> LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +> OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +> THE SOFTWARE. + +--------------------------------------- + +## anymatch +License: ISC +By: Elan Shanker +Repository: https://github.com/micromatch/anymatch + +> The ISC License +> +> Copyright (c) 2019 Elan Shanker, Paul Miller (https://paulmillr.com) +> +> Permission to use, copy, modify, and/or distribute this software for any +> purpose with or without fee is hereby granted, provided that the above +> copyright notice and this permission notice appear in all copies. +> +> THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +> WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +> MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +> ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +> WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +> ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +> IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +--------------------------------------- + +## binary-extensions +License: MIT +By: Sindre Sorhus +Repository: sindresorhus/binary-extensions + +> MIT License +> +> Copyright (c) Sindre Sorhus (https://sindresorhus.com) +> Copyright (c) Paul Miller (https://paulmillr.com) +> +> Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: +> +> The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. +> +> THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +--------------------------------------- + +## braces +License: MIT +By: Jon Schlinkert, Brian Woodward, Elan Shanker, Eugene Sharygin, hemanth.hm +Repository: micromatch/braces + +> The MIT License (MIT) +> +> Copyright (c) 2014-present, Jon Schlinkert. +> +> Permission is hereby granted, free of charge, to any person obtaining a copy +> of this software and associated documentation files (the "Software"), to deal +> in the Software without restriction, including without limitation the rights +> to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +> copies of the Software, and to permit persons to whom the Software is +> furnished to do so, subject to the following conditions: +> +> The above copyright notice and this permission notice shall be included in +> all copies or substantial portions of the Software. +> +> THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +> IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +> FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +> AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +> LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +> OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +> THE SOFTWARE. + +--------------------------------------- + +## builtin-modules +License: MIT +By: Sindre Sorhus +Repository: sindresorhus/builtin-modules + +> MIT License +> +> Copyright (c) Sindre Sorhus (https://sindresorhus.com) +> +> Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: +> +> The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. +> +> THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ +--------------------------------------- + +## chokidar +License: MIT +By: Paul Miller, Elan Shanker +Repository: git+https://github.com/paulmillr/chokidar.git + +> The MIT License (MIT) +> +> Copyright (c) 2012-2019 Paul Miller (https://paulmillr.com), Elan Shanker +> +> Permission is hereby granted, free of charge, to any person obtaining a copy +> of this software and associated documentation files (the “Software”), to deal +> in the Software without restriction, including without limitation the rights +> to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +> copies of the Software, and to permit persons to whom the Software is +> furnished to do so, subject to the following conditions: +> +> The above copyright notice and this permission notice shall be included in +> all copies or substantial portions of the Software. +> +> THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +> IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +> FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +> AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +> LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +> OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +> THE SOFTWARE. + +--------------------------------------- + +## colorette +License: MIT +By: Jorge Bucaran +Repository: jorgebucaran/colorette + +> Copyright © Jorge Bucaran <> +> +> Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the 'Software'), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: +> +> The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. +> +> THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +--------------------------------------- + +## date-time +License: MIT +By: Sindre Sorhus +Repository: sindresorhus/date-time + +> MIT License +> +> Copyright (c) Sindre Sorhus (https://sindresorhus.com) +> +> Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: +> +> The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. +> +> THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +--------------------------------------- + +## fill-range +License: MIT +By: Jon Schlinkert, Edo Rivai, Paul Miller, Rouven Weßling +Repository: jonschlinkert/fill-range + +> The MIT License (MIT) +> +> Copyright (c) 2014-present, Jon Schlinkert. +> +> Permission is hereby granted, free of charge, to any person obtaining a copy +> of this software and associated documentation files (the "Software"), to deal +> in the Software without restriction, including without limitation the rights +> to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +> copies of the Software, and to permit persons to whom the Software is +> furnished to do so, subject to the following conditions: +> +> The above copyright notice and this permission notice shall be included in +> all copies or substantial portions of the Software. +> +> THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +> IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +> FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +> AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +> LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +> OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +> THE SOFTWARE. + +--------------------------------------- + +## flru +License: MIT +By: Luke Edwards +Repository: lukeed/flru + +> MIT License +> +> Copyright (c) Luke Edwards (lukeed.com) +> +> Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: +> +> The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. +> +> THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +--------------------------------------- + +## glob-parent +License: ISC +By: Gulp Team, Elan Shanker, Blaine Bublitz +Repository: gulpjs/glob-parent + +> The ISC License +> +> Copyright (c) 2015, 2019 Elan Shanker +> +> Permission to use, copy, modify, and/or distribute this software for any +> purpose with or without fee is hereby granted, provided that the above +> copyright notice and this permission notice appear in all copies. +> +> THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +> WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +> MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +> ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +> WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +> ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +> IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +--------------------------------------- + +## is-binary-path +License: MIT +By: Sindre Sorhus +Repository: sindresorhus/is-binary-path + +> MIT License +> +> Copyright (c) 2019 Sindre Sorhus (https://sindresorhus.com), Paul Miller (https://paulmillr.com) +> +> Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: +> +> The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. +> +> THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +--------------------------------------- + +## is-extglob +License: MIT +By: Jon Schlinkert +Repository: jonschlinkert/is-extglob + +> The MIT License (MIT) +> +> Copyright (c) 2014-2016, Jon Schlinkert +> +> Permission is hereby granted, free of charge, to any person obtaining a copy +> of this software and associated documentation files (the "Software"), to deal +> in the Software without restriction, including without limitation the rights +> to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +> copies of the Software, and to permit persons to whom the Software is +> furnished to do so, subject to the following conditions: +> +> The above copyright notice and this permission notice shall be included in +> all copies or substantial portions of the Software. +> +> THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +> IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +> FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +> AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +> LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +> OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +> THE SOFTWARE. + +--------------------------------------- + +## is-glob +License: MIT +By: Jon Schlinkert, Brian Woodward, Daniel Perez +Repository: micromatch/is-glob + +> The MIT License (MIT) +> +> Copyright (c) 2014-2017, Jon Schlinkert. 
+> +> Permission is hereby granted, free of charge, to any person obtaining a copy +> of this software and associated documentation files (the "Software"), to deal +> in the Software without restriction, including without limitation the rights +> to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +> copies of the Software, and to permit persons to whom the Software is +> furnished to do so, subject to the following conditions: +> +> The above copyright notice and this permission notice shall be included in +> all copies or substantial portions of the Software. +> +> THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +> IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +> FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +> AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +> LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +> OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +> THE SOFTWARE. + +--------------------------------------- + +## is-number +License: MIT +By: Jon Schlinkert, Olsten Larck, Rouven Weßling +Repository: jonschlinkert/is-number + +> The MIT License (MIT) +> +> Copyright (c) 2014-present, Jon Schlinkert. +> +> Permission is hereby granted, free of charge, to any person obtaining a copy +> of this software and associated documentation files (the "Software"), to deal +> in the Software without restriction, including without limitation the rights +> to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +> copies of the Software, and to permit persons to whom the Software is +> furnished to do so, subject to the following conditions: +> +> The above copyright notice and this permission notice shall be included in +> all copies or substantial portions of the Software. +> +> THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +> IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +> FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +> AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +> LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +> OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +> THE SOFTWARE. + +--------------------------------------- + +## is-reference +License: MIT +By: Rich Harris +Repository: git+https://github.com/Rich-Harris/is-reference.git + +--------------------------------------- + +## locate-character +License: MIT +By: Rich Harris +Repository: git+https://gitlab.com/Rich-Harris/locate-character.git + +--------------------------------------- + +## magic-string +License: MIT +By: Rich Harris +Repository: https://github.com/rich-harris/magic-string + +> Copyright 2018 Rich Harris +> +> Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: +> +> The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 
+> +> THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +--------------------------------------- + +## normalize-path +License: MIT +By: Jon Schlinkert, Blaine Bublitz +Repository: jonschlinkert/normalize-path + +> The MIT License (MIT) +> +> Copyright (c) 2014-2018, Jon Schlinkert. +> +> Permission is hereby granted, free of charge, to any person obtaining a copy +> of this software and associated documentation files (the "Software"), to deal +> in the Software without restriction, including without limitation the rights +> to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +> copies of the Software, and to permit persons to whom the Software is +> furnished to do so, subject to the following conditions: +> +> The above copyright notice and this permission notice shall be included in +> all copies or substantial portions of the Software. +> +> THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +> IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +> FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +> AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +> LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +> OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +> THE SOFTWARE. + +--------------------------------------- + +## parse-ms +License: MIT +By: Sindre Sorhus +Repository: sindresorhus/parse-ms + +> MIT License +> +> Copyright (c) Sindre Sorhus (https://sindresorhus.com) +> +> Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: +> +> The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. +> +> THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +--------------------------------------- + +## picomatch +License: MIT +By: Jon Schlinkert +Repository: micromatch/picomatch + +> The MIT License (MIT) +> +> Copyright (c) 2017-present, Jon Schlinkert. 
+> +> Permission is hereby granted, free of charge, to any person obtaining a copy +> of this software and associated documentation files (the "Software"), to deal +> in the Software without restriction, including without limitation the rights +> to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +> copies of the Software, and to permit persons to whom the Software is +> furnished to do so, subject to the following conditions: +> +> The above copyright notice and this permission notice shall be included in +> all copies or substantial portions of the Software. +> +> THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +> IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +> FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +> AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +> LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +> OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +> THE SOFTWARE. + +--------------------------------------- + +## pretty-bytes +License: MIT +By: Sindre Sorhus +Repository: sindresorhus/pretty-bytes + +> MIT License +> +> Copyright (c) Sindre Sorhus (https://sindresorhus.com) +> +> Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: +> +> The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. +> +> THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +--------------------------------------- + +## pretty-ms +License: MIT +By: Sindre Sorhus +Repository: sindresorhus/pretty-ms + +> MIT License +> +> Copyright (c) Sindre Sorhus (https://sindresorhus.com) +> +> Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: +> +> The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. +> +> THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +--------------------------------------- + +## readdirp +License: MIT +By: Thorsten Lorenz, Paul Miller +Repository: git://github.com/paulmillr/readdirp.git + +> MIT License +> +> Copyright (c) 2012-2019 Thorsten Lorenz, Paul Miller (https://paulmillr.com) +> +> Permission is hereby granted, free of charge, to any person obtaining a copy +> of this software and associated documentation files (the "Software"), to deal +> in the Software without restriction, including without limitation the rights +> to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +> copies of the Software, and to permit persons to whom the Software is +> furnished to do so, subject to the following conditions: +> +> The above copyright notice and this permission notice shall be included in all +> copies or substantial portions of the Software. +> +> THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +> IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +> FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +> AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +> LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +> OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +> SOFTWARE. + +--------------------------------------- + +## signal-exit +License: ISC +By: Ben Coe +Repository: https://github.com/tapjs/signal-exit.git + +> The ISC License +> +> Copyright (c) 2015-2023 Benjamin Coe, Isaac Z. Schlueter, and Contributors +> +> Permission to use, copy, modify, and/or distribute this software +> for any purpose with or without fee is hereby granted, provided +> that the above copyright notice and this permission notice +> appear in all copies. +> +> THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +> WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES +> OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE +> LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES +> OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, +> WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, +> ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +--------------------------------------- + +## time-zone +License: MIT +By: Sindre Sorhus +Repository: sindresorhus/time-zone + +> MIT License +> +> Copyright (c) Sindre Sorhus (https://sindresorhus.com) +> +> Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: +> +> The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 
+> +> THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +--------------------------------------- + +## to-regex-range +License: MIT +By: Jon Schlinkert, Rouven Weßling +Repository: micromatch/to-regex-range + +> The MIT License (MIT) +> +> Copyright (c) 2015-present, Jon Schlinkert. +> +> Permission is hereby granted, free of charge, to any person obtaining a copy +> of this software and associated documentation files (the "Software"), to deal +> in the Software without restriction, including without limitation the rights +> to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +> copies of the Software, and to permit persons to whom the Software is +> furnished to do so, subject to the following conditions: +> +> The above copyright notice and this permission notice shall be included in +> all copies or substantial portions of the Software. +> +> THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +> IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +> FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +> AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +> LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +> OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +> THE SOFTWARE. + +--------------------------------------- + +## yargs-parser +License: ISC +By: Ben Coe +Repository: https://github.com/yargs/yargs-parser.git + +> Copyright (c) 2016, Contributors +> +> Permission to use, copy, modify, and/or distribute this software +> for any purpose with or without fee is hereby granted, provided +> that the above copyright notice and this permission notice +> appear in all copies. +> +> THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +> WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES +> OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE +> LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES +> OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, +> WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, +> ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/rollup/README.md b/node_modules/rollup/README.md new file mode 100644 index 0000000..a1b6b9e --- /dev/null +++ b/node_modules/rollup/README.md @@ -0,0 +1,134 @@ +

+<!-- Rollup logo -->
+<!-- badges: npm version | node compatibility | install size | code coverage | backers | sponsors | license | Join the chat at https://is.gd/rollup_chat -->

+ +## Overview + +Rollup is a module bundler for JavaScript which compiles small pieces of code into something larger and more complex, such as a library or application. It uses the standardized ES module format for code, instead of previous idiosyncratic solutions such as CommonJS and AMD. ES modules let you freely and seamlessly combine the most useful individual functions from your favorite libraries. Rollup can optimize ES modules for faster native loading in modern browsers, or output a legacy module format allowing ES module workflows today. + +## Quick Start Guide + +Install with `npm install --global rollup`. Rollup can be used either through a [command line interface](https://rollupjs.org/command-line-interface/) with an optional configuration file or else through its [JavaScript API](https://rollupjs.org/javascript-api/). Run `rollup --help` to see the available options and parameters. The starter project templates, [rollup-starter-lib](https://github.com/rollup/rollup-starter-lib) and [rollup-starter-app](https://github.com/rollup/rollup-starter-app), demonstrate common configuration options, and more detailed instructions are available throughout the [user guide](https://rollupjs.org/introduction/). + +### Commands + +These commands assume the entry point to your application is named main.js, and that you'd like all imports compiled into a single file named bundle.js. + +For browsers: + +```bash +# compile to a + \ No newline at end of file diff --git a/test/test.jpg b/test/test.jpg new file mode 100644 index 0000000..195fe62 Binary files /dev/null and b/test/test.jpg differ diff --git a/webp/config.h b/webp/config.h new file mode 100644 index 0000000..ed3c69d --- /dev/null +++ b/webp/config.h @@ -0,0 +1,150 @@ +/* Adapted from the autotools src/webp/config.h.in. */ + +/* Define if building universal (internal helper macro) */ +/* TODO: handle properly in CMake */ +/* #undef AC_APPLE_UNIVERSAL_BUILD */ + +/* Set to 1 if __builtin_bswap16 is available */ +/* #undef HAVE_BUILTIN_BSWAP16 */ + +/* Set to 1 if __builtin_bswap32 is available */ +/* #undef HAVE_BUILTIN_BSWAP32 */ + +/* Set to 1 if __builtin_bswap64 is available */ +/* #undef HAVE_BUILTIN_BSWAP64 */ + +/* Define to 1 if you have the header file. */ +/* #undef HAVE_CPU_FEATURES_H */ + +/* Define to 1 if you have the header file. */ +/* #undef HAVE_DLFCN_H */ + +/* Define to 1 if you have the header file. */ +/* #undef HAVE_GLUT_GLUT_H */ + +/* Define to 1 if you have the header file. */ +/* #undef HAVE_GL_GLUT_H */ + +/* Define to 1 if you have the header file. */ +#define HAVE_INTTYPES_H 1 + +/* Define to 1 if you have the header file. */ +#define HAVE_MEMORY_H 1 + +/* Define to 1 if you have the header file. */ +/* #undef HAVE_OPENGL_GLUT_H */ + +/* Have PTHREAD_PRIO_INHERIT. */ +/* #undef HAVE_PTHREAD_PRIO_INHERIT */ + +/* Define to 1 if you have the header file. */ +#define HAVE_SHLWAPI_H 1 + +/* Define to 1 if you have the header file. */ +#define HAVE_STDINT_H 1 + +/* Define to 1 if you have the header file. */ +#define HAVE_STDLIB_H 1 + +/* Define to 1 if you have the header file. */ +/* #undef HAVE_STRINGS_H */ + +/* Define to 1 if you have the header file. */ +#define HAVE_STRING_H 1 + +/* Define to 1 if you have the header file. */ +#define HAVE_SYS_STAT_H 1 + +/* Define to 1 if you have the header file. */ +#define HAVE_SYS_TYPES_H 1 + +/* Define to 1 if you have the header file. */ + #undef HAVE_UNISTD_H + +/* Define to 1 if you have the header file. 
*/ +//#define HAVE_WINCODEC_H 1 + +/* Define to 1 if you have the header file. */ +//#define HAVE_WINDOWS_H 1 + +/* Define to the sub-directory in which libtool stores uninstalled libraries. + */ +/* TODO: handle properly in CMake */ +#define LT_OBJDIR ".libs/" + +/* Name of package */ +#define PACKAGE "WebP" + +/* Define to the address where bug reports for this package should be sent. */ +#define PACKAGE_BUGREPORT "https://bugs.chromium.org/p/webp" + +/* Define to the full name of this package. */ +#define PACKAGE_NAME "WebP" + +/* Define to the full name and version of this package. */ +#define PACKAGE_STRING "WebP 1.2.4" + +/* Define to the one symbol short name of this package. */ +#define PACKAGE_TARNAME "WebP" + +/* Define to the home page for this package. */ +#define PACKAGE_URL "https://developers.google.com/speed/webp" + +/* Define to the version of this package. */ +#define PACKAGE_VERSION "1.2.4" + +/* Define to 1 if you have the ANSI C header files. */ +#define STDC_HEADERS 1 + +/* Version number of package */ +#define VERSION "1.2.4" + +/* Set to 1 if GIF library is installed */ + //#undef WEBP_HAVE_GIF + +/* Set to 1 if OpenGL is supported */ +#define WEBP_HAVE_GL 1 + +/* Set to 1 if JPEG library is installed */ + //#undef WEBP_HAVE_JPEG + +/* Set to 1 if NEON is supported */ +/* #undef WEBP_HAVE_NEON */ + +/* Set to 1 if runtime detection of NEON is enabled */ +/* TODO: handle properly in CMake */ +/* #undef WEBP_HAVE_NEON_RTCD */ + +/* Set to 1 if PNG library is installed */ +//#undef WEBP_HAVE_PNG +//#define WEBP_HAVE_PNG + +/* Set to 1 if SDL library is installed */ + //#undef WEBP_HAVE_SDL + +/* Set to 1 if SSE2 is supported */ +#define WEBP_HAVE_SSE2 1 + +/* Set to 1 if SSE4.1 is supported */ +#define WEBP_HAVE_SSE41 1 + +/* Set to 1 if TIFF library is installed */ + #undef WEBP_HAVE_TIFF + +/* Enable near lossless encoding */ +#define WEBP_NEAR_LOSSLESS 1 + +/* Undefine this to disable thread support. */ +#define WEBP_USE_THREAD 1 + +/* Define WORDS_BIGENDIAN to 1 if your processor stores words with the most + significant byte first (like Motorola and SPARC, unlike Intel). */ +#if defined AC_APPLE_UNIVERSAL_BUILD +# if defined __BIG_ENDIAN__ +# define WORDS_BIGENDIAN 1 +# endif +#else +# ifndef WORDS_BIGENDIAN +# undef WORDS_BIGENDIAN +# endif +#endif diff --git a/webp/decode.h b/webp/decode.h new file mode 100644 index 0000000..d982475 --- /dev/null +++ b/webp/decode.h @@ -0,0 +1,503 @@ +// Copyright 2010 Google Inc. All Rights Reserved. +// +// Use of this source code is governed by a BSD-style license +// that can be found in the COPYING file in the root of the source +// tree. An additional intellectual property rights grant can be found +// in the file PATENTS. All contributing project authors may +// be found in the AUTHORS file in the root of the source tree. +// ----------------------------------------------------------------------------- +// +// Main decoding functions for WebP images. +// +// Author: Skal (pascal.massimino@gmail.com) + +#ifndef WEBP_WEBP_DECODE_H_ +#define WEBP_WEBP_DECODE_H_ + +#include "./types.h" + +#ifdef __cplusplus +extern "C" { +#endif + +#define WEBP_DECODER_ABI_VERSION 0x0209 // MAJOR(8b) + MINOR(8b) + +// Note: forward declaring enumerations is not allowed in (strict) C and C++, +// the types are left here for reference. 
+// typedef enum VP8StatusCode VP8StatusCode; +// typedef enum WEBP_CSP_MODE WEBP_CSP_MODE; +typedef struct WebPRGBABuffer WebPRGBABuffer; +typedef struct WebPYUVABuffer WebPYUVABuffer; +typedef struct WebPDecBuffer WebPDecBuffer; +typedef struct WebPIDecoder WebPIDecoder; +typedef struct WebPBitstreamFeatures WebPBitstreamFeatures; +typedef struct WebPDecoderOptions WebPDecoderOptions; +typedef struct WebPDecoderConfig WebPDecoderConfig; + +// Return the decoder's version number, packed in hexadecimal using 8bits for +// each of major/minor/revision. E.g: v2.5.7 is 0x020507. +WEBP_EXTERN int WebPGetDecoderVersion(void); + +// Retrieve basic header information: width, height. +// This function will also validate the header, returning true on success, +// false otherwise. '*width' and '*height' are only valid on successful return. +// Pointers 'width' and 'height' can be passed NULL if deemed irrelevant. +// Note: The following chunk sequences (before the raw VP8/VP8L data) are +// considered valid by this function: +// RIFF + VP8(L) +// RIFF + VP8X + (optional chunks) + VP8(L) +// ALPH + VP8 <-- Not a valid WebP format: only allowed for internal purpose. +// VP8(L) <-- Not a valid WebP format: only allowed for internal purpose. +WEBP_EXTERN int WebPGetInfo(const uint8_t* data, size_t data_size, + int* width, int* height); + +// Decodes WebP images pointed to by 'data' and returns RGBA samples, along +// with the dimensions in *width and *height. The ordering of samples in +// memory is R, G, B, A, R, G, B, A... in scan order (endian-independent). +// The returned pointer should be deleted calling WebPFree(). +// Returns NULL in case of error. +WEBP_EXTERN uint8_t* WebPDecodeRGBA(const uint8_t* data, size_t data_size, + int* width, int* height); + +// Same as WebPDecodeRGBA, but returning A, R, G, B, A, R, G, B... ordered data. +WEBP_EXTERN uint8_t* WebPDecodeARGB(const uint8_t* data, size_t data_size, + int* width, int* height); + +// Same as WebPDecodeRGBA, but returning B, G, R, A, B, G, R, A... ordered data. +WEBP_EXTERN uint8_t* WebPDecodeBGRA(const uint8_t* data, size_t data_size, + int* width, int* height); + +// Same as WebPDecodeRGBA, but returning R, G, B, R, G, B... ordered data. +// If the bitstream contains transparency, it is ignored. +WEBP_EXTERN uint8_t* WebPDecodeRGB(const uint8_t* data, size_t data_size, + int* width, int* height); + +// Same as WebPDecodeRGB, but returning B, G, R, B, G, R... ordered data. +WEBP_EXTERN uint8_t* WebPDecodeBGR(const uint8_t* data, size_t data_size, + int* width, int* height); + + +// Decode WebP images pointed to by 'data' to Y'UV format(*). The pointer +// returned is the Y samples buffer. Upon return, *u and *v will point to +// the U and V chroma data. These U and V buffers need NOT be passed to +// WebPFree(), unlike the returned Y luma one. The dimension of the U and V +// planes are both (*width + 1) / 2 and (*height + 1)/ 2. +// Upon return, the Y buffer has a stride returned as '*stride', while U and V +// have a common stride returned as '*uv_stride'. +// Return NULL in case of error. +// (*) Also named Y'CbCr. See: https://en.wikipedia.org/wiki/YCbCr +WEBP_EXTERN uint8_t* WebPDecodeYUV(const uint8_t* data, size_t data_size, + int* width, int* height, + uint8_t** u, uint8_t** v, + int* stride, int* uv_stride); + +// These five functions are variants of the above ones, that decode the image +// directly into a pre-allocated buffer 'output_buffer'. The maximum storage +// available in this buffer is indicated by 'output_buffer_size'. 
If this +// storage is not sufficient (or an error occurred), NULL is returned. +// Otherwise, output_buffer is returned, for convenience. +// The parameter 'output_stride' specifies the distance (in bytes) +// between scanlines. Hence, output_buffer_size is expected to be at least +// output_stride x picture-height. +WEBP_EXTERN uint8_t* WebPDecodeRGBAInto( + const uint8_t* data, size_t data_size, + uint8_t* output_buffer, size_t output_buffer_size, int output_stride); +WEBP_EXTERN uint8_t* WebPDecodeARGBInto( + const uint8_t* data, size_t data_size, + uint8_t* output_buffer, size_t output_buffer_size, int output_stride); +WEBP_EXTERN uint8_t* WebPDecodeBGRAInto( + const uint8_t* data, size_t data_size, + uint8_t* output_buffer, size_t output_buffer_size, int output_stride); + +// RGB and BGR variants. Here too the transparency information, if present, +// will be dropped and ignored. +WEBP_EXTERN uint8_t* WebPDecodeRGBInto( + const uint8_t* data, size_t data_size, + uint8_t* output_buffer, size_t output_buffer_size, int output_stride); +WEBP_EXTERN uint8_t* WebPDecodeBGRInto( + const uint8_t* data, size_t data_size, + uint8_t* output_buffer, size_t output_buffer_size, int output_stride); + +// WebPDecodeYUVInto() is a variant of WebPDecodeYUV() that operates directly +// into pre-allocated luma/chroma plane buffers. This function requires the +// strides to be passed: one for the luma plane and one for each of the +// chroma ones. The size of each plane buffer is passed as 'luma_size', +// 'u_size' and 'v_size' respectively. +// Pointer to the luma plane ('*luma') is returned or NULL if an error occurred +// during decoding (or because some buffers were found to be too small). +WEBP_EXTERN uint8_t* WebPDecodeYUVInto( + const uint8_t* data, size_t data_size, + uint8_t* luma, size_t luma_size, int luma_stride, + uint8_t* u, size_t u_size, int u_stride, + uint8_t* v, size_t v_size, int v_stride); + +//------------------------------------------------------------------------------ +// Output colorspaces and buffer + +// Colorspaces +// Note: the naming describes the byte-ordering of packed samples in memory. +// For instance, MODE_BGRA relates to samples ordered as B,G,R,A,B,G,R,A,... +// Non-capital names (e.g.:MODE_Argb) relates to pre-multiplied RGB channels. +// RGBA-4444 and RGB-565 colorspaces are represented by following byte-order: +// RGBA-4444: [r3 r2 r1 r0 g3 g2 g1 g0], [b3 b2 b1 b0 a3 a2 a1 a0], ... +// RGB-565: [r4 r3 r2 r1 r0 g5 g4 g3], [g2 g1 g0 b4 b3 b2 b1 b0], ... +// In the case WEBP_SWAP_16BITS_CSP is defined, the bytes are swapped for +// these two modes: +// RGBA-4444: [b3 b2 b1 b0 a3 a2 a1 a0], [r3 r2 r1 r0 g3 g2 g1 g0], ... +// RGB-565: [g2 g1 g0 b4 b3 b2 b1 b0], [r4 r3 r2 r1 r0 g5 g4 g3], ... + +typedef enum WEBP_CSP_MODE { + MODE_RGB = 0, MODE_RGBA = 1, + MODE_BGR = 2, MODE_BGRA = 3, + MODE_ARGB = 4, MODE_RGBA_4444 = 5, + MODE_RGB_565 = 6, + // RGB-premultiplied transparent modes (alpha value is preserved) + MODE_rgbA = 7, + MODE_bgrA = 8, + MODE_Argb = 9, + MODE_rgbA_4444 = 10, + // YUV modes must come after RGB ones. 
+ MODE_YUV = 11, MODE_YUVA = 12, // yuv 4:2:0 + MODE_LAST = 13 +} WEBP_CSP_MODE; + +// Some useful macros: +static WEBP_INLINE int WebPIsPremultipliedMode(WEBP_CSP_MODE mode) { + return (mode == MODE_rgbA || mode == MODE_bgrA || mode == MODE_Argb || + mode == MODE_rgbA_4444); +} + +static WEBP_INLINE int WebPIsAlphaMode(WEBP_CSP_MODE mode) { + return (mode == MODE_RGBA || mode == MODE_BGRA || mode == MODE_ARGB || + mode == MODE_RGBA_4444 || mode == MODE_YUVA || + WebPIsPremultipliedMode(mode)); +} + +static WEBP_INLINE int WebPIsRGBMode(WEBP_CSP_MODE mode) { + return (mode < MODE_YUV); +} + +//------------------------------------------------------------------------------ +// WebPDecBuffer: Generic structure for describing the output sample buffer. + +struct WebPRGBABuffer { // view as RGBA + uint8_t* rgba; // pointer to RGBA samples + int stride; // stride in bytes from one scanline to the next. + size_t size; // total size of the *rgba buffer. +}; + +struct WebPYUVABuffer { // view as YUVA + uint8_t* y, *u, *v, *a; // pointer to luma, chroma U/V, alpha samples + int y_stride; // luma stride + int u_stride, v_stride; // chroma strides + int a_stride; // alpha stride + size_t y_size; // luma plane size + size_t u_size, v_size; // chroma planes size + size_t a_size; // alpha-plane size +}; + +// Output buffer +struct WebPDecBuffer { + WEBP_CSP_MODE colorspace; // Colorspace. + int width, height; // Dimensions. + int is_external_memory; // If non-zero, 'internal_memory' pointer is not + // used. If value is '2' or more, the external + // memory is considered 'slow' and multiple + // read/write will be avoided. + union { + WebPRGBABuffer RGBA; + WebPYUVABuffer YUVA; + } u; // Nameless union of buffer parameters. + uint32_t pad[4]; // padding for later use + + uint8_t* private_memory; // Internally allocated memory (only when + // is_external_memory is 0). Should not be used + // externally, but accessed via the buffer union. +}; + +// Internal, version-checked, entry point +WEBP_EXTERN int WebPInitDecBufferInternal(WebPDecBuffer*, int); + +// Initialize the structure as empty. Must be called before any other use. +// Returns false in case of version mismatch +static WEBP_INLINE int WebPInitDecBuffer(WebPDecBuffer* buffer) { + return WebPInitDecBufferInternal(buffer, WEBP_DECODER_ABI_VERSION); +} + +// Free any memory associated with the buffer. Must always be called last. +// Note: doesn't free the 'buffer' structure itself. +WEBP_EXTERN void WebPFreeDecBuffer(WebPDecBuffer* buffer); + +//------------------------------------------------------------------------------ +// Enumeration of the status codes + +typedef enum VP8StatusCode { + VP8_STATUS_OK = 0, + VP8_STATUS_OUT_OF_MEMORY, + VP8_STATUS_INVALID_PARAM, + VP8_STATUS_BITSTREAM_ERROR, + VP8_STATUS_UNSUPPORTED_FEATURE, + VP8_STATUS_SUSPENDED, + VP8_STATUS_USER_ABORT, + VP8_STATUS_NOT_ENOUGH_DATA +} VP8StatusCode; + +//------------------------------------------------------------------------------ +// Incremental decoding +// +// This API allows streamlined decoding of partial data. +// Picture can be incrementally decoded as data become available thanks to the +// WebPIDecoder object. This object can be left in a SUSPENDED state if the +// picture is only partially decoded, pending additional input. +// Code example: +// +// WebPInitDecBuffer(&output_buffer); +// output_buffer.colorspace = mode; +// ... +// WebPIDecoder* idec = WebPINewDecoder(&output_buffer); +// while (additional_data_is_available) { +// // ... 
(get additional data in some new_data[] buffer) +// status = WebPIAppend(idec, new_data, new_data_size); +// if (status != VP8_STATUS_OK && status != VP8_STATUS_SUSPENDED) { +// break; // an error occurred. +// } +// +// // The above call decodes the current available buffer. +// // Part of the image can now be refreshed by calling +// // WebPIDecGetRGB()/WebPIDecGetYUVA() etc. +// } +// WebPIDelete(idec); + +// Creates a new incremental decoder with the supplied buffer parameter. +// This output_buffer can be passed NULL, in which case a default output buffer +// is used (with MODE_RGB). Otherwise, an internal reference to 'output_buffer' +// is kept, which means that the lifespan of 'output_buffer' must be larger than +// that of the returned WebPIDecoder object. +// The supplied 'output_buffer' content MUST NOT be changed between calls to +// WebPIAppend() or WebPIUpdate() unless 'output_buffer.is_external_memory' is +// not set to 0. In such a case, it is allowed to modify the pointers, size and +// stride of output_buffer.u.RGBA or output_buffer.u.YUVA, provided they remain +// within valid bounds. +// All other fields of WebPDecBuffer MUST remain constant between calls. +// Returns NULL if the allocation failed. +WEBP_EXTERN WebPIDecoder* WebPINewDecoder(WebPDecBuffer* output_buffer); + +// This function allocates and initializes an incremental-decoder object, which +// will output the RGB/A samples specified by 'csp' into a preallocated +// buffer 'output_buffer'. The size of this buffer is at least +// 'output_buffer_size' and the stride (distance in bytes between two scanlines) +// is specified by 'output_stride'. +// Additionally, output_buffer can be passed NULL in which case the output +// buffer will be allocated automatically when the decoding starts. The +// colorspace 'csp' is taken into account for allocating this buffer. All other +// parameters are ignored. +// Returns NULL if the allocation failed, or if some parameters are invalid. +WEBP_EXTERN WebPIDecoder* WebPINewRGB( + WEBP_CSP_MODE csp, + uint8_t* output_buffer, size_t output_buffer_size, int output_stride); + +// This function allocates and initializes an incremental-decoder object, which +// will output the raw luma/chroma samples into a preallocated planes if +// supplied. The luma plane is specified by its pointer 'luma', its size +// 'luma_size' and its stride 'luma_stride'. Similarly, the chroma-u plane +// is specified by the 'u', 'u_size' and 'u_stride' parameters, and the chroma-v +// plane by 'v' and 'v_size'. And same for the alpha-plane. The 'a' pointer +// can be pass NULL in case one is not interested in the transparency plane. +// Conversely, 'luma' can be passed NULL if no preallocated planes are supplied. +// In this case, the output buffer will be automatically allocated (using +// MODE_YUVA) when decoding starts. All parameters are then ignored. +// Returns NULL if the allocation failed or if a parameter is invalid. +WEBP_EXTERN WebPIDecoder* WebPINewYUVA( + uint8_t* luma, size_t luma_size, int luma_stride, + uint8_t* u, size_t u_size, int u_stride, + uint8_t* v, size_t v_size, int v_stride, + uint8_t* a, size_t a_size, int a_stride); + +// Deprecated version of the above, without the alpha plane. +// Kept for backward compatibility. +WEBP_EXTERN WebPIDecoder* WebPINewYUV( + uint8_t* luma, size_t luma_size, int luma_stride, + uint8_t* u, size_t u_size, int u_stride, + uint8_t* v, size_t v_size, int v_stride); + +// Deletes the WebPIDecoder object and associated memory. 
Must always be called +// if WebPINewDecoder, WebPINewRGB or WebPINewYUV succeeded. +WEBP_EXTERN void WebPIDelete(WebPIDecoder* idec); + +// Copies and decodes the next available data. Returns VP8_STATUS_OK when +// the image is successfully decoded. Returns VP8_STATUS_SUSPENDED when more +// data is expected. Returns error in other cases. +WEBP_EXTERN VP8StatusCode WebPIAppend( + WebPIDecoder* idec, const uint8_t* data, size_t data_size); + +// A variant of the above function to be used when data buffer contains +// partial data from the beginning. In this case data buffer is not copied +// to the internal memory. +// Note that the value of the 'data' pointer can change between calls to +// WebPIUpdate, for instance when the data buffer is resized to fit larger data. +WEBP_EXTERN VP8StatusCode WebPIUpdate( + WebPIDecoder* idec, const uint8_t* data, size_t data_size); + +// Returns the RGB/A image decoded so far. Returns NULL if output params +// are not initialized yet. The RGB/A output type corresponds to the colorspace +// specified during call to WebPINewDecoder() or WebPINewRGB(). +// *last_y is the index of last decoded row in raster scan order. Some pointers +// (*last_y, *width etc.) can be NULL if corresponding information is not +// needed. The values in these pointers are only valid on successful (non-NULL) +// return. +WEBP_EXTERN uint8_t* WebPIDecGetRGB( + const WebPIDecoder* idec, int* last_y, + int* width, int* height, int* stride); + +// Same as above function to get a YUVA image. Returns pointer to the luma +// plane or NULL in case of error. If there is no alpha information +// the alpha pointer '*a' will be returned NULL. +WEBP_EXTERN uint8_t* WebPIDecGetYUVA( + const WebPIDecoder* idec, int* last_y, + uint8_t** u, uint8_t** v, uint8_t** a, + int* width, int* height, int* stride, int* uv_stride, int* a_stride); + +// Deprecated alpha-less version of WebPIDecGetYUVA(): it will ignore the +// alpha information (if present). Kept for backward compatibility. +static WEBP_INLINE uint8_t* WebPIDecGetYUV( + const WebPIDecoder* idec, int* last_y, uint8_t** u, uint8_t** v, + int* width, int* height, int* stride, int* uv_stride) { + return WebPIDecGetYUVA(idec, last_y, u, v, NULL, width, height, + stride, uv_stride, NULL); +} + +// Generic call to retrieve information about the displayable area. +// If non NULL, the left/right/width/height pointers are filled with the visible +// rectangular area so far. +// Returns NULL in case the incremental decoder object is in an invalid state. +// Otherwise returns the pointer to the internal representation. This structure +// is read-only, tied to WebPIDecoder's lifespan and should not be modified. +WEBP_EXTERN const WebPDecBuffer* WebPIDecodedArea( + const WebPIDecoder* idec, int* left, int* top, int* width, int* height); + +//------------------------------------------------------------------------------ +// Advanced decoding parametrization +// +// Code sample for using the advanced decoding API +/* + // A) Init a configuration object + WebPDecoderConfig config; + CHECK(WebPInitDecoderConfig(&config)); + + // B) optional: retrieve the bitstream's features. + CHECK(WebPGetFeatures(data, data_size, &config.input) == VP8_STATUS_OK); + + // C) Adjust 'config', if needed + config.no_fancy_upsampling = 1; + config.output.colorspace = MODE_BGRA; + // etc. + + // Note that you can also make config.output point to an externally + // supplied memory buffer, provided it's big enough to store the decoded + // picture. 
Otherwise, config.output will just be used to allocate memory + // and store the decoded picture. + + // D) Decode! + CHECK(WebPDecode(data, data_size, &config) == VP8_STATUS_OK); + + // E) Decoded image is now in config.output (and config.output.u.RGBA) + + // F) Reclaim memory allocated in config's object. It's safe to call + // this function even if the memory is external and wasn't allocated + // by WebPDecode(). + WebPFreeDecBuffer(&config.output); +*/ + +// Features gathered from the bitstream +struct WebPBitstreamFeatures { + int width; // Width in pixels, as read from the bitstream. + int height; // Height in pixels, as read from the bitstream. + int has_alpha; // True if the bitstream contains an alpha channel. + int has_animation; // True if the bitstream is an animation. + int format; // 0 = undefined (/mixed), 1 = lossy, 2 = lossless + + uint32_t pad[5]; // padding for later use +}; + +// Internal, version-checked, entry point +WEBP_EXTERN VP8StatusCode WebPGetFeaturesInternal( + const uint8_t*, size_t, WebPBitstreamFeatures*, int); + +// Retrieve features from the bitstream. The *features structure is filled +// with information gathered from the bitstream. +// Returns VP8_STATUS_OK when the features are successfully retrieved. Returns +// VP8_STATUS_NOT_ENOUGH_DATA when more data is needed to retrieve the +// features from headers. Returns error in other cases. +// Note: The following chunk sequences (before the raw VP8/VP8L data) are +// considered valid by this function: +// RIFF + VP8(L) +// RIFF + VP8X + (optional chunks) + VP8(L) +// ALPH + VP8 <-- Not a valid WebP format: only allowed for internal purpose. +// VP8(L) <-- Not a valid WebP format: only allowed for internal purpose. +static WEBP_INLINE VP8StatusCode WebPGetFeatures( + const uint8_t* data, size_t data_size, + WebPBitstreamFeatures* features) { + return WebPGetFeaturesInternal(data, data_size, features, + WEBP_DECODER_ABI_VERSION); +} + +// Decoding options +struct WebPDecoderOptions { + int bypass_filtering; // if true, skip the in-loop filtering + int no_fancy_upsampling; // if true, use faster pointwise upsampler + int use_cropping; // if true, cropping is applied _first_ + int crop_left, crop_top; // top-left position for cropping. + // Will be snapped to even values. + int crop_width, crop_height; // dimension of the cropping area + int use_scaling; // if true, scaling is applied _afterward_ + int scaled_width, scaled_height; // final resolution + int use_threads; // if true, use multi-threaded decoding + int dithering_strength; // dithering strength (0=Off, 100=full) + int flip; // if true, flip output vertically + int alpha_dithering_strength; // alpha dithering strength in [0..100] + + uint32_t pad[5]; // padding for later use +}; + +// Main object storing the configuration for advanced decoding. +struct WebPDecoderConfig { + WebPBitstreamFeatures input; // Immutable bitstream features (optional) + WebPDecBuffer output; // Output buffer (can point to external mem) + WebPDecoderOptions options; // Decoding options +}; + +// Internal, version-checked, entry point +WEBP_EXTERN int WebPInitDecoderConfigInternal(WebPDecoderConfig*, int); + +// Initialize the configuration as empty. This function must always be +// called first, unless WebPGetFeatures() is to be called. +// Returns false in case of mismatched version. 
+static WEBP_INLINE int WebPInitDecoderConfig(WebPDecoderConfig* config) { + return WebPInitDecoderConfigInternal(config, WEBP_DECODER_ABI_VERSION); +} + +// Instantiate a new incremental decoder object with the requested +// configuration. The bitstream can be passed using 'data' and 'data_size' +// parameter, in which case the features will be parsed and stored into +// config->input. Otherwise, 'data' can be NULL and no parsing will occur. +// Note that 'config' can be NULL too, in which case a default configuration +// is used. If 'config' is not NULL, it must outlive the WebPIDecoder object +// as some references to its fields will be used. No internal copy of 'config' +// is made. +// The return WebPIDecoder object must always be deleted calling WebPIDelete(). +// Returns NULL in case of error (and config->status will then reflect +// the error condition, if available). +WEBP_EXTERN WebPIDecoder* WebPIDecode(const uint8_t* data, size_t data_size, + WebPDecoderConfig* config); + +// Non-incremental version. This version decodes the full data at once, taking +// 'config' into account. Returns decoding status (which should be VP8_STATUS_OK +// if the decoding was successful). Note that 'config' cannot be NULL. +WEBP_EXTERN VP8StatusCode WebPDecode(const uint8_t* data, size_t data_size, + WebPDecoderConfig* config); + +#ifdef __cplusplus +} // extern "C" +#endif + +#endif // WEBP_WEBP_DECODE_H_ diff --git a/webp/demux.h b/webp/demux.h new file mode 100644 index 0000000..846eeb1 --- /dev/null +++ b/webp/demux.h @@ -0,0 +1,363 @@ +// Copyright 2012 Google Inc. All Rights Reserved. +// +// Use of this source code is governed by a BSD-style license +// that can be found in the COPYING file in the root of the source +// tree. An additional intellectual property rights grant can be found +// in the file PATENTS. All contributing project authors may +// be found in the AUTHORS file in the root of the source tree. +// ----------------------------------------------------------------------------- +// +// Demux API. +// Enables extraction of image and extended format data from WebP files. + +// Code Example: Demuxing WebP data to extract all the frames, ICC profile +// and EXIF/XMP metadata. +/* + WebPDemuxer* demux = WebPDemux(&webp_data); + + uint32_t width = WebPDemuxGetI(demux, WEBP_FF_CANVAS_WIDTH); + uint32_t height = WebPDemuxGetI(demux, WEBP_FF_CANVAS_HEIGHT); + // ... (Get information about the features present in the WebP file). + uint32_t flags = WebPDemuxGetI(demux, WEBP_FF_FORMAT_FLAGS); + + // ... (Iterate over all frames). + WebPIterator iter; + if (WebPDemuxGetFrame(demux, 1, &iter)) { + do { + // ... (Consume 'iter'; e.g. Decode 'iter.fragment' with WebPDecode(), + // ... and get other frame properties like width, height, offsets etc. + // ... see 'struct WebPIterator' below for more info). + } while (WebPDemuxNextFrame(&iter)); + WebPDemuxReleaseIterator(&iter); + } + + // ... (Extract metadata). + WebPChunkIterator chunk_iter; + if (flags & ICCP_FLAG) WebPDemuxGetChunk(demux, "ICCP", 1, &chunk_iter); + // ... (Consume the ICC profile in 'chunk_iter.chunk'). + WebPDemuxReleaseChunkIterator(&chunk_iter); + if (flags & EXIF_FLAG) WebPDemuxGetChunk(demux, "EXIF", 1, &chunk_iter); + // ... (Consume the EXIF metadata in 'chunk_iter.chunk'). + WebPDemuxReleaseChunkIterator(&chunk_iter); + if (flags & XMP_FLAG) WebPDemuxGetChunk(demux, "XMP ", 1, &chunk_iter); + // ... (Consume the XMP metadata in 'chunk_iter.chunk'). 
+ WebPDemuxReleaseChunkIterator(&chunk_iter); + WebPDemuxDelete(demux); +*/ + +#ifndef WEBP_WEBP_DEMUX_H_ +#define WEBP_WEBP_DEMUX_H_ + +#include "./decode.h" // for WEBP_CSP_MODE +#include "./mux_types.h" + +#ifdef __cplusplus +extern "C" { +#endif + +#define WEBP_DEMUX_ABI_VERSION 0x0107 // MAJOR(8b) + MINOR(8b) + +// Note: forward declaring enumerations is not allowed in (strict) C and C++, +// the types are left here for reference. +// typedef enum WebPDemuxState WebPDemuxState; +// typedef enum WebPFormatFeature WebPFormatFeature; +typedef struct WebPDemuxer WebPDemuxer; +typedef struct WebPIterator WebPIterator; +typedef struct WebPChunkIterator WebPChunkIterator; +typedef struct WebPAnimInfo WebPAnimInfo; +typedef struct WebPAnimDecoderOptions WebPAnimDecoderOptions; + +//------------------------------------------------------------------------------ + +// Returns the version number of the demux library, packed in hexadecimal using +// 8bits for each of major/minor/revision. E.g: v2.5.7 is 0x020507. +WEBP_EXTERN int WebPGetDemuxVersion(void); + +//------------------------------------------------------------------------------ +// Life of a Demux object + +typedef enum WebPDemuxState { + WEBP_DEMUX_PARSE_ERROR = -1, // An error occurred while parsing. + WEBP_DEMUX_PARSING_HEADER = 0, // Not enough data to parse full header. + WEBP_DEMUX_PARSED_HEADER = 1, // Header parsing complete, + // data may be available. + WEBP_DEMUX_DONE = 2 // Entire file has been parsed. +} WebPDemuxState; + +// Internal, version-checked, entry point +WEBP_EXTERN WebPDemuxer* WebPDemuxInternal( + const WebPData*, int, WebPDemuxState*, int); + +// Parses the full WebP file given by 'data'. For single images the WebP file +// header alone or the file header and the chunk header may be absent. +// Returns a WebPDemuxer object on successful parse, NULL otherwise. +static WEBP_INLINE WebPDemuxer* WebPDemux(const WebPData* data) { + return WebPDemuxInternal(data, 0, NULL, WEBP_DEMUX_ABI_VERSION); +} + +// Parses the possibly incomplete WebP file given by 'data'. +// If 'state' is non-NULL it will be set to indicate the status of the demuxer. +// Returns NULL in case of error or if there isn't enough data to start parsing; +// and a WebPDemuxer object on successful parse. +// Note that WebPDemuxer keeps internal pointers to 'data' memory segment. +// If this data is volatile, the demuxer object should be deleted (by calling +// WebPDemuxDelete()) and WebPDemuxPartial() called again on the new data. +// This is usually an inexpensive operation. +static WEBP_INLINE WebPDemuxer* WebPDemuxPartial( + const WebPData* data, WebPDemuxState* state) { + return WebPDemuxInternal(data, 1, state, WEBP_DEMUX_ABI_VERSION); +} + +// Frees memory associated with 'dmux'. +WEBP_EXTERN void WebPDemuxDelete(WebPDemuxer* dmux); + +//------------------------------------------------------------------------------ +// Data/information extraction. + +typedef enum WebPFormatFeature { + WEBP_FF_FORMAT_FLAGS, // bit-wise combination of WebPFeatureFlags + // corresponding to the 'VP8X' chunk (if present). + WEBP_FF_CANVAS_WIDTH, + WEBP_FF_CANVAS_HEIGHT, + WEBP_FF_LOOP_COUNT, // only relevant for animated file + WEBP_FF_BACKGROUND_COLOR, // idem. + WEBP_FF_FRAME_COUNT // Number of frames present in the demux object. + // In case of a partial demux, this is the number + // of frames seen so far, with the last frame + // possibly being partial. +} WebPFormatFeature; + +// Get the 'feature' value from the 'dmux'. 
+// NOTE: values are only valid if WebPDemux() was used or WebPDemuxPartial() +// returned a state > WEBP_DEMUX_PARSING_HEADER. +// If 'feature' is WEBP_FF_FORMAT_FLAGS, the returned value is a bit-wise +// combination of WebPFeatureFlags values. +// If 'feature' is WEBP_FF_LOOP_COUNT, WEBP_FF_BACKGROUND_COLOR, the returned +// value is only meaningful if the bitstream is animated. +WEBP_EXTERN uint32_t WebPDemuxGetI( + const WebPDemuxer* dmux, WebPFormatFeature feature); + +//------------------------------------------------------------------------------ +// Frame iteration. + +struct WebPIterator { + int frame_num; + int num_frames; // equivalent to WEBP_FF_FRAME_COUNT. + int x_offset, y_offset; // offset relative to the canvas. + int width, height; // dimensions of this frame. + int duration; // display duration in milliseconds. + WebPMuxAnimDispose dispose_method; // dispose method for the frame. + int complete; // true if 'fragment' contains a full frame. partial images + // may still be decoded with the WebP incremental decoder. + WebPData fragment; // The frame given by 'frame_num'. Note for historical + // reasons this is called a fragment. + int has_alpha; // True if the frame contains transparency. + WebPMuxAnimBlend blend_method; // Blend operation for the frame. + + uint32_t pad[2]; // padding for later use. + void* private_; // for internal use only. +}; + +// Retrieves frame 'frame_number' from 'dmux'. +// 'iter->fragment' points to the frame on return from this function. +// Setting 'frame_number' equal to 0 will return the last frame of the image. +// Returns false if 'dmux' is NULL or frame 'frame_number' is not present. +// Call WebPDemuxReleaseIterator() when use of the iterator is complete. +// NOTE: 'dmux' must persist for the lifetime of 'iter'. +WEBP_EXTERN int WebPDemuxGetFrame( + const WebPDemuxer* dmux, int frame_number, WebPIterator* iter); + +// Sets 'iter->fragment' to point to the next ('iter->frame_num' + 1) or +// previous ('iter->frame_num' - 1) frame. These functions do not loop. +// Returns true on success, false otherwise. +WEBP_EXTERN int WebPDemuxNextFrame(WebPIterator* iter); +WEBP_EXTERN int WebPDemuxPrevFrame(WebPIterator* iter); + +// Releases any memory associated with 'iter'. +// Must be called before any subsequent calls to WebPDemuxGetChunk() on the same +// iter. Also, must be called before destroying the associated WebPDemuxer with +// WebPDemuxDelete(). +WEBP_EXTERN void WebPDemuxReleaseIterator(WebPIterator* iter); + +//------------------------------------------------------------------------------ +// Chunk iteration. + +struct WebPChunkIterator { + // The current and total number of chunks with the fourcc given to + // WebPDemuxGetChunk(). + int chunk_num; + int num_chunks; + WebPData chunk; // The payload of the chunk. + + uint32_t pad[6]; // padding for later use + void* private_; +}; + +// Retrieves the 'chunk_number' instance of the chunk with id 'fourcc' from +// 'dmux'. +// 'fourcc' is a character array containing the fourcc of the chunk to return, +// e.g., "ICCP", "XMP ", "EXIF", etc. +// Setting 'chunk_number' equal to 0 will return the last chunk in a set. +// Returns true if the chunk is found, false otherwise. Image related chunk +// payloads are accessed through WebPDemuxGetFrame() and related functions. +// Call WebPDemuxReleaseChunkIterator() when use of the iterator is complete. +// NOTE: 'dmux' must persist for the lifetime of the iterator. 
+WEBP_EXTERN int WebPDemuxGetChunk(const WebPDemuxer* dmux,
+                                  const char fourcc[4], int chunk_number,
+                                  WebPChunkIterator* iter);
+
+// Sets 'iter->chunk' to point to the next ('iter->chunk_num' + 1) or previous
+// ('iter->chunk_num' - 1) chunk. These functions do not loop.
+// Returns true on success, false otherwise.
+WEBP_EXTERN int WebPDemuxNextChunk(WebPChunkIterator* iter);
+WEBP_EXTERN int WebPDemuxPrevChunk(WebPChunkIterator* iter);
+
+// Releases any memory associated with 'iter'.
+// Must be called before destroying the associated WebPDemuxer with
+// WebPDemuxDelete().
+WEBP_EXTERN void WebPDemuxReleaseChunkIterator(WebPChunkIterator* iter);
+
+//------------------------------------------------------------------------------
+// WebPAnimDecoder API
+//
+// This API allows decoding (possibly) animated WebP images.
+//
+// Code Example:
+/*
+  WebPAnimDecoderOptions dec_options;
+  WebPAnimDecoderOptionsInit(&dec_options);
+  // Tune 'dec_options' as needed.
+  WebPAnimDecoder* dec = WebPAnimDecoderNew(webp_data, &dec_options);
+  WebPAnimInfo anim_info;
+  WebPAnimDecoderGetInfo(dec, &anim_info);
+  for (uint32_t i = 0; i < anim_info.loop_count; ++i) {
+    while (WebPAnimDecoderHasMoreFrames(dec)) {
+      uint8_t* buf;
+      int timestamp;
+      WebPAnimDecoderGetNext(dec, &buf, &timestamp);
+      // ... (Render 'buf' based on 'timestamp').
+      // ... (Do NOT free 'buf', as it is owned by 'dec').
+    }
+    WebPAnimDecoderReset(dec);
+  }
+  const WebPDemuxer* demuxer = WebPAnimDecoderGetDemuxer(dec);
+  // ... (Do something using 'demuxer'; e.g. get EXIF/XMP/ICC data).
+  WebPAnimDecoderDelete(dec);
+*/
+
+typedef struct WebPAnimDecoder WebPAnimDecoder;  // Main opaque object.
+
+// Global options.
+struct WebPAnimDecoderOptions {
+  // Output colorspace. Only the following modes are supported:
+  // MODE_RGBA, MODE_BGRA, MODE_rgbA and MODE_bgrA.
+  WEBP_CSP_MODE color_mode;
+  int use_threads;           // If true, use multi-threaded decoding.
+  uint32_t padding[7];       // Padding for later use.
+};
+
+// Internal, version-checked, entry point.
+WEBP_EXTERN int WebPAnimDecoderOptionsInitInternal(
+    WebPAnimDecoderOptions*, int);
+
+// Should always be called, to initialize a fresh WebPAnimDecoderOptions
+// structure before modification. Returns false in case of version mismatch.
+// WebPAnimDecoderOptionsInit() must have succeeded before using the
+// 'dec_options' object.
+static WEBP_INLINE int WebPAnimDecoderOptionsInit(
+    WebPAnimDecoderOptions* dec_options) {
+  return WebPAnimDecoderOptionsInitInternal(dec_options,
+                                            WEBP_DEMUX_ABI_VERSION);
+}
+
+// Internal, version-checked, entry point.
+WEBP_EXTERN WebPAnimDecoder* WebPAnimDecoderNewInternal(
+    const WebPData*, const WebPAnimDecoderOptions*, int);
+
+// Creates and initializes a WebPAnimDecoder object.
+// Parameters:
+//   webp_data - (in) WebP bitstream. This should remain unchanged during the
+//                    lifetime of the output WebPAnimDecoder object.
+//   dec_options - (in) decoding options. Can be passed NULL to choose
+//                      reasonable defaults (in particular, color mode MODE_RGBA
+//                      will be picked).
+// Returns:
+//   A pointer to the newly created WebPAnimDecoder object, or NULL in case of
+//   parsing error, invalid option or memory error.
+static WEBP_INLINE WebPAnimDecoder* WebPAnimDecoderNew(
+    const WebPData* webp_data, const WebPAnimDecoderOptions* dec_options) {
+  return WebPAnimDecoderNewInternal(webp_data, dec_options,
+                                    WEBP_DEMUX_ABI_VERSION);
+}
+
+// Global information about the animation.
+struct WebPAnimInfo { + uint32_t canvas_width; + uint32_t canvas_height; + uint32_t loop_count; + uint32_t bgcolor; + uint32_t frame_count; + uint32_t pad[4]; // padding for later use +}; + +// Get global information about the animation. +// Parameters: +// dec - (in) decoder instance to get information from. +// info - (out) global information fetched from the animation. +// Returns: +// True on success. +WEBP_EXTERN int WebPAnimDecoderGetInfo(const WebPAnimDecoder* dec, + WebPAnimInfo* info); + +// Fetch the next frame from 'dec' based on options supplied to +// WebPAnimDecoderNew(). This will be a fully reconstructed canvas of size +// 'canvas_width * 4 * canvas_height', and not just the frame sub-rectangle. The +// returned buffer 'buf' is valid only until the next call to +// WebPAnimDecoderGetNext(), WebPAnimDecoderReset() or WebPAnimDecoderDelete(). +// Parameters: +// dec - (in/out) decoder instance from which the next frame is to be fetched. +// buf - (out) decoded frame. +// timestamp - (out) timestamp of the frame in milliseconds. +// Returns: +// False if any of the arguments are NULL, or if there is a parsing or +// decoding error, or if there are no more frames. Otherwise, returns true. +WEBP_EXTERN int WebPAnimDecoderGetNext(WebPAnimDecoder* dec, + uint8_t** buf, int* timestamp); + +// Check if there are more frames left to decode. +// Parameters: +// dec - (in) decoder instance to be checked. +// Returns: +// True if 'dec' is not NULL and some frames are yet to be decoded. +// Otherwise, returns false. +WEBP_EXTERN int WebPAnimDecoderHasMoreFrames(const WebPAnimDecoder* dec); + +// Resets the WebPAnimDecoder object, so that next call to +// WebPAnimDecoderGetNext() will restart decoding from 1st frame. This would be +// helpful when all frames need to be decoded multiple times (e.g. +// info.loop_count times) without destroying and recreating the 'dec' object. +// Parameters: +// dec - (in/out) decoder instance to be reset +WEBP_EXTERN void WebPAnimDecoderReset(WebPAnimDecoder* dec); + +// Grab the internal demuxer object. +// Getting the demuxer object can be useful if one wants to use operations only +// available through demuxer; e.g. to get XMP/EXIF/ICC metadata. The returned +// demuxer object is owned by 'dec' and is valid only until the next call to +// WebPAnimDecoderDelete(). +// +// Parameters: +// dec - (in) decoder instance from which the demuxer object is to be fetched. +WEBP_EXTERN const WebPDemuxer* WebPAnimDecoderGetDemuxer( + const WebPAnimDecoder* dec); + +// Deletes the WebPAnimDecoder object. +// Parameters: +// dec - (in/out) decoder instance to be deleted +WEBP_EXTERN void WebPAnimDecoderDelete(WebPAnimDecoder* dec); + +#ifdef __cplusplus +} // extern "C" +#endif + +#endif // WEBP_WEBP_DEMUX_H_ diff --git a/webp/encode.h b/webp/encode.h new file mode 100644 index 0000000..56b68e2 --- /dev/null +++ b/webp/encode.h @@ -0,0 +1,552 @@ +// Copyright 2011 Google Inc. All Rights Reserved. +// +// Use of this source code is governed by a BSD-style license +// that can be found in the COPYING file in the root of the source +// tree. An additional intellectual property rights grant can be found +// in the file PATENTS. All contributing project authors may +// be found in the AUTHORS file in the root of the source tree. 
+// ----------------------------------------------------------------------------- +// +// WebP encoder: main interface +// +// Author: Skal (pascal.massimino@gmail.com) + +#ifndef WEBP_WEBP_ENCODE_H_ +#define WEBP_WEBP_ENCODE_H_ + +#include "./types.h" + +#ifdef __cplusplus +extern "C" { +#endif + +#define WEBP_ENCODER_ABI_VERSION 0x020f // MAJOR(8b) + MINOR(8b) + +// Note: forward declaring enumerations is not allowed in (strict) C and C++, +// the types are left here for reference. +// typedef enum WebPImageHint WebPImageHint; +// typedef enum WebPEncCSP WebPEncCSP; +// typedef enum WebPPreset WebPPreset; +// typedef enum WebPEncodingError WebPEncodingError; +typedef struct WebPConfig WebPConfig; +typedef struct WebPPicture WebPPicture; // main structure for I/O +typedef struct WebPAuxStats WebPAuxStats; +typedef struct WebPMemoryWriter WebPMemoryWriter; + +// Return the encoder's version number, packed in hexadecimal using 8bits for +// each of major/minor/revision. E.g: v2.5.7 is 0x020507. +WEBP_EXTERN int WebPGetEncoderVersion(void); + +//------------------------------------------------------------------------------ +// One-stop-shop call! No questions asked: + +// Returns the size of the compressed data (pointed to by *output), or 0 if +// an error occurred. The compressed data must be released by the caller +// using the call 'WebPFree(*output)'. +// These functions compress using the lossy format, and the quality_factor +// can go from 0 (smaller output, lower quality) to 100 (best quality, +// larger output). +WEBP_EXTERN size_t WebPEncodeRGB(const uint8_t* rgb, + int width, int height, int stride, + float quality_factor, uint8_t** output); +WEBP_EXTERN size_t WebPEncodeBGR(const uint8_t* bgr, + int width, int height, int stride, + float quality_factor, uint8_t** output); +WEBP_EXTERN size_t WebPEncodeRGBA(const uint8_t* rgba, + int width, int height, int stride, + float quality_factor, uint8_t** output); +WEBP_EXTERN size_t WebPEncodeBGRA(const uint8_t* bgra, + int width, int height, int stride, + float quality_factor, uint8_t** output); + +// These functions are the equivalent of the above, but compressing in a +// lossless manner. Files are usually larger than lossy format, but will +// not suffer any compression loss. +// Note these functions, like the lossy versions, use the library's default +// settings. For lossless this means 'exact' is disabled. RGB values in +// transparent areas will be modified to improve compression. To avoid this, +// use WebPEncode() and set WebPConfig::exact to 1. +WEBP_EXTERN size_t WebPEncodeLosslessRGB(const uint8_t* rgb, + int width, int height, int stride, + uint8_t** output); +WEBP_EXTERN size_t WebPEncodeLosslessBGR(const uint8_t* bgr, + int width, int height, int stride, + uint8_t** output); +WEBP_EXTERN size_t WebPEncodeLosslessRGBA(const uint8_t* rgba, + int width, int height, int stride, + uint8_t** output); +WEBP_EXTERN size_t WebPEncodeLosslessBGRA(const uint8_t* bgra, + int width, int height, int stride, + uint8_t** output); + +//------------------------------------------------------------------------------ +// Coding parameters + +// Image characteristics hint for the underlying encoder. +typedef enum WebPImageHint { + WEBP_HINT_DEFAULT = 0, // default preset. + WEBP_HINT_PICTURE, // digital picture, like portrait, inner shot + WEBP_HINT_PHOTO, // outdoor photograph, with natural lighting + WEBP_HINT_GRAPH, // Discrete tone image (graph, map-tile etc). + WEBP_HINT_LAST +} WebPImageHint; + +// Compression parameters. 
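As a quick illustration of the one-call API above (a sketch only; it assumes a tightly packed RGB buffer from the caller, so the stride is simply `width * 3`):

```cpp
#include <cstddef>
#include <cstdint>
#include "webp/encode.h"

// Returns a lossy WebP payload (release it with WebPFree()) or NULL on failure.
// The lossless variant, WebPEncodeLosslessRGB(), takes the same arguments
// minus the quality factor.
uint8_t* CompressRgbToWebP(const uint8_t* rgb, int width, int height,
                           float quality, size_t* out_size) {
  uint8_t* output = NULL;
  *out_size = WebPEncodeRGB(rgb, width, height, /*stride=*/width * 3,
                            quality, &output);
  return (*out_size > 0) ? output : NULL;   // 0 means the encoder failed
}
```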
+struct WebPConfig { + int lossless; // Lossless encoding (0=lossy(default), 1=lossless). + float quality; // between 0 and 100. For lossy, 0 gives the smallest + // size and 100 the largest. For lossless, this + // parameter is the amount of effort put into the + // compression: 0 is the fastest but gives larger + // files compared to the slowest, but best, 100. + int method; // quality/speed trade-off (0=fast, 6=slower-better) + + WebPImageHint image_hint; // Hint for image type (lossless only for now). + + int target_size; // if non-zero, set the desired target size in bytes. + // Takes precedence over the 'compression' parameter. + float target_PSNR; // if non-zero, specifies the minimal distortion to + // try to achieve. Takes precedence over target_size. + int segments; // maximum number of segments to use, in [1..4] + int sns_strength; // Spatial Noise Shaping. 0=off, 100=maximum. + int filter_strength; // range: [0 = off .. 100 = strongest] + int filter_sharpness; // range: [0 = off .. 7 = least sharp] + int filter_type; // filtering type: 0 = simple, 1 = strong (only used + // if filter_strength > 0 or autofilter > 0) + int autofilter; // Auto adjust filter's strength [0 = off, 1 = on] + int alpha_compression; // Algorithm for encoding the alpha plane (0 = none, + // 1 = compressed with WebP lossless). Default is 1. + int alpha_filtering; // Predictive filtering method for alpha plane. + // 0: none, 1: fast, 2: best. Default if 1. + int alpha_quality; // Between 0 (smallest size) and 100 (lossless). + // Default is 100. + int pass; // number of entropy-analysis passes (in [1..10]). + + int show_compressed; // if true, export the compressed picture back. + // In-loop filtering is not applied. + int preprocessing; // preprocessing filter: + // 0=none, 1=segment-smooth, 2=pseudo-random dithering + int partitions; // log2(number of token partitions) in [0..3]. Default + // is set to 0 for easier progressive decoding. + int partition_limit; // quality degradation allowed to fit the 512k limit + // on prediction modes coding (0: no degradation, + // 100: maximum possible degradation). + int emulate_jpeg_size; // If true, compression parameters will be remapped + // to better match the expected output size from + // JPEG compression. Generally, the output size will + // be similar but the degradation will be lower. + int thread_level; // If non-zero, try and use multi-threaded encoding. + int low_memory; // If set, reduce memory usage (but increase CPU use). + + int near_lossless; // Near lossless encoding [0 = max loss .. 100 = off + // (default)]. + int exact; // if non-zero, preserve the exact RGB values under + // transparent area. Otherwise, discard this invisible + // RGB information for better compression. The default + // value is 0. + + int use_delta_palette; // reserved for future lossless feature + int use_sharp_yuv; // if needed, use sharp (and slow) RGB->YUV conversion + + int qmin; // minimum permissible quality factor + int qmax; // maximum permissible quality factor +}; + +// Enumerate some predefined settings for WebPConfig, depending on the type +// of source picture. These presets are used when calling WebPConfigPreset(). +typedef enum WebPPreset { + WEBP_PRESET_DEFAULT = 0, // default preset. 
+ WEBP_PRESET_PICTURE, // digital picture, like portrait, inner shot + WEBP_PRESET_PHOTO, // outdoor photograph, with natural lighting + WEBP_PRESET_DRAWING, // hand or line drawing, with high-contrast details + WEBP_PRESET_ICON, // small-sized colorful images + WEBP_PRESET_TEXT // text-like +} WebPPreset; + +// Internal, version-checked, entry point +WEBP_EXTERN int WebPConfigInitInternal(WebPConfig*, WebPPreset, float, int); + +// Should always be called, to initialize a fresh WebPConfig structure before +// modification. Returns false in case of version mismatch. WebPConfigInit() +// must have succeeded before using the 'config' object. +// Note that the default values are lossless=0 and quality=75. +static WEBP_INLINE int WebPConfigInit(WebPConfig* config) { + return WebPConfigInitInternal(config, WEBP_PRESET_DEFAULT, 75.f, + WEBP_ENCODER_ABI_VERSION); +} + +// This function will initialize the configuration according to a predefined +// set of parameters (referred to by 'preset') and a given quality factor. +// This function can be called as a replacement to WebPConfigInit(). Will +// return false in case of error. +static WEBP_INLINE int WebPConfigPreset(WebPConfig* config, + WebPPreset preset, float quality) { + return WebPConfigInitInternal(config, preset, quality, + WEBP_ENCODER_ABI_VERSION); +} + +// Activate the lossless compression mode with the desired efficiency level +// between 0 (fastest, lowest compression) and 9 (slower, best compression). +// A good default level is '6', providing a fair tradeoff between compression +// speed and final compressed size. +// This function will overwrite several fields from config: 'method', 'quality' +// and 'lossless'. Returns false in case of parameter error. +WEBP_EXTERN int WebPConfigLosslessPreset(WebPConfig* config, int level); + +// Returns true if 'config' is non-NULL and all configuration parameters are +// within their valid ranges. +WEBP_EXTERN int WebPValidateConfig(const WebPConfig* config); + +//------------------------------------------------------------------------------ +// Input / Output +// Structure for storing auxiliary statistics. + +struct WebPAuxStats { + int coded_size; // final size + + float PSNR[5]; // peak-signal-to-noise ratio for Y/U/V/All/Alpha + int block_count[3]; // number of intra4/intra16/skipped macroblocks + int header_bytes[2]; // approximate number of bytes spent for header + // and mode-partition #0 + int residual_bytes[3][4]; // approximate number of bytes spent for + // DC/AC/uv coefficients for each (0..3) segments. + int segment_size[4]; // number of macroblocks in each segments + int segment_quant[4]; // quantizer values for each segments + int segment_level[4]; // filtering strength for each segments [0..63] + + int alpha_data_size; // size of the transparency data + int layer_data_size; // size of the enhancement layer data + + // lossless encoder statistics + uint32_t lossless_features; // bit0:predictor bit1:cross-color transform + // bit2:subtract-green bit3:color indexing + int histogram_bits; // number of precision bits of histogram + int transform_bits; // precision bits for transform + int cache_bits; // number of bits for color cache lookup + int palette_size; // number of color in palette, if used + int lossless_size; // final lossless size + int lossless_hdr_size; // lossless header (transform, huffman etc) size + int lossless_data_size; // lossless image data size + + uint32_t pad[2]; // padding for later use +}; + +// Signature for output function. 
Should return true if writing was successful. +// data/data_size is the segment of data to write, and 'picture' is for +// reference (and so one can make use of picture->custom_ptr). +typedef int (*WebPWriterFunction)(const uint8_t* data, size_t data_size, + const WebPPicture* picture); + +// WebPMemoryWrite: a special WebPWriterFunction that writes to memory using +// the following WebPMemoryWriter object (to be set as a custom_ptr). +struct WebPMemoryWriter { + uint8_t* mem; // final buffer (of size 'max_size', larger than 'size'). + size_t size; // final size + size_t max_size; // total capacity + uint32_t pad[1]; // padding for later use +}; + +// The following must be called first before any use. +WEBP_EXTERN void WebPMemoryWriterInit(WebPMemoryWriter* writer); + +// The following must be called to deallocate writer->mem memory. The 'writer' +// object itself is not deallocated. +WEBP_EXTERN void WebPMemoryWriterClear(WebPMemoryWriter* writer); +// The custom writer to be used with WebPMemoryWriter as custom_ptr. Upon +// completion, writer.mem and writer.size will hold the coded data. +// writer.mem must be freed by calling WebPMemoryWriterClear. +WEBP_EXTERN int WebPMemoryWrite(const uint8_t* data, size_t data_size, + const WebPPicture* picture); + +// Progress hook, called from time to time to report progress. It can return +// false to request an abort of the encoding process, or true otherwise if +// everything is OK. +typedef int (*WebPProgressHook)(int percent, const WebPPicture* picture); + +// Color spaces. +typedef enum WebPEncCSP { + // chroma sampling + WEBP_YUV420 = 0, // 4:2:0 + WEBP_YUV420A = 4, // alpha channel variant + WEBP_CSP_UV_MASK = 3, // bit-mask to get the UV sampling factors + WEBP_CSP_ALPHA_BIT = 4 // bit that is set if alpha is present +} WebPEncCSP; + +// Encoding error conditions. +typedef enum WebPEncodingError { + VP8_ENC_OK = 0, + VP8_ENC_ERROR_OUT_OF_MEMORY, // memory error allocating objects + VP8_ENC_ERROR_BITSTREAM_OUT_OF_MEMORY, // memory error while flushing bits + VP8_ENC_ERROR_NULL_PARAMETER, // a pointer parameter is NULL + VP8_ENC_ERROR_INVALID_CONFIGURATION, // configuration is invalid + VP8_ENC_ERROR_BAD_DIMENSION, // picture has invalid width/height + VP8_ENC_ERROR_PARTITION0_OVERFLOW, // partition is bigger than 512k + VP8_ENC_ERROR_PARTITION_OVERFLOW, // partition is bigger than 16M + VP8_ENC_ERROR_BAD_WRITE, // error while flushing bytes + VP8_ENC_ERROR_FILE_TOO_BIG, // file is bigger than 4G + VP8_ENC_ERROR_USER_ABORT, // abort request by user + VP8_ENC_ERROR_LAST // list terminator. always last. +} WebPEncodingError; + +// maximum width/height allowed (inclusive), in pixels +#define WEBP_MAX_DIMENSION 16383 + +// Main exchange structure (input samples, output bytes, statistics) +// +// Once WebPPictureInit() has been called, it's ok to make all the INPUT fields +// (use_argb, y/u/v, argb, ...) point to user-owned data, even if +// WebPPictureAlloc() has been called. Depending on the value use_argb, +// it's guaranteed that either *argb or *y/*u/*v content will be kept untouched. +struct WebPPicture { + // INPUT + ////////////// + // Main flag for encoder selecting between ARGB or YUV input. + // It is recommended to use ARGB input (*argb, argb_stride) for lossless + // compression, and YUV input (*y, *u, *v, etc.) for lossy compression + // since these are the respective native colorspace for these formats. 
+ int use_argb; + + // YUV input (mostly used for input to lossy compression) + WebPEncCSP colorspace; // colorspace: should be YUV420 for now (=Y'CbCr). + int width, height; // dimensions (less or equal to WEBP_MAX_DIMENSION) + uint8_t* y, *u, *v; // pointers to luma/chroma planes. + int y_stride, uv_stride; // luma/chroma strides. + uint8_t* a; // pointer to the alpha plane + int a_stride; // stride of the alpha plane + uint32_t pad1[2]; // padding for later use + + // ARGB input (mostly used for input to lossless compression) + uint32_t* argb; // Pointer to argb (32 bit) plane. + int argb_stride; // This is stride in pixels units, not bytes. + uint32_t pad2[3]; // padding for later use + + // OUTPUT + /////////////// + // Byte-emission hook, to store compressed bytes as they are ready. + WebPWriterFunction writer; // can be NULL + void* custom_ptr; // can be used by the writer. + + // map for extra information (only for lossy compression mode) + int extra_info_type; // 1: intra type, 2: segment, 3: quant + // 4: intra-16 prediction mode, + // 5: chroma prediction mode, + // 6: bit cost, 7: distortion + uint8_t* extra_info; // if not NULL, points to an array of size + // ((width + 15) / 16) * ((height + 15) / 16) that + // will be filled with a macroblock map, depending + // on extra_info_type. + + // STATS AND REPORTS + /////////////////////////// + // Pointer to side statistics (updated only if not NULL) + WebPAuxStats* stats; + + // Error code for the latest error encountered during encoding + WebPEncodingError error_code; + + // If not NULL, report progress during encoding. + WebPProgressHook progress_hook; + + void* user_data; // this field is free to be set to any value and + // used during callbacks (like progress-report e.g.). + + uint32_t pad3[3]; // padding for later use + + // Unused for now + uint8_t* pad4, *pad5; + uint32_t pad6[8]; // padding for later use + + // PRIVATE FIELDS + //////////////////// + void* memory_; // row chunk of memory for yuva planes + void* memory_argb_; // and for argb too. + void* pad7[2]; // padding for later use +}; + +// Internal, version-checked, entry point +WEBP_EXTERN int WebPPictureInitInternal(WebPPicture*, int); + +// Should always be called, to initialize the structure. Returns false in case +// of version mismatch. WebPPictureInit() must have succeeded before using the +// 'picture' object. +// Note that, by default, use_argb is false and colorspace is WEBP_YUV420. +static WEBP_INLINE int WebPPictureInit(WebPPicture* picture) { + return WebPPictureInitInternal(picture, WEBP_ENCODER_ABI_VERSION); +} + +//------------------------------------------------------------------------------ +// WebPPicture utils + +// Convenience allocation / deallocation based on picture->width/height: +// Allocate y/u/v buffers as per colorspace/width/height specification. +// Note! This function will free the previous buffer if needed. +// Returns false in case of memory error. +WEBP_EXTERN int WebPPictureAlloc(WebPPicture* picture); + +// Release the memory allocated by WebPPictureAlloc() or WebPPictureImport*(). +// Note that this function does _not_ free the memory used by the 'picture' +// object itself. +// Besides memory (which is reclaimed) all other fields of 'picture' are +// preserved. +WEBP_EXTERN void WebPPictureFree(WebPPicture* picture); + +// Copy the pixels of *src into *dst, using WebPPictureAlloc. Upon return, *dst +// will fully own the copied pixels (this is not a view). 
The 'dst' picture need +// not be initialized as its content is overwritten. +// Returns false in case of memory allocation error. +WEBP_EXTERN int WebPPictureCopy(const WebPPicture* src, WebPPicture* dst); + +// Compute the single distortion for packed planes of samples. +// 'src' will be compared to 'ref', and the raw distortion stored into +// '*distortion'. The refined metric (log(MSE), log(1 - ssim),...' will be +// stored in '*result'. +// 'x_step' is the horizontal stride (in bytes) between samples. +// 'src/ref_stride' is the byte distance between rows. +// Returns false in case of error (bad parameter, memory allocation error, ...). +WEBP_EXTERN int WebPPlaneDistortion(const uint8_t* src, size_t src_stride, + const uint8_t* ref, size_t ref_stride, + int width, int height, + size_t x_step, + int type, // 0 = PSNR, 1 = SSIM, 2 = LSIM + float* distortion, float* result); + +// Compute PSNR, SSIM or LSIM distortion metric between two pictures. Results +// are in dB, stored in result[] in the B/G/R/A/All order. The distortion is +// always performed using ARGB samples. Hence if the input is YUV(A), the +// picture will be internally converted to ARGB (just for the measurement). +// Warning: this function is rather CPU-intensive. +WEBP_EXTERN int WebPPictureDistortion( + const WebPPicture* src, const WebPPicture* ref, + int metric_type, // 0 = PSNR, 1 = SSIM, 2 = LSIM + float result[5]); + +// self-crops a picture to the rectangle defined by top/left/width/height. +// Returns false in case of memory allocation error, or if the rectangle is +// outside of the source picture. +// The rectangle for the view is defined by the top-left corner pixel +// coordinates (left, top) as well as its width and height. This rectangle +// must be fully be comprised inside the 'src' source picture. If the source +// picture uses the YUV420 colorspace, the top and left coordinates will be +// snapped to even values. +WEBP_EXTERN int WebPPictureCrop(WebPPicture* picture, + int left, int top, int width, int height); + +// Extracts a view from 'src' picture into 'dst'. The rectangle for the view +// is defined by the top-left corner pixel coordinates (left, top) as well +// as its width and height. This rectangle must be fully be comprised inside +// the 'src' source picture. If the source picture uses the YUV420 colorspace, +// the top and left coordinates will be snapped to even values. +// Picture 'src' must out-live 'dst' picture. Self-extraction of view is allowed +// ('src' equal to 'dst') as a mean of fast-cropping (but note that doing so, +// the original dimension will be lost). Picture 'dst' need not be initialized +// with WebPPictureInit() if it is different from 'src', since its content will +// be overwritten. +// Returns false in case of invalid parameters. +WEBP_EXTERN int WebPPictureView(const WebPPicture* src, + int left, int top, int width, int height, + WebPPicture* dst); + +// Returns true if the 'picture' is actually a view and therefore does +// not own the memory for pixels. +WEBP_EXTERN int WebPPictureIsView(const WebPPicture* picture); + +// Rescale a picture to new dimension width x height. +// If either 'width' or 'height' (but not both) is 0 the corresponding +// dimension will be calculated preserving the aspect ratio. +// No gamma correction is applied. +// Returns false in case of error (invalid parameter or insufficient memory). +WEBP_EXTERN int WebPPictureRescale(WebPPicture* picture, int width, int height); + +// Colorspace conversion function to import RGB samples. 
+// Previous buffer will be free'd, if any. +// *rgb buffer should have a size of at least height * rgb_stride. +// Returns false in case of memory error. +WEBP_EXTERN int WebPPictureImportRGB( + WebPPicture* picture, const uint8_t* rgb, int rgb_stride); +// Same, but for RGBA buffer. +WEBP_EXTERN int WebPPictureImportRGBA( + WebPPicture* picture, const uint8_t* rgba, int rgba_stride); +// Same, but for RGBA buffer. Imports the RGB direct from the 32-bit format +// input buffer ignoring the alpha channel. Avoids needing to copy the data +// to a temporary 24-bit RGB buffer to import the RGB only. +WEBP_EXTERN int WebPPictureImportRGBX( + WebPPicture* picture, const uint8_t* rgbx, int rgbx_stride); + +// Variants of the above, but taking BGR(A|X) input. +WEBP_EXTERN int WebPPictureImportBGR( + WebPPicture* picture, const uint8_t* bgr, int bgr_stride); +WEBP_EXTERN int WebPPictureImportBGRA( + WebPPicture* picture, const uint8_t* bgra, int bgra_stride); +WEBP_EXTERN int WebPPictureImportBGRX( + WebPPicture* picture, const uint8_t* bgrx, int bgrx_stride); + +// Converts picture->argb data to the YUV420A format. The 'colorspace' +// parameter is deprecated and should be equal to WEBP_YUV420. +// Upon return, picture->use_argb is set to false. The presence of real +// non-opaque transparent values is detected, and 'colorspace' will be +// adjusted accordingly. Note that this method is lossy. +// Returns false in case of error. +WEBP_EXTERN int WebPPictureARGBToYUVA(WebPPicture* picture, + WebPEncCSP /*colorspace = WEBP_YUV420*/); + +// Same as WebPPictureARGBToYUVA(), but the conversion is done using +// pseudo-random dithering with a strength 'dithering' between +// 0.0 (no dithering) and 1.0 (maximum dithering). This is useful +// for photographic picture. +WEBP_EXTERN int WebPPictureARGBToYUVADithered( + WebPPicture* picture, WebPEncCSP colorspace, float dithering); + +// Performs 'sharp' RGBA->YUVA420 downsampling and colorspace conversion. +// Downsampling is handled with extra care in case of color clipping. This +// method is roughly 2x slower than WebPPictureARGBToYUVA() but produces better +// and sharper YUV representation. +// Returns false in case of error. +WEBP_EXTERN int WebPPictureSharpARGBToYUVA(WebPPicture* picture); +// kept for backward compatibility: +WEBP_EXTERN int WebPPictureSmartARGBToYUVA(WebPPicture* picture); + +// Converts picture->yuv to picture->argb and sets picture->use_argb to true. +// The input format must be YUV_420 or YUV_420A. The conversion from YUV420 to +// ARGB incurs a small loss too. +// Note that the use of this colorspace is discouraged if one has access to the +// raw ARGB samples, since using YUV420 is comparatively lossy. +// Returns false in case of error. +WEBP_EXTERN int WebPPictureYUVAToARGB(WebPPicture* picture); + +// Helper function: given a width x height plane of RGBA or YUV(A) samples +// clean-up or smoothen the YUV or RGB samples under fully transparent area, +// to help compressibility (no guarantee, though). +WEBP_EXTERN void WebPCleanupTransparentArea(WebPPicture* picture); + +// Scan the picture 'picture' for the presence of non fully opaque alpha values. +// Returns true in such case. Otherwise returns false (indicating that the +// alpha plane can be ignored altogether e.g.). +WEBP_EXTERN int WebPPictureHasTransparency(const WebPPicture* picture); + +// Remove the transparency information (if present) by blending the color with +// the background color 'background_rgb' (specified as 24bit RGB triplet). 
+// After this call, all alpha values are reset to 0xff. +WEBP_EXTERN void WebPBlendAlpha(WebPPicture* picture, uint32_t background_rgb); + +//------------------------------------------------------------------------------ +// Main call + +// Main encoding call, after config and picture have been initialized. +// 'picture' must be less than 16384x16384 in dimension (cf WEBP_MAX_DIMENSION), +// and the 'config' object must be a valid one. +// Returns false in case of error, true otherwise. +// In case of error, picture->error_code is updated accordingly. +// 'picture' can hold the source samples in both YUV(A) or ARGB input, depending +// on the value of 'picture->use_argb'. It is highly recommended to use +// the former for lossy encoding, and the latter for lossless encoding +// (when config.lossless is true). Automatic conversion from one format to +// another is provided but they both incur some loss. +WEBP_EXTERN int WebPEncode(const WebPConfig* config, WebPPicture* picture); + +//------------------------------------------------------------------------------ + +#ifdef __cplusplus +} // extern "C" +#endif + +#endif // WEBP_WEBP_ENCODE_H_ diff --git a/webp/format_constants.h b/webp/format_constants.h new file mode 100644 index 0000000..eca6981 --- /dev/null +++ b/webp/format_constants.h @@ -0,0 +1,87 @@ +// Copyright 2012 Google Inc. All Rights Reserved. +// +// Use of this source code is governed by a BSD-style license +// that can be found in the COPYING file in the root of the source +// tree. An additional intellectual property rights grant can be found +// in the file PATENTS. All contributing project authors may +// be found in the AUTHORS file in the root of the source tree. +// ----------------------------------------------------------------------------- +// +// Internal header for constants related to WebP file format. +// +// Author: Urvang (urvang@google.com) + +#ifndef WEBP_WEBP_FORMAT_CONSTANTS_H_ +#define WEBP_WEBP_FORMAT_CONSTANTS_H_ + +// Create fourcc of the chunk from the chunk tag characters. +#define MKFOURCC(a, b, c, d) ((a) | (b) << 8 | (c) << 16 | (uint32_t)(d) << 24) + +// VP8 related constants. +#define VP8_SIGNATURE 0x9d012a // Signature in VP8 data. +#define VP8_MAX_PARTITION0_SIZE (1 << 19) // max size of mode partition +#define VP8_MAX_PARTITION_SIZE (1 << 24) // max size for token partition +#define VP8_FRAME_HEADER_SIZE 10 // Size of the frame header within VP8 data. + +// VP8L related constants. +#define VP8L_SIGNATURE_SIZE 1 // VP8L signature size. +#define VP8L_MAGIC_BYTE 0x2f // VP8L signature byte. +#define VP8L_IMAGE_SIZE_BITS 14 // Number of bits used to store + // width and height. +#define VP8L_VERSION_BITS 3 // 3 bits reserved for version. +#define VP8L_VERSION 0 // version 0 +#define VP8L_FRAME_HEADER_SIZE 5 // Size of the VP8L frame header. + +#define MAX_PALETTE_SIZE 256 +#define MAX_CACHE_BITS 11 +#define HUFFMAN_CODES_PER_META_CODE 5 +#define ARGB_BLACK 0xff000000 + +#define DEFAULT_CODE_LENGTH 8 +#define MAX_ALLOWED_CODE_LENGTH 15 + +#define NUM_LITERAL_CODES 256 +#define NUM_LENGTH_CODES 24 +#define NUM_DISTANCE_CODES 40 +#define CODE_LENGTH_CODES 19 + +#define MIN_HUFFMAN_BITS 2 // min number of Huffman bits +#define MAX_HUFFMAN_BITS 9 // max number of Huffman bits + +#define TRANSFORM_PRESENT 1 // The bit to be written when next data + // to be read is a transform. +#define NUM_TRANSFORMS 4 // Maximum number of allowed transform + // in a bitstream. 
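For finer control than the one-call functions, the WebPConfig/WebPPicture path documented in encode.h above can be combined with the in-memory writer. A sketch, assuming a caller-supplied RGBA buffer with stride `width * 4`:

```cpp
#include <cstdint>
#include "webp/encode.h"

// Lossless encode of an RGBA buffer. On success the payload is left in
// writer->mem / writer->size and must be released with WebPMemoryWriterClear().
int EncodeRgbaLossless(const uint8_t* rgba, int width, int height,
                       WebPMemoryWriter* writer) {
  WebPConfig config;
  if (!WebPConfigPreset(&config, WEBP_PRESET_DEFAULT, 75.0f)) return 0;
  config.lossless = 1;
  config.exact = 1;                       // keep RGB values under transparency
  if (!WebPValidateConfig(&config)) return 0;

  WebPPicture picture;
  if (!WebPPictureInit(&picture)) return 0;
  picture.use_argb = 1;                   // ARGB is the native lossless input
  picture.width = width;
  picture.height = height;
  if (!WebPPictureImportRGBA(&picture, rgba, width * 4)) return 0;

  WebPMemoryWriterInit(writer);
  picture.writer = WebPMemoryWrite;       // collect the bitstream in memory
  picture.custom_ptr = writer;

  const int ok = WebPEncode(&config, &picture);
  WebPPictureFree(&picture);              // frees the pixels, not writer->mem
  if (!ok) WebPMemoryWriterClear(writer);
  return ok;
}
```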
+typedef enum { + PREDICTOR_TRANSFORM = 0, + CROSS_COLOR_TRANSFORM = 1, + SUBTRACT_GREEN = 2, + COLOR_INDEXING_TRANSFORM = 3 +} VP8LImageTransformType; + +// Alpha related constants. +#define ALPHA_HEADER_LEN 1 +#define ALPHA_NO_COMPRESSION 0 +#define ALPHA_LOSSLESS_COMPRESSION 1 +#define ALPHA_PREPROCESSED_LEVELS 1 + +// Mux related constants. +#define TAG_SIZE 4 // Size of a chunk tag (e.g. "VP8L"). +#define CHUNK_SIZE_BYTES 4 // Size needed to store chunk's size. +#define CHUNK_HEADER_SIZE 8 // Size of a chunk header. +#define RIFF_HEADER_SIZE 12 // Size of the RIFF header ("RIFFnnnnWEBP"). +#define ANMF_CHUNK_SIZE 16 // Size of an ANMF chunk. +#define ANIM_CHUNK_SIZE 6 // Size of an ANIM chunk. +#define VP8X_CHUNK_SIZE 10 // Size of a VP8X chunk. + +#define MAX_CANVAS_SIZE (1 << 24) // 24-bit max for VP8X width/height. +#define MAX_IMAGE_AREA (1ULL << 32) // 32-bit max for width x height. +#define MAX_LOOP_COUNT (1 << 16) // maximum value for loop-count +#define MAX_DURATION (1 << 24) // maximum duration +#define MAX_POSITION_OFFSET (1 << 24) // maximum frame x/y offset + +// Maximum chunk payload is such that adding the header and padding won't +// overflow a uint32_t. +#define MAX_CHUNK_PAYLOAD (~0U - CHUNK_HEADER_SIZE - 1) + +#endif // WEBP_WEBP_FORMAT_CONSTANTS_H_ diff --git a/webp/mux.h b/webp/mux.h new file mode 100644 index 0000000..7d27489 --- /dev/null +++ b/webp/mux.h @@ -0,0 +1,530 @@ +// Copyright 2011 Google Inc. All Rights Reserved. +// +// Use of this source code is governed by a BSD-style license +// that can be found in the COPYING file in the root of the source +// tree. An additional intellectual property rights grant can be found +// in the file PATENTS. All contributing project authors may +// be found in the AUTHORS file in the root of the source tree. +// ----------------------------------------------------------------------------- +// +// RIFF container manipulation and encoding for WebP images. +// +// Authors: Urvang (urvang@google.com) +// Vikas (vikasa@google.com) + +#ifndef WEBP_WEBP_MUX_H_ +#define WEBP_WEBP_MUX_H_ + +#include "./mux_types.h" + +#ifdef __cplusplus +extern "C" { +#endif + +#define WEBP_MUX_ABI_VERSION 0x0108 // MAJOR(8b) + MINOR(8b) + +//------------------------------------------------------------------------------ +// Mux API +// +// This API allows manipulation of WebP container images containing features +// like color profile, metadata, animation. +// +// Code Example#1: Create a WebPMux object with image data, color profile and +// XMP metadata. +/* + int copy_data = 0; + WebPMux* mux = WebPMuxNew(); + // ... (Prepare image data). + WebPMuxSetImage(mux, &image, copy_data); + // ... (Prepare ICCP color profile data). + WebPMuxSetChunk(mux, "ICCP", &icc_profile, copy_data); + // ... (Prepare XMP metadata). + WebPMuxSetChunk(mux, "XMP ", &xmp, copy_data); + // Get data from mux in WebP RIFF format. + WebPMuxAssemble(mux, &output_data); + WebPMuxDelete(mux); + // ... (Consume output_data; e.g. write output_data.bytes to file). + WebPDataClear(&output_data); +*/ + +// Code Example#2: Get image and color profile data from a WebP file. +/* + int copy_data = 0; + // ... (Read data from file). + WebPMux* mux = WebPMuxCreate(&data, copy_data); + WebPMuxGetFrame(mux, 1, &image); + // ... (Consume image; e.g. call WebPDecode() to decode the data). + WebPMuxGetChunk(mux, "ICCP", &icc_profile); + // ... (Consume icc_data). 
+ WebPMuxDelete(mux); + WebPFree(data); +*/ + +// Note: forward declaring enumerations is not allowed in (strict) C and C++, +// the types are left here for reference. +// typedef enum WebPMuxError WebPMuxError; +// typedef enum WebPChunkId WebPChunkId; +typedef struct WebPMux WebPMux; // main opaque object. +typedef struct WebPMuxFrameInfo WebPMuxFrameInfo; +typedef struct WebPMuxAnimParams WebPMuxAnimParams; +typedef struct WebPAnimEncoderOptions WebPAnimEncoderOptions; + +// Error codes +typedef enum WebPMuxError { + WEBP_MUX_OK = 1, + WEBP_MUX_NOT_FOUND = 0, + WEBP_MUX_INVALID_ARGUMENT = -1, + WEBP_MUX_BAD_DATA = -2, + WEBP_MUX_MEMORY_ERROR = -3, + WEBP_MUX_NOT_ENOUGH_DATA = -4 +} WebPMuxError; + +// IDs for different types of chunks. +typedef enum WebPChunkId { + WEBP_CHUNK_VP8X, // VP8X + WEBP_CHUNK_ICCP, // ICCP + WEBP_CHUNK_ANIM, // ANIM + WEBP_CHUNK_ANMF, // ANMF + WEBP_CHUNK_DEPRECATED, // (deprecated from FRGM) + WEBP_CHUNK_ALPHA, // ALPH + WEBP_CHUNK_IMAGE, // VP8/VP8L + WEBP_CHUNK_EXIF, // EXIF + WEBP_CHUNK_XMP, // XMP + WEBP_CHUNK_UNKNOWN, // Other chunks. + WEBP_CHUNK_NIL +} WebPChunkId; + +//------------------------------------------------------------------------------ + +// Returns the version number of the mux library, packed in hexadecimal using +// 8bits for each of major/minor/revision. E.g: v2.5.7 is 0x020507. +WEBP_EXTERN int WebPGetMuxVersion(void); + +//------------------------------------------------------------------------------ +// Life of a Mux object + +// Internal, version-checked, entry point +WEBP_EXTERN WebPMux* WebPNewInternal(int); + +// Creates an empty mux object. +// Returns: +// A pointer to the newly created empty mux object. +// Or NULL in case of memory error. +static WEBP_INLINE WebPMux* WebPMuxNew(void) { + return WebPNewInternal(WEBP_MUX_ABI_VERSION); +} + +// Deletes the mux object. +// Parameters: +// mux - (in/out) object to be deleted +WEBP_EXTERN void WebPMuxDelete(WebPMux* mux); + +//------------------------------------------------------------------------------ +// Mux creation. + +// Internal, version-checked, entry point +WEBP_EXTERN WebPMux* WebPMuxCreateInternal(const WebPData*, int, int); + +// Creates a mux object from raw data given in WebP RIFF format. +// Parameters: +// bitstream - (in) the bitstream data in WebP RIFF format +// copy_data - (in) value 1 indicates given data WILL be copied to the mux +// object and value 0 indicates data will NOT be copied. +// Returns: +// A pointer to the mux object created from given data - on success. +// NULL - In case of invalid data or memory error. +static WEBP_INLINE WebPMux* WebPMuxCreate(const WebPData* bitstream, + int copy_data) { + return WebPMuxCreateInternal(bitstream, copy_data, WEBP_MUX_ABI_VERSION); +} + +//------------------------------------------------------------------------------ +// Non-image chunks. + +// Note: Only non-image related chunks should be managed through chunk APIs. +// (Image related chunks are: "ANMF", "VP8 ", "VP8L" and "ALPH"). +// To add, get and delete images, use WebPMuxSetImage(), WebPMuxPushFrame(), +// WebPMuxGetFrame() and WebPMuxDeleteFrame(). + +// Adds a chunk with id 'fourcc' and data 'chunk_data' in the mux object. +// Any existing chunk(s) with the same id will be removed. +// Parameters: +// mux - (in/out) object to which the chunk is to be added +// fourcc - (in) a character array containing the fourcc of the given chunk; +// e.g., "ICCP", "XMP ", "EXIF" etc. 
+// chunk_data - (in) the chunk data to be added +// copy_data - (in) value 1 indicates given data WILL be copied to the mux +// object and value 0 indicates data will NOT be copied. +// Returns: +// WEBP_MUX_INVALID_ARGUMENT - if mux, fourcc or chunk_data is NULL +// or if fourcc corresponds to an image chunk. +// WEBP_MUX_MEMORY_ERROR - on memory allocation error. +// WEBP_MUX_OK - on success. +WEBP_EXTERN WebPMuxError WebPMuxSetChunk( + WebPMux* mux, const char fourcc[4], const WebPData* chunk_data, + int copy_data); + +// Gets a reference to the data of the chunk with id 'fourcc' in the mux object. +// The caller should NOT free the returned data. +// Parameters: +// mux - (in) object from which the chunk data is to be fetched +// fourcc - (in) a character array containing the fourcc of the chunk; +// e.g., "ICCP", "XMP ", "EXIF" etc. +// chunk_data - (out) returned chunk data +// Returns: +// WEBP_MUX_INVALID_ARGUMENT - if mux, fourcc or chunk_data is NULL +// or if fourcc corresponds to an image chunk. +// WEBP_MUX_NOT_FOUND - If mux does not contain a chunk with the given id. +// WEBP_MUX_OK - on success. +WEBP_EXTERN WebPMuxError WebPMuxGetChunk( + const WebPMux* mux, const char fourcc[4], WebPData* chunk_data); + +// Deletes the chunk with the given 'fourcc' from the mux object. +// Parameters: +// mux - (in/out) object from which the chunk is to be deleted +// fourcc - (in) a character array containing the fourcc of the chunk; +// e.g., "ICCP", "XMP ", "EXIF" etc. +// Returns: +// WEBP_MUX_INVALID_ARGUMENT - if mux or fourcc is NULL +// or if fourcc corresponds to an image chunk. +// WEBP_MUX_NOT_FOUND - If mux does not contain a chunk with the given fourcc. +// WEBP_MUX_OK - on success. +WEBP_EXTERN WebPMuxError WebPMuxDeleteChunk( + WebPMux* mux, const char fourcc[4]); + +//------------------------------------------------------------------------------ +// Images. + +// Encapsulates data about a single frame. +struct WebPMuxFrameInfo { + WebPData bitstream; // image data: can be a raw VP8/VP8L bitstream + // or a single-image WebP file. + int x_offset; // x-offset of the frame. + int y_offset; // y-offset of the frame. + int duration; // duration of the frame (in milliseconds). + + WebPChunkId id; // frame type: should be one of WEBP_CHUNK_ANMF + // or WEBP_CHUNK_IMAGE + WebPMuxAnimDispose dispose_method; // Disposal method for the frame. + WebPMuxAnimBlend blend_method; // Blend operation for the frame. + uint32_t pad[1]; // padding for later use +}; + +// Sets the (non-animated) image in the mux object. +// Note: Any existing images (including frames) will be removed. +// Parameters: +// mux - (in/out) object in which the image is to be set +// bitstream - (in) can be a raw VP8/VP8L bitstream or a single-image +// WebP file (non-animated) +// copy_data - (in) value 1 indicates given data WILL be copied to the mux +// object and value 0 indicates data will NOT be copied. +// Returns: +// WEBP_MUX_INVALID_ARGUMENT - if mux is NULL or bitstream is NULL. +// WEBP_MUX_MEMORY_ERROR - on memory allocation error. +// WEBP_MUX_OK - on success. +WEBP_EXTERN WebPMuxError WebPMuxSetImage( + WebPMux* mux, const WebPData* bitstream, int copy_data); + +// Adds a frame at the end of the mux object. +// Notes: (1) frame.id should be WEBP_CHUNK_ANMF +// (2) For setting a non-animated image, use WebPMuxSetImage() instead. +// (3) Type of frame being pushed must be same as the frames in mux. 
+// (4) As WebP only supports even offsets, any odd offset will be snapped +// to an even location using: offset &= ~1 +// Parameters: +// mux - (in/out) object to which the frame is to be added +// frame - (in) frame data. +// copy_data - (in) value 1 indicates given data WILL be copied to the mux +// object and value 0 indicates data will NOT be copied. +// Returns: +// WEBP_MUX_INVALID_ARGUMENT - if mux or frame is NULL +// or if content of 'frame' is invalid. +// WEBP_MUX_MEMORY_ERROR - on memory allocation error. +// WEBP_MUX_OK - on success. +WEBP_EXTERN WebPMuxError WebPMuxPushFrame( + WebPMux* mux, const WebPMuxFrameInfo* frame, int copy_data); + +// Gets the nth frame from the mux object. +// The content of 'frame->bitstream' is allocated using WebPMalloc(), and NOT +// owned by the 'mux' object. It MUST be deallocated by the caller by calling +// WebPDataClear(). +// nth=0 has a special meaning - last position. +// Parameters: +// mux - (in) object from which the info is to be fetched +// nth - (in) index of the frame in the mux object +// frame - (out) data of the returned frame +// Returns: +// WEBP_MUX_INVALID_ARGUMENT - if mux or frame is NULL. +// WEBP_MUX_NOT_FOUND - if there are less than nth frames in the mux object. +// WEBP_MUX_BAD_DATA - if nth frame chunk in mux is invalid. +// WEBP_MUX_MEMORY_ERROR - on memory allocation error. +// WEBP_MUX_OK - on success. +WEBP_EXTERN WebPMuxError WebPMuxGetFrame( + const WebPMux* mux, uint32_t nth, WebPMuxFrameInfo* frame); + +// Deletes a frame from the mux object. +// nth=0 has a special meaning - last position. +// Parameters: +// mux - (in/out) object from which a frame is to be deleted +// nth - (in) The position from which the frame is to be deleted +// Returns: +// WEBP_MUX_INVALID_ARGUMENT - if mux is NULL. +// WEBP_MUX_NOT_FOUND - If there are less than nth frames in the mux object +// before deletion. +// WEBP_MUX_OK - on success. +WEBP_EXTERN WebPMuxError WebPMuxDeleteFrame(WebPMux* mux, uint32_t nth); + +//------------------------------------------------------------------------------ +// Animation. + +// Animation parameters. +struct WebPMuxAnimParams { + uint32_t bgcolor; // Background color of the canvas stored (in MSB order) as: + // Bits 00 to 07: Alpha. + // Bits 08 to 15: Red. + // Bits 16 to 23: Green. + // Bits 24 to 31: Blue. + int loop_count; // Number of times to repeat the animation [0 = infinite]. +}; + +// Sets the animation parameters in the mux object. Any existing ANIM chunks +// will be removed. +// Parameters: +// mux - (in/out) object in which ANIM chunk is to be set/added +// params - (in) animation parameters. +// Returns: +// WEBP_MUX_INVALID_ARGUMENT - if mux or params is NULL. +// WEBP_MUX_MEMORY_ERROR - on memory allocation error. +// WEBP_MUX_OK - on success. +WEBP_EXTERN WebPMuxError WebPMuxSetAnimationParams( + WebPMux* mux, const WebPMuxAnimParams* params); + +// Gets the animation parameters from the mux object. +// Parameters: +// mux - (in) object from which the animation parameters to be fetched +// params - (out) animation parameters extracted from the ANIM chunk +// Returns: +// WEBP_MUX_INVALID_ARGUMENT - if mux or params is NULL. +// WEBP_MUX_NOT_FOUND - if ANIM chunk is not present in mux object. +// WEBP_MUX_OK - on success. +WEBP_EXTERN WebPMuxError WebPMuxGetAnimationParams( + const WebPMux* mux, WebPMuxAnimParams* params); + +//------------------------------------------------------------------------------ +// Misc Utilities. + +// Sets the canvas size for the mux object. 
The width and height can be +// specified explicitly or left as zero (0, 0). +// * When width and height are specified explicitly, then this frame bound is +// enforced during subsequent calls to WebPMuxAssemble() and an error is +// reported if any animated frame does not completely fit within the canvas. +// * When unspecified (0, 0), the constructed canvas will get the frame bounds +// from the bounding-box over all frames after calling WebPMuxAssemble(). +// Parameters: +// mux - (in) object to which the canvas size is to be set +// width - (in) canvas width +// height - (in) canvas height +// Returns: +// WEBP_MUX_INVALID_ARGUMENT - if mux is NULL; or +// width or height are invalid or out of bounds +// WEBP_MUX_OK - on success. +WEBP_EXTERN WebPMuxError WebPMuxSetCanvasSize(WebPMux* mux, + int width, int height); + +// Gets the canvas size from the mux object. +// Note: This method assumes that the VP8X chunk, if present, is up-to-date. +// That is, the mux object hasn't been modified since the last call to +// WebPMuxAssemble() or WebPMuxCreate(). +// Parameters: +// mux - (in) object from which the canvas size is to be fetched +// width - (out) canvas width +// height - (out) canvas height +// Returns: +// WEBP_MUX_INVALID_ARGUMENT - if mux, width or height is NULL. +// WEBP_MUX_BAD_DATA - if VP8X/VP8/VP8L chunk or canvas size is invalid. +// WEBP_MUX_OK - on success. +WEBP_EXTERN WebPMuxError WebPMuxGetCanvasSize(const WebPMux* mux, + int* width, int* height); + +// Gets the feature flags from the mux object. +// Note: This method assumes that the VP8X chunk, if present, is up-to-date. +// That is, the mux object hasn't been modified since the last call to +// WebPMuxAssemble() or WebPMuxCreate(). +// Parameters: +// mux - (in) object from which the features are to be fetched +// flags - (out) the flags specifying which features are present in the +// mux object. This will be an OR of various flag values. +// Enum 'WebPFeatureFlags' can be used to test individual flag values. +// Returns: +// WEBP_MUX_INVALID_ARGUMENT - if mux or flags is NULL. +// WEBP_MUX_BAD_DATA - if VP8X/VP8/VP8L chunk or canvas size is invalid. +// WEBP_MUX_OK - on success. +WEBP_EXTERN WebPMuxError WebPMuxGetFeatures(const WebPMux* mux, + uint32_t* flags); + +// Gets number of chunks with the given 'id' in the mux object. +// Parameters: +// mux - (in) object from which the info is to be fetched +// id - (in) chunk id specifying the type of chunk +// num_elements - (out) number of chunks with the given chunk id +// Returns: +// WEBP_MUX_INVALID_ARGUMENT - if mux, or num_elements is NULL. +// WEBP_MUX_OK - on success. +WEBP_EXTERN WebPMuxError WebPMuxNumChunks(const WebPMux* mux, + WebPChunkId id, int* num_elements); + +// Assembles all chunks in WebP RIFF format and returns in 'assembled_data'. +// This function also validates the mux object. +// Note: The content of 'assembled_data' will be ignored and overwritten. +// Also, the content of 'assembled_data' is allocated using WebPMalloc(), and +// NOT owned by the 'mux' object. It MUST be deallocated by the caller by +// calling WebPDataClear(). It's always safe to call WebPDataClear() upon +// return, even in case of error. +// Parameters: +// mux - (in/out) object whose chunks are to be assembled +// assembled_data - (out) assembled WebP data +// Returns: +// WEBP_MUX_BAD_DATA - if mux object is invalid. +// WEBP_MUX_INVALID_ARGUMENT - if mux or assembled_data is NULL. +// WEBP_MUX_MEMORY_ERROR - on memory allocation error. +// WEBP_MUX_OK - on success. 
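Taken together, the frame, animation-parameter and assembly calls above can build an animated file from already-encoded stills. A sketch, assuming `frame0` and `frame1` hold valid VP8/VP8L bitstreams:

```cpp
#include <cstring>
#include "webp/mux.h"

// Muxes two pre-encoded frames into an animation. On success '*out' holds the
// assembled file and must be released with WebPDataClear().
int BuildTwoFrameAnimation(const WebPData* frame0, const WebPData* frame1,
                           int frame_duration_ms, WebPData* out) {
  WebPMux* mux = WebPMuxNew();
  if (mux == NULL) return 0;

  WebPMuxFrameInfo info;
  std::memset(&info, 0, sizeof(info));
  info.id = WEBP_CHUNK_ANMF;
  info.duration = frame_duration_ms;
  info.dispose_method = WEBP_MUX_DISPOSE_NONE;
  info.blend_method = WEBP_MUX_NO_BLEND;

  const WebPMuxAnimParams params = { 0xFFFFFFFFu, 0 };  // opaque white, loop forever

  info.bitstream = *frame0;
  int ok = (WebPMuxPushFrame(mux, &info, /*copy_data=*/1) == WEBP_MUX_OK);
  info.bitstream = *frame1;
  ok = ok && (WebPMuxPushFrame(mux, &info, /*copy_data=*/1) == WEBP_MUX_OK);
  ok = ok && (WebPMuxSetAnimationParams(mux, &params) == WEBP_MUX_OK);
  ok = ok && (WebPMuxAssemble(mux, out) == WEBP_MUX_OK);

  WebPMuxDelete(mux);
  return ok;
}
```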
+WEBP_EXTERN WebPMuxError WebPMuxAssemble(WebPMux* mux, + WebPData* assembled_data); + +//------------------------------------------------------------------------------ +// WebPAnimEncoder API +// +// This API allows encoding (possibly) animated WebP images. +// +// Code Example: +/* + WebPAnimEncoderOptions enc_options; + WebPAnimEncoderOptionsInit(&enc_options); + // Tune 'enc_options' as needed. + WebPAnimEncoder* enc = WebPAnimEncoderNew(width, height, &enc_options); + while() { + WebPConfig config; + WebPConfigInit(&config); + // Tune 'config' as needed. + WebPAnimEncoderAdd(enc, frame, timestamp_ms, &config); + } + WebPAnimEncoderAdd(enc, NULL, timestamp_ms, NULL); + WebPAnimEncoderAssemble(enc, webp_data); + WebPAnimEncoderDelete(enc); + // Write the 'webp_data' to a file, or re-mux it further. +*/ + +typedef struct WebPAnimEncoder WebPAnimEncoder; // Main opaque object. + +// Forward declarations. Defined in encode.h. +struct WebPPicture; +struct WebPConfig; + +// Global options. +struct WebPAnimEncoderOptions { + WebPMuxAnimParams anim_params; // Animation parameters. + int minimize_size; // If true, minimize the output size (slow). Implicitly + // disables key-frame insertion. + int kmin; + int kmax; // Minimum and maximum distance between consecutive key + // frames in the output. The library may insert some key + // frames as needed to satisfy this criteria. + // Note that these conditions should hold: kmax > kmin + // and kmin >= kmax / 2 + 1. Also, if kmax <= 0, then + // key-frame insertion is disabled; and if kmax == 1, + // then all frames will be key-frames (kmin value does + // not matter for these special cases). + int allow_mixed; // If true, use mixed compression mode; may choose + // either lossy and lossless for each frame. + int verbose; // If true, print info and warning messages to stderr. + + uint32_t padding[4]; // Padding for later use. +}; + +// Internal, version-checked, entry point. +WEBP_EXTERN int WebPAnimEncoderOptionsInitInternal( + WebPAnimEncoderOptions*, int); + +// Should always be called, to initialize a fresh WebPAnimEncoderOptions +// structure before modification. Returns false in case of version mismatch. +// WebPAnimEncoderOptionsInit() must have succeeded before using the +// 'enc_options' object. +static WEBP_INLINE int WebPAnimEncoderOptionsInit( + WebPAnimEncoderOptions* enc_options) { + return WebPAnimEncoderOptionsInitInternal(enc_options, WEBP_MUX_ABI_VERSION); +} + +// Internal, version-checked, entry point. +WEBP_EXTERN WebPAnimEncoder* WebPAnimEncoderNewInternal( + int, int, const WebPAnimEncoderOptions*, int); + +// Creates and initializes a WebPAnimEncoder object. +// Parameters: +// width/height - (in) canvas width and height of the animation. +// enc_options - (in) encoding options; can be passed NULL to pick +// reasonable defaults. +// Returns: +// A pointer to the newly created WebPAnimEncoder object. +// Or NULL in case of memory error. +static WEBP_INLINE WebPAnimEncoder* WebPAnimEncoderNew( + int width, int height, const WebPAnimEncoderOptions* enc_options) { + return WebPAnimEncoderNewInternal(width, height, enc_options, + WEBP_MUX_ABI_VERSION); +} + +// Optimize the given frame for WebP, encode it and add it to the +// WebPAnimEncoder object. +// The last call to 'WebPAnimEncoderAdd' should be with frame = NULL, which +// indicates that no more frames are to be added. This call is also used to +// determine the duration of the last frame. +// Parameters: +// enc - (in/out) object to which the frame is to be added. 
+// frame - (in/out) frame data in ARGB or YUV(A) format. If it is in YUV(A) +// format, it will be converted to ARGB, which incurs a small loss. +// timestamp_ms - (in) timestamp of this frame in milliseconds. +// Duration of a frame would be calculated as +// "timestamp of next frame - timestamp of this frame". +// Hence, timestamps should be in non-decreasing order. +// config - (in) encoding options; can be passed NULL to pick +// reasonable defaults. +// Returns: +// On error, returns false and frame->error_code is set appropriately. +// Otherwise, returns true. +WEBP_EXTERN int WebPAnimEncoderAdd( + WebPAnimEncoder* enc, struct WebPPicture* frame, int timestamp_ms, + const struct WebPConfig* config); + +// Assemble all frames added so far into a WebP bitstream. +// This call should be preceded by a call to 'WebPAnimEncoderAdd' with +// frame = NULL; if not, the duration of the last frame will be internally +// estimated. +// Parameters: +// enc - (in/out) object from which the frames are to be assembled. +// webp_data - (out) generated WebP bitstream. +// Returns: +// True on success. +WEBP_EXTERN int WebPAnimEncoderAssemble(WebPAnimEncoder* enc, + WebPData* webp_data); + +// Get error string corresponding to the most recent call using 'enc'. The +// returned string is owned by 'enc' and is valid only until the next call to +// WebPAnimEncoderAdd() or WebPAnimEncoderAssemble() or WebPAnimEncoderDelete(). +// Parameters: +// enc - (in/out) object from which the error string is to be fetched. +// Returns: +// NULL if 'enc' is NULL. Otherwise, returns the error string if the last call +// to 'enc' had an error, or an empty string if the last call was a success. +WEBP_EXTERN const char* WebPAnimEncoderGetError(WebPAnimEncoder* enc); + +// Deletes the WebPAnimEncoder object. +// Parameters: +// enc - (in/out) object to be deleted +WEBP_EXTERN void WebPAnimEncoderDelete(WebPAnimEncoder* enc); + +//------------------------------------------------------------------------------ + +#ifdef __cplusplus +} // extern "C" +#endif + +#endif // WEBP_WEBP_MUX_H_ diff --git a/webp/mux_types.h b/webp/mux_types.h new file mode 100644 index 0000000..2fe8195 --- /dev/null +++ b/webp/mux_types.h @@ -0,0 +1,98 @@ +// Copyright 2012 Google Inc. All Rights Reserved. +// +// Use of this source code is governed by a BSD-style license +// that can be found in the COPYING file in the root of the source +// tree. An additional intellectual property rights grant can be found +// in the file PATENTS. All contributing project authors may +// be found in the AUTHORS file in the root of the source tree. +// ----------------------------------------------------------------------------- +// +// Data-types common to the mux and demux libraries. +// +// Author: Urvang (urvang@google.com) + +#ifndef WEBP_WEBP_MUX_TYPES_H_ +#define WEBP_WEBP_MUX_TYPES_H_ + +#include // memset() +#include "./types.h" + +#ifdef __cplusplus +extern "C" { +#endif + +// Note: forward declaring enumerations is not allowed in (strict) C and C++, +// the types are left here for reference. +// typedef enum WebPFeatureFlags WebPFeatureFlags; +// typedef enum WebPMuxAnimDispose WebPMuxAnimDispose; +// typedef enum WebPMuxAnimBlend WebPMuxAnimBlend; +typedef struct WebPData WebPData; + +// VP8X Feature Flags. 
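The feature flags enumerated just below are the values returned (OR-ed together) by WebPMuxGetFeatures() and by WebPDemuxGetI() with WEBP_FF_FORMAT_FLAGS. A sketch of testing them, assuming a demuxer created with WebPDemux():

```cpp
#include <cstdio>
#include "webp/demux.h"   // also pulls in mux_types.h for the flag values

// Reports which optional features a parsed WebP file carries.
void ReportFeatures(const WebPDemuxer* demux) {
  const uint32_t flags = WebPDemuxGetI(demux, WEBP_FF_FORMAT_FLAGS);
  std::printf("animated=%d alpha=%d icc=%d exif=%d xmp=%d\n",
              (flags & ANIMATION_FLAG) != 0, (flags & ALPHA_FLAG) != 0,
              (flags & ICCP_FLAG) != 0, (flags & EXIF_FLAG) != 0,
              (flags & XMP_FLAG) != 0);
}
```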
+typedef enum WebPFeatureFlags { + ANIMATION_FLAG = 0x00000002, + XMP_FLAG = 0x00000004, + EXIF_FLAG = 0x00000008, + ALPHA_FLAG = 0x00000010, + ICCP_FLAG = 0x00000020, + + ALL_VALID_FLAGS = 0x0000003e +} WebPFeatureFlags; + +// Dispose method (animation only). Indicates how the area used by the current +// frame is to be treated before rendering the next frame on the canvas. +typedef enum WebPMuxAnimDispose { + WEBP_MUX_DISPOSE_NONE, // Do not dispose. + WEBP_MUX_DISPOSE_BACKGROUND // Dispose to background color. +} WebPMuxAnimDispose; + +// Blend operation (animation only). Indicates how transparent pixels of the +// current frame are blended with those of the previous canvas. +typedef enum WebPMuxAnimBlend { + WEBP_MUX_BLEND, // Blend. + WEBP_MUX_NO_BLEND // Do not blend. +} WebPMuxAnimBlend; + +// Data type used to describe 'raw' data, e.g., chunk data +// (ICC profile, metadata) and WebP compressed image data. +// 'bytes' memory must be allocated using WebPMalloc() and such. +struct WebPData { + const uint8_t* bytes; + size_t size; +}; + +// Initializes the contents of the 'webp_data' object with default values. +static WEBP_INLINE void WebPDataInit(WebPData* webp_data) { + if (webp_data != NULL) { + memset(webp_data, 0, sizeof(*webp_data)); + } +} + +// Clears the contents of the 'webp_data' object by calling WebPFree(). +// Does not deallocate the object itself. +static WEBP_INLINE void WebPDataClear(WebPData* webp_data) { + if (webp_data != NULL) { + WebPFree((void*)webp_data->bytes); + WebPDataInit(webp_data); + } +} + +// Allocates necessary storage for 'dst' and copies the contents of 'src'. +// Returns true on success. +static WEBP_INLINE int WebPDataCopy(const WebPData* src, WebPData* dst) { + if (src == NULL || dst == NULL) return 0; + WebPDataInit(dst); + if (src->bytes != NULL && src->size != 0) { + dst->bytes = (uint8_t*)WebPMalloc(src->size); + if (dst->bytes == NULL) return 0; + memcpy((void*)dst->bytes, src->bytes, src->size); + dst->size = src->size; + } + return 1; +} + +#ifdef __cplusplus +} // extern "C" +#endif + +#endif // WEBP_WEBP_MUX_TYPES_H_ diff --git a/webp/types.h b/webp/types.h new file mode 100644 index 0000000..47f7f2b --- /dev/null +++ b/webp/types.h @@ -0,0 +1,68 @@ +// Copyright 2010 Google Inc. All Rights Reserved. +// +// Use of this source code is governed by a BSD-style license +// that can be found in the COPYING file in the root of the source +// tree. An additional intellectual property rights grant can be found +// in the file PATENTS. All contributing project authors may +// be found in the AUTHORS file in the root of the source tree. 
+// -----------------------------------------------------------------------------
+//
+// Common types + memory wrappers
+//
+// Author: Skal (pascal.massimino@gmail.com)
+
+#ifndef WEBP_WEBP_TYPES_H_
+#define WEBP_WEBP_TYPES_H_
+
+#include <stddef.h>  // for size_t
+
+#ifndef _MSC_VER
+#include <inttypes.h>
+#if defined(__cplusplus) || !defined(__STRICT_ANSI__) || \
+    (defined(__STDC_VERSION__) && __STDC_VERSION__ >= 199901L)
+#define WEBP_INLINE inline
+#else
+#define WEBP_INLINE
+#endif
+#else
+typedef signed char int8_t;
+typedef unsigned char uint8_t;
+typedef signed short int16_t;
+typedef unsigned short uint16_t;
+typedef signed int int32_t;
+typedef unsigned int uint32_t;
+typedef unsigned long long int uint64_t;
+typedef long long int int64_t;
+#define WEBP_INLINE __forceinline
+#endif /* _MSC_VER */
+
+#ifndef WEBP_EXTERN
+// This explicitly marks library functions and allows for changing the
+// signature for e.g., Windows DLL builds.
+# if defined(__GNUC__) && __GNUC__ >= 4
+# define WEBP_EXTERN extern __attribute__ ((visibility ("default")))
+# else
+# define WEBP_EXTERN extern
+# endif /* __GNUC__ >= 4 */
+#endif /* WEBP_EXTERN */
+
+// Macro to check ABI compatibility (same major revision number)
+#define WEBP_ABI_IS_INCOMPATIBLE(a, b) (((a) >> 8) != ((b) >> 8))
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+// Allocates 'size' bytes of memory. Returns NULL upon error. Memory
+// must be deallocated by calling WebPFree(). This function is made available
+// by the core 'libwebp' library.
+WEBP_EXTERN void* WebPMalloc(size_t size);
+
+// Releases memory returned by the WebPDecode*() functions (from decode.h).
+WEBP_EXTERN void WebPFree(void* ptr);
+
+#ifdef __cplusplus
+}  // extern "C"
+#endif
+
+#endif  // WEBP_WEBP_TYPES_H_
diff --git a/webp/unicode.h b/webp/unicode.h
new file mode 100644
index 0000000..0831e23
--- /dev/null
+++ b/webp/unicode.h
@@ -0,0 +1,116 @@
+// Copyright 2018 Google Inc. All Rights Reserved.
+//
+// Use of this source code is governed by a BSD-style license
+// that can be found in the COPYING file in the root of the source
+// tree. An additional intellectual property rights grant can be found
+// in the file PATENTS. All contributing project authors may
+// be found in the AUTHORS file in the root of the source tree.
+// -----------------------------------------------------------------------------
+//
+// Unicode support for Windows. The main idea is to maintain an array of Unicode
+// arguments (wargv) and use it only for file paths. The regular argv is used
+// for everything else.
+//
+// Author: Yannis Guyon (yguyon@google.com)
+
+#ifndef WEBP_EXAMPLES_UNICODE_H_
+#define WEBP_EXAMPLES_UNICODE_H_
+
+#include <stdio.h>
+
+#if defined(_WIN32) && defined(_UNICODE)
+
+// wchar_t is used instead of TCHAR because we only perform additional work when
+// Unicode is enabled and because the output of CommandLineToArgvW() is wchar_t.
+
+#include <fcntl.h>
+#include <io.h>
+#include <shellapi.h>
+#include <wchar.h>
+#include <windows.h>
+
+// Create a wchar_t array containing Unicode parameters.
+#define INIT_WARGV(ARGC, ARGV)                                         \
+  int wargc;                                                           \
+  const W_CHAR** const wargv =                                         \
+      (const W_CHAR**)CommandLineToArgvW(GetCommandLineW(), &wargc);   \
+  do {                                                                 \
+    if (wargv == NULL || wargc != (ARGC)) {                            \
+      fprintf(stderr, "Error: Unable to get Unicode arguments.\n");    \
+      FREE_WARGV_AND_RETURN(-1);                                       \
+    }                                                                  \
+  } while (0)
+
+// Use this to get a Unicode argument (e.g. file path).
+#define GET_WARGV(UNUSED, C) wargv[C]
+// For cases where argv is shifted by one compared to wargv.
+#define GET_WARGV_SHIFTED(UNUSED, C) wargv[(C) + 1]
+#define GET_WARGV_OR_NULL() wargv
+
+// Release resources. LocalFree() is needed after CommandLineToArgvW().
+#define FREE_WARGV() LOCAL_FREE((W_CHAR** const)wargv)
+#define LOCAL_FREE(WARGV)                  \
+  do {                                     \
+    if ((WARGV) != NULL) LocalFree(WARGV); \
+  } while (0)
+
+#define W_CHAR wchar_t  // WCHAR without underscore might already be defined.
+#define TO_W_CHAR(STR) (L##STR)
+
+#define WFOPEN(ARG, OPT) _wfopen((const W_CHAR*)ARG, TO_W_CHAR(OPT))
+
+#define WFPRINTF(STREAM, STR, ...)                    \
+  do {                                                \
+    int prev_mode;                                    \
+    fflush(STREAM);                                   \
+    prev_mode = _setmode(_fileno(STREAM), _O_U8TEXT); \
+    fwprintf(STREAM, TO_W_CHAR(STR), __VA_ARGS__);    \
+    fflush(STREAM);                                   \
+    (void)_setmode(_fileno(STREAM), prev_mode);       \
+  } while (0)
+#define WPRINTF(STR, ...) WFPRINTF(stdout, STR, __VA_ARGS__)
+
+#define WSTRLEN(FILENAME) wcslen((const W_CHAR*)FILENAME)
+#define WSTRCMP(FILENAME, STR) wcscmp((const W_CHAR*)FILENAME, TO_W_CHAR(STR))
+#define WSTRRCHR(FILENAME, STR) wcsrchr((const W_CHAR*)FILENAME, TO_W_CHAR(STR))
+#define WSNPRINTF(A, B, STR, ...) _snwprintf(A, B, TO_W_CHAR(STR), __VA_ARGS__)
+
+#else
+
+#include <string.h>
+
+// Unicode file paths work as is on Unix platforms, and no extra work is done on
+// Windows either if Unicode is disabled.
+
+#define INIT_WARGV(ARGC, ARGV)
+
+#define GET_WARGV(ARGV, C) (ARGV)[C]
+#define GET_WARGV_SHIFTED(ARGV, C) (ARGV)[C]
+#define GET_WARGV_OR_NULL() NULL
+
+#define FREE_WARGV()
+#define LOCAL_FREE(WARGV)
+
+#define W_CHAR char
+#define TO_W_CHAR(STR) (STR)
+
+#define WFOPEN(ARG, OPT) fopen(ARG, OPT)
+
+#define WPRINTF(STR, ...) printf(STR, __VA_ARGS__)
+#define WFPRINTF(STREAM, STR, ...) fprintf(STREAM, STR, __VA_ARGS__)
+
+#define WSTRLEN(FILENAME) strlen(FILENAME)
+#define WSTRCMP(FILENAME, STR) strcmp(FILENAME, STR)
+#define WSTRRCHR(FILENAME, STR) strrchr(FILENAME, STR)
+#define WSNPRINTF(A, B, STR, ...) snprintf(A, B, STR, __VA_ARGS__)
+
+#endif  // defined(_WIN32) && defined(_UNICODE)
+
+// Don't forget to free wargv before returning (e.g. from main).
+#define FREE_WARGV_AND_RETURN(VALUE) \
+  do {                               \
+    FREE_WARGV();                    \
+    return (VALUE);                  \
+  } while (0)
+
+#endif  // WEBP_EXAMPLES_UNICODE_H_
diff --git a/yarn.lock b/yarn.lock
new file mode 100644
index 0000000..61280ec
--- /dev/null
+++ b/yarn.lock
@@ -0,0 +1,314 @@
+# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
+# yarn lockfile v1 + + +"@esbuild/aix-ppc64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/aix-ppc64/-/aix-ppc64-0.21.5.tgz#c7184a326533fcdf1b8ee0733e21c713b975575f" + integrity sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ== + +"@esbuild/android-arm64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/android-arm64/-/android-arm64-0.21.5.tgz#09d9b4357780da9ea3a7dfb833a1f1ff439b4052" + integrity sha512-c0uX9VAUBQ7dTDCjq+wdyGLowMdtR/GoC2U5IYk/7D1H1JYC0qseD7+11iMP2mRLN9RcCMRcjC4YMclCzGwS/A== + +"@esbuild/android-arm@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/android-arm/-/android-arm-0.21.5.tgz#9b04384fb771926dfa6d7ad04324ecb2ab9b2e28" + integrity sha512-vCPvzSjpPHEi1siZdlvAlsPxXl7WbOVUBBAowWug4rJHb68Ox8KualB+1ocNvT5fjv6wpkX6o/iEpbDrf68zcg== + +"@esbuild/android-x64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/android-x64/-/android-x64-0.21.5.tgz#29918ec2db754cedcb6c1b04de8cd6547af6461e" + integrity sha512-D7aPRUUNHRBwHxzxRvp856rjUHRFW1SdQATKXH2hqA0kAZb1hKmi02OpYRacl0TxIGz/ZmXWlbZgjwWYaCakTA== + +"@esbuild/darwin-arm64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/darwin-arm64/-/darwin-arm64-0.21.5.tgz#e495b539660e51690f3928af50a76fb0a6ccff2a" + integrity sha512-DwqXqZyuk5AiWWf3UfLiRDJ5EDd49zg6O9wclZ7kUMv2WRFr4HKjXp/5t8JZ11QbQfUS6/cRCKGwYhtNAY88kQ== + +"@esbuild/darwin-x64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/darwin-x64/-/darwin-x64-0.21.5.tgz#c13838fa57372839abdddc91d71542ceea2e1e22" + integrity sha512-se/JjF8NlmKVG4kNIuyWMV/22ZaerB+qaSi5MdrXtd6R08kvs2qCN4C09miupktDitvh8jRFflwGFBQcxZRjbw== + +"@esbuild/freebsd-arm64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/freebsd-arm64/-/freebsd-arm64-0.21.5.tgz#646b989aa20bf89fd071dd5dbfad69a3542e550e" + integrity sha512-5JcRxxRDUJLX8JXp/wcBCy3pENnCgBR9bN6JsY4OmhfUtIHe3ZW0mawA7+RDAcMLrMIZaf03NlQiX9DGyB8h4g== + +"@esbuild/freebsd-x64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/freebsd-x64/-/freebsd-x64-0.21.5.tgz#aa615cfc80af954d3458906e38ca22c18cf5c261" + integrity sha512-J95kNBj1zkbMXtHVH29bBriQygMXqoVQOQYA+ISs0/2l3T9/kj42ow2mpqerRBxDJnmkUDCaQT/dfNXWX/ZZCQ== + +"@esbuild/linux-arm64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/linux-arm64/-/linux-arm64-0.21.5.tgz#70ac6fa14f5cb7e1f7f887bcffb680ad09922b5b" + integrity sha512-ibKvmyYzKsBeX8d8I7MH/TMfWDXBF3db4qM6sy+7re0YXya+K1cem3on9XgdT2EQGMu4hQyZhan7TeQ8XkGp4Q== + +"@esbuild/linux-arm@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/linux-arm/-/linux-arm-0.21.5.tgz#fc6fd11a8aca56c1f6f3894f2bea0479f8f626b9" + integrity sha512-bPb5AHZtbeNGjCKVZ9UGqGwo8EUu4cLq68E95A53KlxAPRmUyYv2D6F0uUI65XisGOL1hBP5mTronbgo+0bFcA== + +"@esbuild/linux-ia32@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/linux-ia32/-/linux-ia32-0.21.5.tgz#3271f53b3f93e3d093d518d1649d6d68d346ede2" + integrity sha512-YvjXDqLRqPDl2dvRODYmmhz4rPeVKYvppfGYKSNGdyZkA01046pLWyRKKI3ax8fbJoK5QbxblURkwK/MWY18Tg== + +"@esbuild/linux-loong64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/linux-loong64/-/linux-loong64-0.21.5.tgz#ed62e04238c57026aea831c5a130b73c0f9f26df" + integrity sha512-uHf1BmMG8qEvzdrzAqg2SIG/02+4/DHB6a9Kbya0XDvwDEKCoC8ZRWI5JJvNdUjtciBGFQ5PuBlpEOXQj+JQSg== + +"@esbuild/linux-mips64el@0.21.5": + version 
"0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/linux-mips64el/-/linux-mips64el-0.21.5.tgz#e79b8eb48bf3b106fadec1ac8240fb97b4e64cbe" + integrity sha512-IajOmO+KJK23bj52dFSNCMsz1QP1DqM6cwLUv3W1QwyxkyIWecfafnI555fvSGqEKwjMXVLokcV5ygHW5b3Jbg== + +"@esbuild/linux-ppc64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/linux-ppc64/-/linux-ppc64-0.21.5.tgz#5f2203860a143b9919d383ef7573521fb154c3e4" + integrity sha512-1hHV/Z4OEfMwpLO8rp7CvlhBDnjsC3CttJXIhBi+5Aj5r+MBvy4egg7wCbe//hSsT+RvDAG7s81tAvpL2XAE4w== + +"@esbuild/linux-riscv64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/linux-riscv64/-/linux-riscv64-0.21.5.tgz#07bcafd99322d5af62f618cb9e6a9b7f4bb825dc" + integrity sha512-2HdXDMd9GMgTGrPWnJzP2ALSokE/0O5HhTUvWIbD3YdjME8JwvSCnNGBnTThKGEB91OZhzrJ4qIIxk/SBmyDDA== + +"@esbuild/linux-s390x@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/linux-s390x/-/linux-s390x-0.21.5.tgz#b7ccf686751d6a3e44b8627ababc8be3ef62d8de" + integrity sha512-zus5sxzqBJD3eXxwvjN1yQkRepANgxE9lgOW2qLnmr8ikMTphkjgXu1HR01K4FJg8h1kEEDAqDcZQtbrRnB41A== + +"@esbuild/linux-x64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/linux-x64/-/linux-x64-0.21.5.tgz#6d8f0c768e070e64309af8004bb94e68ab2bb3b0" + integrity sha512-1rYdTpyv03iycF1+BhzrzQJCdOuAOtaqHTWJZCWvijKD2N5Xu0TtVC8/+1faWqcP9iBCWOmjmhoH94dH82BxPQ== + +"@esbuild/netbsd-x64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/netbsd-x64/-/netbsd-x64-0.21.5.tgz#bbe430f60d378ecb88decb219c602667387a6047" + integrity sha512-Woi2MXzXjMULccIwMnLciyZH4nCIMpWQAs049KEeMvOcNADVxo0UBIQPfSmxB3CWKedngg7sWZdLvLczpe0tLg== + +"@esbuild/openbsd-x64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/openbsd-x64/-/openbsd-x64-0.21.5.tgz#99d1cf2937279560d2104821f5ccce220cb2af70" + integrity sha512-HLNNw99xsvx12lFBUwoT8EVCsSvRNDVxNpjZ7bPn947b8gJPzeHWyNVhFsaerc0n3TsbOINvRP2byTZ5LKezow== + +"@esbuild/sunos-x64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/sunos-x64/-/sunos-x64-0.21.5.tgz#08741512c10d529566baba837b4fe052c8f3487b" + integrity sha512-6+gjmFpfy0BHU5Tpptkuh8+uw3mnrvgs+dSPQXQOv3ekbordwnzTVEb4qnIvQcYXq6gzkyTnoZ9dZG+D4garKg== + +"@esbuild/win32-arm64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/win32-arm64/-/win32-arm64-0.21.5.tgz#675b7385398411240735016144ab2e99a60fc75d" + integrity sha512-Z0gOTd75VvXqyq7nsl93zwahcTROgqvuAcYDUr+vOv8uHhNSKROyU961kgtCD1e95IqPKSQKH7tBTslnS3tA8A== + +"@esbuild/win32-ia32@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/win32-ia32/-/win32-ia32-0.21.5.tgz#1bfc3ce98aa6ca9a0969e4d2af72144c59c1193b" + integrity sha512-SWXFF1CL2RVNMaVs+BBClwtfZSvDgtL//G/smwAc5oVK/UPu2Gu9tIaRgFmYFFKrmg3SyAjSrElf0TiJ1v8fYA== + +"@esbuild/win32-x64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/win32-x64/-/win32-x64-0.21.5.tgz#acad351d582d157bb145535db2a6ff53dd514b5c" + integrity sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw== + +"@ffmpeg/ffmpeg@^0.12.10": + version "0.12.10" + resolved "https://registry.yarnpkg.com/@ffmpeg/ffmpeg/-/ffmpeg-0.12.10.tgz#e3cce21f21f11f33dfc1ec1d5ad5694f4a3073c9" + integrity sha512-lVtk8PW8e+NUzGZhPTWj2P1J4/NyuCrbDD3O9IGpSeLYtUZKBqZO8CNj1WYGghep/MXoM8e1qVY1GztTkf8YYQ== + dependencies: + "@ffmpeg/types" "^0.12.2" + +"@ffmpeg/types@^0.12.2": + version "0.12.2" + resolved 
"https://registry.yarnpkg.com/@ffmpeg/types/-/types-0.12.2.tgz#bc7eef321ae50225c247091f1f23fd3087c6aa1d" + integrity sha512-NJtxwPoLb60/z1Klv0ueshguWQ/7mNm106qdHkB4HL49LXszjhjCCiL+ldHJGQ9ai2Igx0s4F24ghigy//ERdA== + +"@ffmpeg/util@^0.12.1": + version "0.12.1" + resolved "https://registry.yarnpkg.com/@ffmpeg/util/-/util-0.12.1.tgz#98afa20d7b4c0821eebdb205ddcfa5d07b0a4f53" + integrity sha512-10jjfAKWaDyb8+nAkijcsi9wgz/y26LOc1NKJradNMyCIl6usQcBbhkjX5qhALrSBcOy6TOeksunTYa+a03qNQ== + +"@rollup/rollup-android-arm-eabi@4.24.0": + version "4.24.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.24.0.tgz#1661ff5ea9beb362795304cb916049aba7ac9c54" + integrity sha512-Q6HJd7Y6xdB48x8ZNVDOqsbh2uByBhgK8PiQgPhwkIw/HC/YX5Ghq2mQY5sRMZWHb3VsFkWooUVOZHKr7DmDIA== + +"@rollup/rollup-android-arm64@4.24.0": + version "4.24.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.24.0.tgz#2ffaa91f1b55a0082b8a722525741aadcbd3971e" + integrity sha512-ijLnS1qFId8xhKjT81uBHuuJp2lU4x2yxa4ctFPtG+MqEE6+C5f/+X/bStmxapgmwLwiL3ih122xv8kVARNAZA== + +"@rollup/rollup-darwin-arm64@4.24.0": + version "4.24.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.24.0.tgz#627007221b24b8cc3063703eee0b9177edf49c1f" + integrity sha512-bIv+X9xeSs1XCk6DVvkO+S/z8/2AMt/2lMqdQbMrmVpgFvXlmde9mLcbQpztXm1tajC3raFDqegsH18HQPMYtA== + +"@rollup/rollup-darwin-x64@4.24.0": + version "4.24.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.24.0.tgz#0605506142b9e796c370d59c5984ae95b9758724" + integrity sha512-X6/nOwoFN7RT2svEQWUsW/5C/fYMBe4fnLK9DQk4SX4mgVBiTA9h64kjUYPvGQ0F/9xwJ5U5UfTbl6BEjaQdBQ== + +"@rollup/rollup-linux-arm-gnueabihf@4.24.0": + version "4.24.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.24.0.tgz#62dfd196d4b10c0c2db833897164d2d319ee0cbb" + integrity sha512-0KXvIJQMOImLCVCz9uvvdPgfyWo93aHHp8ui3FrtOP57svqrF/roSSR5pjqL2hcMp0ljeGlU4q9o/rQaAQ3AYA== + +"@rollup/rollup-linux-arm-musleabihf@4.24.0": + version "4.24.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.24.0.tgz#53ce72aeb982f1f34b58b380baafaf6a240fddb3" + integrity sha512-it2BW6kKFVh8xk/BnHfakEeoLPv8STIISekpoF+nBgWM4d55CZKc7T4Dx1pEbTnYm/xEKMgy1MNtYuoA8RFIWw== + +"@rollup/rollup-linux-arm64-gnu@4.24.0": + version "4.24.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.24.0.tgz#1632990f62a75c74f43e4b14ab3597d7ed416496" + integrity sha512-i0xTLXjqap2eRfulFVlSnM5dEbTVque/3Pi4g2y7cxrs7+a9De42z4XxKLYJ7+OhE3IgxvfQM7vQc43bwTgPwA== + +"@rollup/rollup-linux-arm64-musl@4.24.0": + version "4.24.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.24.0.tgz#8c03a996efb41e257b414b2e0560b7a21f2d9065" + integrity sha512-9E6MKUJhDuDh604Qco5yP/3qn3y7SLXYuiC0Rpr89aMScS2UAmK1wHP2b7KAa1nSjWJc/f/Lc0Wl1L47qjiyQw== + +"@rollup/rollup-linux-powerpc64le-gnu@4.24.0": + version "4.24.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.24.0.tgz#5b98729628d5bcc8f7f37b58b04d6845f85c7b5d" + integrity sha512-2XFFPJ2XMEiF5Zi2EBf4h73oR1V/lycirxZxHZNc93SqDN/IWhYYSYj8I9381ikUFXZrz2v7r2tOVk2NBwxrWw== + +"@rollup/rollup-linux-riscv64-gnu@4.24.0": + version "4.24.0" + resolved 
"https://registry.yarnpkg.com/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.24.0.tgz#48e42e41f4cabf3573cfefcb448599c512e22983" + integrity sha512-M3Dg4hlwuntUCdzU7KjYqbbd+BLq3JMAOhCKdBE3TcMGMZbKkDdJ5ivNdehOssMCIokNHFOsv7DO4rlEOfyKpg== + +"@rollup/rollup-linux-s390x-gnu@4.24.0": + version "4.24.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.24.0.tgz#e0b4f9a966872cb7d3e21b9e412a4b7efd7f0b58" + integrity sha512-mjBaoo4ocxJppTorZVKWFpy1bfFj9FeCMJqzlMQGjpNPY9JwQi7OuS1axzNIk0nMX6jSgy6ZURDZ2w0QW6D56g== + +"@rollup/rollup-linux-x64-gnu@4.24.0": + version "4.24.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.24.0.tgz#78144741993100f47bd3da72fce215e077ae036b" + integrity sha512-ZXFk7M72R0YYFN5q13niV0B7G8/5dcQ9JDp8keJSfr3GoZeXEoMHP/HlvqROA3OMbMdfr19IjCeNAnPUG93b6A== + +"@rollup/rollup-linux-x64-musl@4.24.0": + version "4.24.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.24.0.tgz#d9fe32971883cd1bd858336bd33a1c3ca6146127" + integrity sha512-w1i+L7kAXZNdYl+vFvzSZy8Y1arS7vMgIy8wusXJzRrPyof5LAb02KGr1PD2EkRcl73kHulIID0M501lN+vobQ== + +"@rollup/rollup-win32-arm64-msvc@4.24.0": + version "4.24.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.24.0.tgz#71fa3ea369316db703a909c790743972e98afae5" + integrity sha512-VXBrnPWgBpVDCVY6XF3LEW0pOU51KbaHhccHw6AS6vBWIC60eqsH19DAeeObl+g8nKAz04QFdl/Cefta0xQtUQ== + +"@rollup/rollup-win32-ia32-msvc@4.24.0": + version "4.24.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.24.0.tgz#653f5989a60658e17d7576a3996deb3902e342e2" + integrity sha512-xrNcGDU0OxVcPTH/8n/ShH4UevZxKIO6HJFK0e15XItZP2UcaiLFd5kiX7hJnqCbSztUF8Qot+JWBC/QXRPYWQ== + +"@rollup/rollup-win32-x64-msvc@4.24.0": + version "4.24.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.24.0.tgz#0574d7e87b44ee8511d08cc7f914bcb802b70818" + integrity sha512-fbMkAF7fufku0N2dE5TBXcNlg0pt0cJue4xBRE2Qc5Vqikxr4VCgKj/ht6SMdFcOacVA9rqF70APJ8RN/4vMJw== + +"@types/estree@1.0.6": + version "1.0.6" + resolved "https://registry.yarnpkg.com/@types/estree/-/estree-1.0.6.tgz#628effeeae2064a1b4e79f78e81d87b7e5fc7b50" + integrity sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw== + +esbuild@^0.21.3: + version "0.21.5" + resolved "https://registry.yarnpkg.com/esbuild/-/esbuild-0.21.5.tgz#9ca301b120922959b766360d8ac830da0d02997d" + integrity sha512-mg3OPMV4hXywwpoDxu3Qda5xCKQi+vCTZq8S9J/EpkhB2HzKXq4SNFZE3+NK93JYxc8VMSep+lOUSC/RVKaBqw== + optionalDependencies: + "@esbuild/aix-ppc64" "0.21.5" + "@esbuild/android-arm" "0.21.5" + "@esbuild/android-arm64" "0.21.5" + "@esbuild/android-x64" "0.21.5" + "@esbuild/darwin-arm64" "0.21.5" + "@esbuild/darwin-x64" "0.21.5" + "@esbuild/freebsd-arm64" "0.21.5" + "@esbuild/freebsd-x64" "0.21.5" + "@esbuild/linux-arm" "0.21.5" + "@esbuild/linux-arm64" "0.21.5" + "@esbuild/linux-ia32" "0.21.5" + "@esbuild/linux-loong64" "0.21.5" + "@esbuild/linux-mips64el" "0.21.5" + "@esbuild/linux-ppc64" "0.21.5" + "@esbuild/linux-riscv64" "0.21.5" + "@esbuild/linux-s390x" "0.21.5" + "@esbuild/linux-x64" "0.21.5" + "@esbuild/netbsd-x64" "0.21.5" + "@esbuild/openbsd-x64" "0.21.5" + "@esbuild/sunos-x64" "0.21.5" + "@esbuild/win32-arm64" "0.21.5" + "@esbuild/win32-ia32" "0.21.5" + "@esbuild/win32-x64" "0.21.5" + +fsevents@~2.3.2, fsevents@~2.3.3: + version "2.3.3" 
+ resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.3.tgz#cac6407785d03675a2a5e1a5305c697b347d90d6" + integrity sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw== + +nanoid@^3.3.7: + version "3.3.7" + resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.3.7.tgz#d0c301a691bc8d54efa0a2226ccf3fe2fd656bd8" + integrity sha512-eSRppjcPIatRIMC1U6UngP8XFcz8MQWGQdt1MTBQ7NaAmvXDfvNxbvWV3x2y6CdEUciCSsDHDQZbhYaB8QEo2g== + +picocolors@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.1.0.tgz#5358b76a78cde483ba5cef6a9dc9671440b27d59" + integrity sha512-TQ92mBOW0l3LeMeyLV6mzy/kWr8lkd/hp3mTg7wYK7zJhuBStmGMBG0BdeDZS/dZx1IukaX6Bk11zcln25o1Aw== + +postcss@^8.4.43: + version "8.4.47" + resolved "https://registry.yarnpkg.com/postcss/-/postcss-8.4.47.tgz#5bf6c9a010f3e724c503bf03ef7947dcb0fea365" + integrity sha512-56rxCq7G/XfB4EkXq9Egn5GCqugWvDFjafDOThIdMBsI15iqPqR5r15TfSr1YPYeEI19YeaXMCbY6u88Y76GLQ== + dependencies: + nanoid "^3.3.7" + picocolors "^1.1.0" + source-map-js "^1.2.1" + +rollup@^4.20.0: + version "4.24.0" + resolved "https://registry.yarnpkg.com/rollup/-/rollup-4.24.0.tgz#c14a3576f20622ea6a5c9cad7caca5e6e9555d05" + integrity sha512-DOmrlGSXNk1DM0ljiQA+i+o0rSLhtii1je5wgk60j49d1jHT5YYttBv1iWOnYSTG+fZZESUOSNiAl89SIet+Cg== + dependencies: + "@types/estree" "1.0.6" + optionalDependencies: + "@rollup/rollup-android-arm-eabi" "4.24.0" + "@rollup/rollup-android-arm64" "4.24.0" + "@rollup/rollup-darwin-arm64" "4.24.0" + "@rollup/rollup-darwin-x64" "4.24.0" + "@rollup/rollup-linux-arm-gnueabihf" "4.24.0" + "@rollup/rollup-linux-arm-musleabihf" "4.24.0" + "@rollup/rollup-linux-arm64-gnu" "4.24.0" + "@rollup/rollup-linux-arm64-musl" "4.24.0" + "@rollup/rollup-linux-powerpc64le-gnu" "4.24.0" + "@rollup/rollup-linux-riscv64-gnu" "4.24.0" + "@rollup/rollup-linux-s390x-gnu" "4.24.0" + "@rollup/rollup-linux-x64-gnu" "4.24.0" + "@rollup/rollup-linux-x64-musl" "4.24.0" + "@rollup/rollup-win32-arm64-msvc" "4.24.0" + "@rollup/rollup-win32-ia32-msvc" "4.24.0" + "@rollup/rollup-win32-x64-msvc" "4.24.0" + fsevents "~2.3.2" + +source-map-js@^1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/source-map-js/-/source-map-js-1.2.1.tgz#1ce5650fddd87abc099eda37dcff024c2667ae46" + integrity sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA== + +vite@^5.4.8: + version "5.4.8" + resolved "https://registry.yarnpkg.com/vite/-/vite-5.4.8.tgz#af548ce1c211b2785478d3ba3e8da51e39a287e8" + integrity sha512-FqrItQ4DT1NC4zCUqMB4c4AZORMKIa0m8/URVCZ77OZ/QSNeJ54bU1vrFADbDsuwfIPcgknRkmqakQcgnL4GiQ== + dependencies: + esbuild "^0.21.3" + postcss "^8.4.43" + rollup "^4.20.0" + optionalDependencies: + fsevents "~2.3.3" diff --git a/生成 b/生成 new file mode 100644 index 0000000..cb7a0b8 --- /dev/null +++ b/生成 @@ -0,0 +1,4 @@ +docker run --rm -v $(pwd):/src emscripten/emsdk emcc convert_image_to_webp.cpp stb_image.c libwebp.a libsharpyuv.a libwebpdecoder.a libwebpdemux.a libwebpmux.a -o ./test/convert_image_to_webp.js -s WASM=1 -s INITIAL_MEMORY=34340864 -s EXPORTED_FUNCTIONS="['_free','_malloc','_convert_image_to_webp']" -s EXTRA_EXPORTED_RUNTIME_METHODS='["cwrap", "getValue"]' + + +docker run --rm -v $(pwd):/src emscripten/emsdk emcc convert_image_to_webp.cpp stb_image.c libwebp.a libsharpyuv.a -o ./test/convert_image_to_webp.js -s WASM=1 -s NO_FILESYSTEM=1 -s INITIAL_MEMORY=34340864 -s ALLOW_MEMORY_GROWTH=1 -s MAXIMUM_MEMORY=268435456 -s 
EXPORTED_FUNCTIONS="['_free','_malloc','_convert_image_to_webp']" -s EXTRA_EXPORTED_RUNTIME_METHODS='["cwrap", "getValue"]' -Oz \ No newline at end of file
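
Note: the emcc commands above export _convert_image_to_webp together with _malloc/_free and the cwrap/getValue runtime helpers, so the generated ./test/convert_image_to_webp.js can be driven from the page roughly as sketched below. This is a minimal sketch rather than code from this repository: it assumes the default (non-MODULARIZE) Emscripten output that exposes a global Module object, that Module.HEAPU8 is reachable from the page, and that the buffer returned by WebPEncodeRGB may be released with _free (WebPMalloc wraps malloc in this static build; upstream libwebp recommends WebPFree()).

// Minimal usage sketch -- assumes ./test/convert_image_to_webp.js has been
// loaded first (e.g. via a <script> tag) and exposes the global `Module`.
const toWebP = Module.cwrap('convert_image_to_webp', 'number',
    ['number', 'number', 'number', 'number', 'number', 'number']);

function convertToWebP(inputBytes, targetWidth, targetHeight, quality) {
  // Copy the PNG/JPG bytes into the WASM heap.
  const inputPtr = Module._malloc(inputBytes.length);
  Module.HEAPU8.set(inputBytes, inputPtr);

  // Reserve 4 bytes for the size_t* out-parameter (size_t is 32-bit on wasm32).
  const outSizePtr = Module._malloc(4);

  const outPtr = toWebP(inputPtr, inputBytes.length,
                        targetWidth, targetHeight, quality, outSizePtr);
  Module._free(inputPtr);

  if (outPtr === 0) {  // NULL return: decoding, resizing or encoding failed.
    Module._free(outSizePtr);
    return null;
  }

  const outSize = Module.getValue(outSizePtr, 'i32');
  Module._free(outSizePtr);

  // Copy the encoded WebP out of the heap before releasing the buffer
  // (important with ALLOW_MEMORY_GROWTH, where the heap can be reallocated).
  const webpBytes = new Uint8Array(Module.HEAPU8.subarray(outPtr, outPtr + outSize));
  Module._free(outPtr);  // assumption: the libwebp output buffer is malloc-backed

  return webpBytes;  // e.g. new Blob([webpBytes], { type: 'image/webp' })
}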