vite-plugin-compression2
Advanced tools
+23
-27
@@ -11,5 +11,6 @@ import { Plugin } from 'vite'; | ||
| type CompressionOptions<T> = InferDefault<T>; | ||
| type Pretty<T> = { | ||
| [key in keyof T]: T[key]; | ||
| } & NonNullable<unknown>; | ||
| interface FileNameFunctionMetadata { | ||
| algorithm: Algorithm | AlgorithmFunction<UserCompressionOptions>; | ||
| options: UserCompressionOptions; | ||
| } | ||
| interface BaseCompressionPluginOptions { | ||
@@ -19,3 +20,3 @@ include?: FilterPattern; | ||
| threshold?: number; | ||
| filename?: string | ((id: string) => string); | ||
| filename?: string | ((id: string, metadata: FileNameFunctionMetadata) => string); | ||
| deleteOriginalAssets?: boolean; | ||
@@ -31,21 +32,18 @@ skipIfLargerOrEqual?: boolean; | ||
| type AlgorithmFunction<T extends UserCompressionOptions> = (buf: InputType, options: T) => Promise<Buffer>; | ||
| type InternalCompressionPluginOptionsFunction<T, A extends AlgorithmFunction<T>> = { | ||
| algorithm?: A; | ||
| compressionOptions: T; | ||
| }; | ||
| type InternalWithoutCompressionPluginOptionsFunction = { | ||
| algorithm?: AlgorithmFunction<undefined>; | ||
| }; | ||
| type InternalCompressionPluginOptionsAlgorithm<A extends Algorithm> = { | ||
| algorithm?: A; | ||
| compressionOptions?: Pretty<AlgorithmToZlib[A]>; | ||
| }; | ||
| type ViteCompressionPluginConfigFunction<T extends UserCompressionOptions, A extends AlgorithmFunction<T>> = BaseCompressionPluginOptions & InternalCompressionPluginOptionsFunction<T, A>; | ||
| type ViteWithoutCompressionPluginConfigFunction = Pretty<BaseCompressionPluginOptions & InternalWithoutCompressionPluginOptionsFunction>; | ||
| type ViteCompressionPluginConfigAlgorithm<A extends Algorithm> = BaseCompressionPluginOptions & InternalCompressionPluginOptionsAlgorithm<A>; | ||
| type ViteCompressionPluginConfig<T, A extends Algorithm> = ViteCompressionPluginConfigFunction<T, AlgorithmFunction<T>> | ViteCompressionPluginConfigAlgorithm<A>; | ||
| type ViteCompressionPluginOption<A extends Algorithm | UserCompressionOptions | undefined = undefined> = A extends undefined ? Pretty<ViteWithoutCompressionPluginConfigFunction> : A extends Algorithm ? Pretty<ViteCompressionPluginConfigAlgorithm<A>> : A extends UserCompressionOptions ? Pretty<ViteCompressionPluginConfigFunction<A, AlgorithmFunction<A>>> : never; | ||
| type DefineAlgorithmResult<T extends UserCompressionOptions = UserCompressionOptions> = readonly [ | ||
| 'gzip' | 'deflate' | 'deflateRaw', | ||
| ZlibOptions | ||
| ] | readonly [ | ||
| 'brotliCompress', | ||
| BrotliOptions | ||
| ] | readonly [ | ||
| AlgorithmFunction<T>, | ||
| T | ||
| ]; | ||
| type Algorithms = (Algorithm | DefineAlgorithmResult)[]; | ||
| interface ViteCompressionPluginOption extends BaseCompressionPluginOptions { | ||
| algorithms?: Algorithms; | ||
| } | ||
| interface ViteTarballPluginOptions { | ||
| dest?: string; | ||
| gz?: boolean; | ||
| } | ||
@@ -58,11 +56,9 @@ | ||
| declare function tarball(opts?: ViteTarballPluginOptions): Plugin; | ||
| declare function compression(): Plugin; | ||
| declare function compression<T extends UserCompressionOptions | undefined, A extends Algorithm | AlgorithmFunction<T> | AlgorithmFunction<undefined>>(opts: A extends Algorithm ? Pretty<ViteCompressionPluginConfigAlgorithm<A>> : ViteCompressionPluginConfigFunction<T, AlgorithmFunction<T>>): Plugin; | ||
| declare function compression<T extends UserCompressionOptions>(opts: ViteCompressionPluginConfigFunction<T, AlgorithmFunction<T>>): Plugin; | ||
| declare function compression(opts: ViteWithoutCompressionPluginConfigFunction): Plugin; | ||
| declare function compression(opts?: ViteCompressionPluginOption): Plugin; | ||
| declare namespace compression { | ||
| var getPluginAPI: (plugins: readonly Plugin[]) => CompressionPluginAPI | undefined; | ||
| } | ||
| declare function defineCompressionOption<T = never, A extends Algorithm = never>(option: ViteCompressionPluginConfig<T, A>): ViteCompressionPluginConfig<T, A>; | ||
| declare function defineAlgorithm<T extends Algorithm | UserCompressionOptions | AlgorithmFunction<UserCompressionOptions>>(algorithm: T extends Algorithm | AlgorithmFunction<UserCompressionOptions> ? T : AlgorithmFunction<Exclude<T, string>>, options?: T extends Algorithm ? AlgorithmToZlib[T] : T extends AlgorithmFunction<UserCompressionOptions> ? UserCompressionOptions : T): DefineAlgorithmResult<T extends Algorithm | AlgorithmFunction<UserCompressionOptions> ? UserCompressionOptions : T>; | ||
| export { type Algorithm, type CompressionOptions, type ViteCompressionPluginConfig, type ViteCompressionPluginOption, type ViteTarballPluginOptions, compression, compression as default, defineCompressionOption, tarball }; | ||
| export { compression, compression as default, defineAlgorithm, tarball }; | ||
| export type { Algorithm, CompressionOptions, ViteCompressionPluginOption, ViteTarballPluginOptions }; |
+23
-27
@@ -11,5 +11,6 @@ import { Plugin } from 'vite'; | ||
| type CompressionOptions<T> = InferDefault<T>; | ||
| type Pretty<T> = { | ||
| [key in keyof T]: T[key]; | ||
| } & NonNullable<unknown>; | ||
| interface FileNameFunctionMetadata { | ||
| algorithm: Algorithm | AlgorithmFunction<UserCompressionOptions>; | ||
| options: UserCompressionOptions; | ||
| } | ||
| interface BaseCompressionPluginOptions { | ||
@@ -19,3 +20,3 @@ include?: FilterPattern; | ||
| threshold?: number; | ||
| filename?: string | ((id: string) => string); | ||
| filename?: string | ((id: string, metadata: FileNameFunctionMetadata) => string); | ||
| deleteOriginalAssets?: boolean; | ||
@@ -31,21 +32,18 @@ skipIfLargerOrEqual?: boolean; | ||
| type AlgorithmFunction<T extends UserCompressionOptions> = (buf: InputType, options: T) => Promise<Buffer>; | ||
| type InternalCompressionPluginOptionsFunction<T, A extends AlgorithmFunction<T>> = { | ||
| algorithm?: A; | ||
| compressionOptions: T; | ||
| }; | ||
| type InternalWithoutCompressionPluginOptionsFunction = { | ||
| algorithm?: AlgorithmFunction<undefined>; | ||
| }; | ||
| type InternalCompressionPluginOptionsAlgorithm<A extends Algorithm> = { | ||
| algorithm?: A; | ||
| compressionOptions?: Pretty<AlgorithmToZlib[A]>; | ||
| }; | ||
| type ViteCompressionPluginConfigFunction<T extends UserCompressionOptions, A extends AlgorithmFunction<T>> = BaseCompressionPluginOptions & InternalCompressionPluginOptionsFunction<T, A>; | ||
| type ViteWithoutCompressionPluginConfigFunction = Pretty<BaseCompressionPluginOptions & InternalWithoutCompressionPluginOptionsFunction>; | ||
| type ViteCompressionPluginConfigAlgorithm<A extends Algorithm> = BaseCompressionPluginOptions & InternalCompressionPluginOptionsAlgorithm<A>; | ||
| type ViteCompressionPluginConfig<T, A extends Algorithm> = ViteCompressionPluginConfigFunction<T, AlgorithmFunction<T>> | ViteCompressionPluginConfigAlgorithm<A>; | ||
| type ViteCompressionPluginOption<A extends Algorithm | UserCompressionOptions | undefined = undefined> = A extends undefined ? Pretty<ViteWithoutCompressionPluginConfigFunction> : A extends Algorithm ? Pretty<ViteCompressionPluginConfigAlgorithm<A>> : A extends UserCompressionOptions ? Pretty<ViteCompressionPluginConfigFunction<A, AlgorithmFunction<A>>> : never; | ||
| type DefineAlgorithmResult<T extends UserCompressionOptions = UserCompressionOptions> = readonly [ | ||
| 'gzip' | 'deflate' | 'deflateRaw', | ||
| ZlibOptions | ||
| ] | readonly [ | ||
| 'brotliCompress', | ||
| BrotliOptions | ||
| ] | readonly [ | ||
| AlgorithmFunction<T>, | ||
| T | ||
| ]; | ||
| type Algorithms = (Algorithm | DefineAlgorithmResult)[]; | ||
| interface ViteCompressionPluginOption extends BaseCompressionPluginOptions { | ||
| algorithms?: Algorithms; | ||
| } | ||
| interface ViteTarballPluginOptions { | ||
| dest?: string; | ||
| gz?: boolean; | ||
| } | ||
@@ -58,11 +56,9 @@ | ||
| declare function tarball(opts?: ViteTarballPluginOptions): Plugin; | ||
| declare function compression(): Plugin; | ||
| declare function compression<T extends UserCompressionOptions | undefined, A extends Algorithm | AlgorithmFunction<T> | AlgorithmFunction<undefined>>(opts: A extends Algorithm ? Pretty<ViteCompressionPluginConfigAlgorithm<A>> : ViteCompressionPluginConfigFunction<T, AlgorithmFunction<T>>): Plugin; | ||
| declare function compression<T extends UserCompressionOptions>(opts: ViteCompressionPluginConfigFunction<T, AlgorithmFunction<T>>): Plugin; | ||
| declare function compression(opts: ViteWithoutCompressionPluginConfigFunction): Plugin; | ||
| declare function compression(opts?: ViteCompressionPluginOption): Plugin; | ||
| declare namespace compression { | ||
| var getPluginAPI: (plugins: readonly Plugin[]) => CompressionPluginAPI | undefined; | ||
| } | ||
| declare function defineCompressionOption<T = never, A extends Algorithm = never>(option: ViteCompressionPluginConfig<T, A>): ViteCompressionPluginConfig<T, A>; | ||
| declare function defineAlgorithm<T extends Algorithm | UserCompressionOptions | AlgorithmFunction<UserCompressionOptions>>(algorithm: T extends Algorithm | AlgorithmFunction<UserCompressionOptions> ? T : AlgorithmFunction<Exclude<T, string>>, options?: T extends Algorithm ? AlgorithmToZlib[T] : T extends AlgorithmFunction<UserCompressionOptions> ? UserCompressionOptions : T): DefineAlgorithmResult<T extends Algorithm | AlgorithmFunction<UserCompressionOptions> ? UserCompressionOptions : T>; | ||
| export { type Algorithm, type CompressionOptions, type ViteCompressionPluginConfig, type ViteCompressionPluginOption, type ViteTarballPluginOptions, compression, compression as default, defineCompressionOption, tarball }; | ||
| export { compression, compression as default, defineAlgorithm, tarball }; | ||
| export type { Algorithm, CompressionOptions, ViteCompressionPluginOption, ViteTarballPluginOptions }; |
+560
-1
@@ -1,1 +0,560 @@ | ||
| "use strict";Object.defineProperty(exports,"__esModule",{value:!0});var t=require("@rollup/pluginutils"),e=require("fs"),n=require("fs/promises"),i=require("os"),r=require("path"),s=require("tar-mini"),o=require("util"),a=require("zlib");function u(t){return t&&t.__esModule?t:{default:t}}var l=u(e),c=u(n),p=u(i),f=u(r),d=u(o),h=u(a);function g(t){return t&&t.__esModule&&Object.prototype.hasOwnProperty.call(t,"default")?t.default:t}var m,w={exports:{}};function y(){if(m)return w.exports;m=1;let t,e,n,{defineProperty:i,setPrototypeOf:r,create:s,keys:o}=Object,a="",{round:u,max:l}=Math,c=t=>{let e=/([a-f\d]{3,6})/i.exec(t)?.[1],n=e?.length,i=parseInt(6^n?3^n?"0":e[0]+e[0]+e[1]+e[1]+e[2]+e[2]:e,16);return[i>>16&255,i>>8&255,255&i]},p=(t,e,n)=>t^e||e^n?16+36*u(t/51)+6*u(e/51)+u(n/51):8>t?16:t>248?231:u(24*(t-8)/247)+232,f=t=>{let e,n,i,r,s;return 8>t?30+t:16>t?t-8+90:(232>t?(s=(t-=16)%36,e=(t/36|0)/5,n=(s/6|0)/5,i=s%6/5):e=n=i=(10*(t-232)+8)/255,r=2*l(e,n,i),r?30+(u(i)<<2|u(n)<<1|u(e))+(2^r?0:60):30)},d=(()=>{let n=t=>s.some((e=>t.test(e))),i=globalThis,r=i.process??{},s=r.argv??[],a=r.env??{},u=-1;try{t=","+o(a).join(",")}catch(t){a={},u=0}let l="FORCE_COLOR",c={false:0,0:0,1:1,2:2,3:3}[a[l]]??-1,p=l in a&&c||n(/^--color=?(true|always)?$/);return p&&(u=c),~u||(u=((n,i,r)=>(e=n.TERM,{"24bit":3,truecolor:3,ansi256:2,ansi:1}[n.COLORTERM]||(n.CI?/,GITHUB/.test(t)?3:1:i&&"dumb"!==e?r?3:/-256/.test(e)?2:1:0)))(a,!!a.PM2_HOME||a.NEXT_RUNTIME?.includes("edge")||!!r.stdout?.isTTY,"win32"===r.platform)),!c||a.NO_COLOR||n(/^--(no-color|color=(false|never))$/)?0:i.window?.chrome||p&&!u?3:u})(),h={open:a,close:a},g=39,y=49,b={},O=({p:t},{open:e,close:i})=>{let s=(t,...n)=>{if(!t){if(e&&e===i)return e;if((t??a)===a)return a}let r,o=t.raw?String.raw({raw:t},...n):a+t,u=s.p,l=u.o,c=u.c;if(o.includes(""))for(;u;u=u.p){let{open:t,close:e}=u,n=e.length,i=a,s=0;if(n)for(;~(r=o.indexOf(e,s));s=r+n)i+=o.slice(s,r)+t;o=i+o.slice(s)}return 
l+(o.includes("\n")?o.replace(/(\r?\n)/g,c+"$1"+l):o)+c},o=e,u=i;return t&&(o=t.o+e,u=i+t.c),r(s,n),s.p={open:e,close:i,o:o,c:u,p:t},s.open=o,s.close=u,s};const v=function(t=d){let e={Ansis:v,isSupported:()=>o,strip:t=>t.replace(/[][[()#;?]*(?:[0-9]{1,4}(?:;[0-9]{0,4})*)?[0-9A-ORZcf-nqry=><]/g,a),extend(t){for(let e in t){let n=t[e],r=(typeof n)[0],s="s"===r?P(...c(n)):n;b[e]="f"===r?{get(){return(...t)=>O(this,n(...t))}}:{get(){let t=O(this,s);return i(this,e,{value:t}),t}}}return n=s({},b),r(e,n),e}},o=t>0,u=(t,e)=>o?{open:`[${t}m`,close:`[${e}m`}:h,l=t=>e=>t(...c(e)),m=(t,e)=>(n,i,r)=>u(`${t}8;2;${n};${i};${r}`,e),w=(t,e)=>(n,i,r)=>u(((t,e,n)=>f(p(t,e,n)))(n,i,r)+t,e),x=t=>(e,n,i)=>t(p(e,n,i)),P=m(3,g),E=m(4,y),_=t=>u("38;5;"+t,g),j=t=>u("48;5;"+t,y);2===t?(P=x(_),E=x(j)):1===t&&(P=w(0,g),E=w(10,y),_=t=>u(f(t),g),j=t=>u(f(t)+10,y));let R,q={fg:_,bg:j,rgb:P,bgRgb:E,hex:l(P),bgHex:l(E),visible:h,reset:u(0,0),bold:u(1,22),dim:u(2,22),italic:u(3,23),underline:u(4,24),inverse:u(7,27),hidden:u(8,28),strikethrough:u(9,29)},S="Bright";return"black,red,green,yellow,blue,magenta,cyan,white,gray".split(",").map(((t,e)=>{R="bg"+t[0].toUpperCase()+t.slice(1),8>e?(q[t+S]=u(90+e,g),q[R+S]=u(100+e,y)):e=60,q[t]=u(30+e,g),q[R]=u(40+e,y)})),e.extend(q)},x=new v;return w.exports=x,x.default=x,w.exports}var b=g(y());function O(t){return t.length}function v(t,e){const n="function"==typeof e?e(t):e,{dir:i,base:r}=f.default.parse(t),s=i?i+"/":"";return n.replace(/\[path\]/,s).replace(/\[base\]/,r)}function x(t){return/^\\\\\?\\/.test(t)?t:t.replace(/\\/g,"/")}const P=new TextEncoder;function E(t){return"string"==typeof t?P.encode(t):t}function _(){}function j(t){const e=t in h.default?t:"gzip";return{algorithm:d.default.promisify(h.default[e])}}async function R(t,e,n){try{return await e(t,n)}catch(t){return Promise.reject(t)}}const 
q={gzip:{level:h.default.constants.Z_BEST_COMPRESSION},brotliCompress:{params:{[h.default.constants.BROTLI_PARAM_QUALITY]:h.default.constants.BROTLI_MAX_QUALITY}},deflate:{level:h.default.constants.Z_BEST_COMPRESSION},deflateRaw:{level:h.default.constants.Z_BEST_COMPRESSION}};class S{constructor(t){this.maxConcurrent=t,this.queue=[],this.errors=[],this.running=0}enqueue(t){this.queue.push(t),this.run()}async run(){for(;this.running<this.maxConcurrent&&this.queue.length;){const t=this.queue.shift();this.running++;try{await t()}catch(t){this.errors.push(t)}finally{this.running--,this.run()}}}async wait(){for(;this.running;)await new Promise((t=>setTimeout(t,0)));if(O(this.errors))throw new AggregateError(this.errors,"task failed")}}function T(t){return new S(t)}const z="vite-plugin-compression",M=(()=>{const t=p.default.cpus()||{length:1};return 1===t.length?10:Math.max(1,t.length-1)})();function C(t){const e=new Set,n=(t,e)=>x(f.default.resolve(t,e));if(t.build.rollupOptions?.output){(Array.isArray(t.build.rollupOptions.output)?t.build.rollupOptions.output:[t.build.rollupOptions.output]).forEach((i=>{("object"!=typeof i||O(Object.keys(i)))&&e.add(n(t.root,i.dir||t.build.outDir))}))}else e.add(n(t.root,t.build.outDir));return e}async function I(t,e){const n=!("copyPublicDir"in t.build)||t.build.copyPublicDir;if(t.publicDir&&n&&l.default.existsSync(t.publicDir)){const n=await async function(t){const e=await Promise.all((await c.default.readdir(t)).map((e=>f.default.join(t,e))));let n=0;const i=[];for(;n!==O(e);){const t=e[n],r=await c.default.stat(t);if(r.isDirectory()){const n=await c.default.readdir(t);e.push(...n.map((e=>f.default.join(t,e))))}r.isFile()&&i.push(t),n++}return i}(t.publicDir),i=f.default.join(t.root,f.default.relative(t.root,t.publicDir));n.forEach((t=>{const n=x(f.default.relative(i,t));e(n,t)}))}}function 
A(e={}){const{include:n=/\.(html|xml|css|json|js|mjs|svg|yaml|yml|toml)$/,exclude:i,threshold:r=0,algorithm:s="gzip",filename:o,compressionOptions:a,deleteOriginalAssets:u=!1,skipIfLargerOrEqual:l=!0}=e,p=t.createFilter(n,i),d=[],h=[],{msgs:g,cleanup:m}=function(){const t=[],e=process.stdout.write.bind(process.stdout);return process.stdout.write=function(...n){const[i]=n,r="string"==typeof i?i:i.toString();return r.includes("built in")?(t.push(r),!1):e.apply(this,n)},{cleanup:()=>process.stdout.write=e,msgs:t}}();let w,y=process.cwd();const x={algorithm:"string"==typeof s?j(s).algorithm:s,options:"function"==typeof s?a:Object.assign({},q[s],a),filename:o??("brotliCompress"===s?"[path][base].br":"[path][base].gz")},P=T(M),S=async function(t,e){for(const t in e){if(!p(t))continue;const n=e[t],i=E("asset"===n.type?n.source:n.code),s=O(i);s<r||P.enqueue((async()=>{const n=v(t,x.filename),r=await R(i,x.algorithm,x.options);l&&O(r)>=s||((u||t===n)&&Reflect.deleteProperty(e,t),this.emitFile({type:"asset",fileName:n,source:r}))}))}await P.wait().catch(this.error)},A={resolve:_},B={staticOutputs:new Set,done:new Promise((t=>{A.resolve=t}))},D=new Intl.NumberFormat("en",{maximumFractionDigits:2,minimumFractionDigits:2});return{name:z,apply:"build",enforce:"post",api:B,async configResolved(t){h.push(...C(t)),await I(t,(t=>{d.push(t)}));const e=t.plugins.find((t=>"vite:build-import-analysis"===t.name));if(!e)throw new Error("[vite-plugin-compression] Can't be work in versions lower than vite at 2.0.0");!function(t,e){const n=t.generateBundle;if("object"==typeof n&&n.handler){const t=n.handler;n.handler=async function(...n){await t.apply(this,n),await e.apply(this,n)}}"function"==typeof n&&(t.generateBundle=async function(...t){await n.apply(this,t),await e.apply(this,t)})}(e,S),w=t.logger,y=t.root},async closeBundle(){const t=[],e=async(e,n)=>{const i=f.default.join(e,n);if(!p(i)&&!B.staticOutputs.has(n))return void B.staticOutputs.add(n);const{size:s}=await 
c.default.stat(i);s<r?B.staticOutputs.has(n)||B.staticOutputs.add(n):await(async(e,n,i)=>{const r=await c.default.readFile(e),s=await R(r,x.algorithm,x.options);if(l&&O(s)>=O(r))return void(B.staticOutputs.has(n)||B.staticOutputs.add(n));const o=v(n,x.filename);B.staticOutputs.has(o)||B.staticOutputs.add(o);const a=f.default.join(i,o);u&&a!==e&&await c.default.rm(e,{recursive:!0,force:!0}),await c.default.writeFile(a,s),t.push({dest:f.default.relative(y,i)+"/",file:o,size:O(s)})})(i,n,e)};for(const t of h)for(const n of d)P.enqueue((()=>e(t,n)));if(await P.wait().catch((t=>t)),A.resolve(),m(),w){const e=t.reduce(((t,e)=>{const n=e.dest+e.file;return Math.max(t,n.length)}),0);for(const{dest:i,file:r,size:s}of t){const t=r.padEnd(e);w.info(b.dim(i)+b.green(t)+b.bold(b.dim((n=s,`${D.format(n/1e3)} kB`))))}}var n;for(const t of g)console.info(t)}}}A.getPluginAPI=t=>t.find((t=>t.name===z))?.api,exports.compression=A,exports.default=A,exports.defineCompressionOption=function(t){return t},exports.tarball=function(t={}){const{dest:e,gz:n=!1}=t,i=[],r=[];let o=[],a=process.cwd();const u=function(){const t=s.createPack(),e=[],n={dests:[],root:"",gz:!1};return{add:e=>{t.add(E(e.content),{filename:e.filename})},setup:t=>{Object.assign(n,t),n.dests.forEach((t=>{const i=x(f.default.resolve(n.root,t+".tar"+(n.gz?".gz":""))),r=x(f.default.dirname(i));x(n.root)!==r&&l.default.mkdirSync(r,{recursive:!0});const s=l.default.createWriteStream(i);e.push(s)}))},done:async()=>{t.done(),await Promise.all(e.map((e=>new Promise(((i,r)=>{e.on("error",r),e.on("finish",i),n.gz?t.receiver.pipe(h.default.createGzip()).pipe(e):t.receiver.pipe(e)}))))),e.length=0}}}(),p=T(M);let d;return{name:"vite-plugin-tarball",enforce:"post",async configResolved(t){r.push(...C(t)),a=t.root,o=e?[e]:r,d=A.getPluginAPI(t.plugins),d||await I(t,(t=>{i.push(t)})),u.setup({dests:o,root:a,gz:n})},writeBundle(t,e){for(const t in e){const n=e[t];u.add({filename:t,content:"asset"===n.type?n.source:n.code})}},async 
closeBundle(){d&&await d.done,!i.length&&d&&d.staticOutputs.size&&i.push(...d.staticOutputs);for(const t of r)for(const e of i)p.enqueue((async()=>{const n=f.default.join(t,e),i=await c.default.readFile(n);u.add({filename:e,content:i})}));await p.wait(),await u.done()}}}; | ||
| 'use strict'; | ||
| Object.defineProperty(exports, '__esModule', { value: true }); | ||
| var pluginutils = require('@rollup/pluginutils'); | ||
| var ansis = require('ansis'); | ||
| var fs = require('fs'); | ||
| var fsp = require('node:fs/promises'); | ||
| var os = require('os'); | ||
| var path = require('path'); | ||
| var tarMini = require('tar-mini'); | ||
| var util = require('util'); | ||
| var zlib = require('zlib'); | ||
// Length of any array-like or string value.
function len(source) {
    const { length } = source;
    return length;
}
// Expand a `[path][base]`-style template (or a user callback producing one)
// into the final asset file name.
//   [path] -> directories of the original asset, including the trailing slash
//   [base] -> basename of the original asset ([name] + [ext]), e.g. image.png
function replaceFileName(staticPath, rule, metadata) {
    const template = typeof rule === 'function' ? rule(staticPath, metadata) : rule;
    const parsed = path.parse(staticPath);
    const prefix = parsed.dir ? `${parsed.dir}/` : '';
    return template.replace(/\[path\]/, prefix).replace(/\[base\]/, parsed.base);
}
// Normalize Windows separators to forward slashes. Extended-length paths
// (`\\?\...`) are returned untouched because converting them breaks them.
function slash(path) {
    return /^\\\\\?\\/.test(path) ? path : path.replace(/\\/g, '/');
}
// Breadth-first walk of `entry`: directories found along the way are expanded
// in place on the work list, and every regular file's absolute path is
// collected and returned.
async function readAll(entry) {
    const worklist = await Promise.all((await fsp.readdir(entry)).map((d) => path.join(entry, d)));
    const files = [];
    for (let pos = 0; pos !== len(worklist); pos++) {
        const current = worklist[pos];
        const stat = await fsp.stat(current);
        if (stat.isDirectory()) {
            const children = await fsp.readdir(current);
            worklist.push(...children.map((c) => path.join(current, c)));
        }
        if (stat.isFile()) {
            files.push(current);
        }
    }
    return files;
}
// One shared encoder instance for all string-to-bytes conversions.
const encoder = new TextEncoder();
// UTF-8-encode string input; anything already binary passes through as-is.
function stringToBytes(b) {
    if (typeof b === 'string') {
        return encoder.encode(b);
    }
    return b;
}
| function noop() {} | ||
// Intercept process.stdout so Vite's final "built in …" summary line is
// buffered in `msgs` instead of printed; everything else passes through.
// Call `cleanup()` to restore the original stdout writer.
function captureViteLogger() {
    const msgs = [];
    const originalStdWrite = process.stdout.write.bind(process.stdout);
    // @ts-ignore -- replacement is looser than stdout.write's overloads
    process.stdout.write = function (...args) {
        const [chunk] = args;
        const text = typeof chunk === 'string' ? chunk : chunk.toString();
        if (text.includes('built in')) {
            msgs.push(text);
            return false;
        }
        // eslint-disable-next-line @typescript-eslint/no-unsafe-return
        return originalStdWrite.apply(this, args);
    };
    return {
        msgs,
        cleanup: () => (process.stdout.write = originalStdWrite)
    };
}
// Resolve a user-supplied algorithm name to a promisified zlib function.
// Unknown names silently fall back to gzip.
function ensureAlgorithm(userAlgorithm) {
    const name = userAlgorithm in zlib ? userAlgorithm : 'gzip';
    return { algorithm: util.promisify(zlib[name]) };
}
// Run one compression function over `buf`, normalizing any failure into a
// rejected promise for the caller to handle.
async function compress(buf, compress, options) {
    try {
        return await compress(buf, options);
    } catch (error) {
        return Promise.reject(error);
    }
}
// Per-algorithm defaults: every algorithm is tuned for maximum compression
// ratio rather than speed.
const defaultCompressionOptions = {
    gzip: {
        level: zlib.constants.Z_BEST_COMPRESSION
    },
    // Brotli uses its own quality scale; BROTLI_MAX_QUALITY is the densest.
    brotliCompress: {
        params: {
            [zlib.constants.BROTLI_PARAM_QUALITY]: zlib.constants.BROTLI_MAX_QUALITY
        }
    },
    deflate: {
        level: zlib.constants.Z_BEST_COMPRESSION
    },
    deflateRaw: {
        level: zlib.constants.Z_BEST_COMPRESSION
    }
};
// Accumulate assets into a tar-mini pack and, on `done()`, flush the packed
// stream into one `<dest>.tar` file per configured destination.
function createTarBall() {
    const pack = tarMini.createPack();
    const writers = [];
    const options = { dests: [], root: '' };
    const add = (meta) => {
        pack.add(stringToBytes(meta.content), { filename: meta.filename });
    };
    const setup = (tarballOptions) => {
        Object.assign(options, tarballOptions);
        for (const dest of options.dests) {
            const expected = slash(path.resolve(options.root, dest + '.tar'));
            const parent = slash(path.dirname(expected));
            // Only create the parent when the archive lands outside root itself.
            if (slash(options.root) !== parent) {
                fs.mkdirSync(parent, { recursive: true });
            }
            writers.push(fs.createWriteStream(expected));
        }
    };
    const done = async () => {
        pack.done();
        await Promise.all(writers.map((w) => new Promise((resolve, reject) => {
            w.on('error', reject);
            w.on('finish', resolve);
            pack.receiver.pipe(w);
        })));
        writers.length = 0;
    };
    return { add, setup, done };
}
/* eslint-disable @typescript-eslint/no-floating-promises */
// Compiler-style field helper: define `key` on `obj`. When the key already
// exists (e.g. inherited), use defineProperty; otherwise plain-assign.
function _define_property(obj, key, value) {
    if (key in obj) {
        Object.defineProperty(obj, key, {
            value,
            enumerable: true,
            configurable: true,
            writable: true
        });
    } else {
        obj[key] = value;
    }
    return obj;
}
// Bounded-concurrency task queue. Tasks are fired as slots free up; failures
// are collected and surfaced as one AggregateError from wait().
class Queue {
    constructor(maxConcurrent) {
        _define_property(this, "maxConcurrent", void 0);
        _define_property(this, "queue", void 0);
        _define_property(this, "running", void 0);
        _define_property(this, "errors", void 0);
        this.maxConcurrent = maxConcurrent;
        this.queue = [];
        this.errors = [];
        this.running = 0;
    }
    enqueue(task) {
        this.queue.push(task);
        this.run();
    }
    async run() {
        while (this.queue.length && this.running < this.maxConcurrent) {
            const task = this.queue.shift();
            this.running += 1;
            try {
                await task();
            } catch (error) {
                this.errors.push(error);
            } finally {
                this.running -= 1;
                this.run();
            }
        }
    }
    async wait() {
        // Busy-yield until every in-flight task settles, then report failures.
        while (this.running) {
            await new Promise((resolve) => setTimeout(resolve, 0));
        }
        if (this.errors.length) {
            throw new AggregateError(this.errors, 'task failed');
        }
    }
}
function createConcurrentQueue(max) {
    return new Queue(max);
}
// Name of Vite's internal import-analysis plugin (hijacked later) and this
// plugin's own registered name.
const VITE_INTERNAL_ANALYSIS_PLUGIN = 'vite:build-import-analysis';
const VITE_COMPRESSION_PLUGIN = 'vite-plugin-compression';
const VITE_COPY_PUBLIC_DIR = 'copyPublicDir';
// Concurrency budget: single-core machines get a fixed 10-slot queue;
// otherwise leave one core free for the main thread.
const MAX_CONCURRENT = (() => {
    const cpus = os.cpus() || { length: 1 };
    return cpus.length === 1 ? 10 : Math.max(1, cpus.length - 1);
})();
// Collect the set of absolute output directories from the resolved config.
// issue #39: plugins such as vite-plugin-legacy may push an empty output
// entry, which must be skipped. Paths are made absolute because bare names
// like `dist` break in monorepo setups (e.g. `yarn --cwd @pkg/website build`).
function handleOutputOption(conf) {
    const outputs = new Set();
    const prepareAbsPath = (root, sub) => slash(path.resolve(root, sub));
    const output = conf.build.rollupOptions?.output;
    if (output) {
        const outputOptions = Array.isArray(output) ? output : [output];
        for (const opt of outputOptions) {
            // Skip empty output objects entirely.
            if (typeof opt === 'object' && !len(Object.keys(opt))) {
                continue;
            }
            outputs.add(prepareAbsPath(conf.root, opt.dir || conf.build.outDir));
        }
    } else {
        outputs.add(prepareAbsPath(conf.root, conf.build.outDir));
    }
    return outputs;
}
// Enumerate every file under config.publicDir (when public-dir copying is
// enabled and the directory exists) and report each to `callback` as
// (slash-normalized relative name, absolute path).
async function handleStaticFiles(config, callback) {
    const baseCondit = VITE_COPY_PUBLIC_DIR in config.build ? config.build.copyPublicDir : true;
    if (!config.publicDir || !baseCondit || !fs.existsSync(config.publicDir)) {
        return;
    }
    const staticAssets = await readAll(config.publicDir);
    const publicPath = path.join(config.root, path.relative(config.root, config.publicDir));
    for (const assets of staticAssets) {
        callback(slash(path.relative(publicPath, assets)), assets);
    }
}
// Vite plugin: after the build completes, bundle every emitted asset (plus
// public-dir static files) into one `<dest>.tar` archive per output dir.
// When the compression plugin is also installed, its plugin API is reused
// instead of re-scanning the public dir.
function tarball(opts = {}) {
    const { dest: userDest } = opts;
    // Relative names of static (public-dir) files, appended at closeBundle.
    const statics = [];
    // Absolute output directories discovered from the resolved config.
    const outputs = [];
    let dests = [];
    let root = process.cwd();
    const tarball1 = createTarBall();
    const queue = createConcurrentQueue(MAX_CONCURRENT);
    // API object of the sibling compression plugin, when present.
    let ctx;
    return {
        name: 'vite-plugin-tarball',
        enforce: 'post',
        async configResolved (config) {
            outputs.push(...handleOutputOption(config));
            root = config.root;
            // Explicit `dest` wins; otherwise archive next to each output dir.
            dests = userDest ? [
                userDest
            ] : outputs;
            // No need to add source to pack in configResolved stage
            // If we do at the start stage. The build task will be slow.
            ctx = compression.getPluginAPI(config.plugins);
            if (!ctx) {
                // Compression plugin absent: scan public dir ourselves.
                await handleStaticFiles(config, (file)=>{
                    statics.push(file);
                });
            }
            // create dest dir
            tarball1.setup({
                dests,
                root
            });
        },
        writeBundle (_, bundles) {
            // Stream every emitted chunk/asset into the tar pack as it lands.
            for(const fileName in bundles){
                const bundle = bundles[fileName];
                tarball1.add({
                    filename: fileName,
                    content: bundle.type === 'asset' ? bundle.source : bundle.code
                });
            }
        },
        async closeBundle () {
            // Let the compression plugin finish first (its `done` promise).
            if (ctx) {
                await ctx.done;
            }
            // Reuse the compression plugin's record of static outputs when we
            // did not collect them in configResolved.
            if (!statics.length && ctx && ctx.staticOutputs.size) {
                statics.push(...ctx.staticOutputs);
            }
            // Static files never pass through writeBundle; read them from disk
            // (bounded concurrency) and add them to the pack.
            for (const dest of outputs){
                for (const file of statics){
                    queue.enqueue(async ()=>{
                        const p = path.join(dest, file);
                        const buf = await fsp.readFile(p);
                        tarball1.add({
                            filename: file,
                            content: buf
                        });
                    });
                }
            }
            await queue.wait();
            // Finalize the pack and flush the archives to disk.
            await tarball1.done();
        }
    };
}
// Wrap a plugin's generateBundle hook so `afterHook` runs right after the
// original, preserving `this` and arguments. Handles both the plain-function
// form and the `{ handler }` object form of the hook.
function hijackGenerateBundle(plugin, afterHook) {
    const hook = plugin.generateBundle;
    if (typeof hook === 'function') {
        plugin.generateBundle = async function handler(...args) {
            await hook.apply(this, args);
            await afterHook.apply(this, args);
        };
        return;
    }
    if (typeof hook === 'object' && hook.handler) {
        const original = hook.handler;
        hook.handler = async function handler(...args) {
            await original.apply(this, args);
            await afterHook.apply(this, args);
        };
    }
}
// Create the vite compression plugin (CJS build).
// Two compression passes are performed: bundle assets are compressed during a
// hook appended to vite's internal analysis plugin's generateBundle, and files
// copied from publicDir are compressed at closeBundle. All compression work is
// funneled through a bounded concurrent queue.
function compression(opts = {}) {
    const { include = /\.(html|xml|css|json|js|mjs|svg|yaml|yml|toml)$/, exclude, threshold = 0, algorithms: userAlgorithms = [
        'gzip',
        'brotliCompress'
    ], filename, deleteOriginalAssets = false, skipIfLargerOrEqual = true } = opts;
    // Normalize the user list into [algorithm, options] tuples. Strings go
    // through defineAlgorithm (validation + default options); arrays are
    // assumed to already be tuples. Any other shape is silently ignored.
    const algorithms = [];
    userAlgorithms.forEach((algorithm)=>{
        if (typeof algorithm === 'string') {
            algorithms.push(defineAlgorithm(algorithm));
        } else if (typeof algorithm === 'object' && Array.isArray(algorithm)) {
            algorithms.push(algorithm);
        }
    });
    const filter = pluginutils.createFilter(include, exclude);
    const statics = [];
    const outputs = [];
    // vite's internal vite:reporter doesn't write any log info to stdout, so we only
    // capture the "built in" message and print the static-file progress to stdout.
    // The captured message is replayed unchanged at the end of closeBundle.
    const { msgs, cleanup } = captureViteLogger();
    let logger;
    let root = process.cwd();
    // Pre-resolve each tuple into a ready-to-call compressor plus its filename
    // template (default extension follows the algorithm: .br for brotli, else .gz).
    const zlibs = algorithms.map(([algorithm, options])=>({
            algorithm: typeof algorithm === 'string' ? ensureAlgorithm(algorithm).algorithm : algorithm,
            options,
            filename: filename !== null && filename !== void 0 ? filename : algorithm === 'brotliCompress' ? '[path][base].br' : '[path][base].gz'
        }));
    const queue = createConcurrentQueue(MAX_CONCURRENT);
    // Runs as an appended step of vite's own generateBundle (see the
    // hijackGenerateBundle call below); `this` is the rollup plugin context.
    const generateBundle = async function handler(_, bundles) {
        for(const fileName in bundles){
            if (!filter(fileName)) {
                continue;
            }
            const bundle = bundles[fileName];
            const source = stringToBytes(bundle.type === 'asset' ? bundle.source : bundle.code);
            const size = len(source);
            if (size < threshold) {
                continue;
            }
            queue.enqueue(async ()=>{
                for(let i = 0; i < zlibs.length; i++){
                    const z = zlibs[i];
                    // `flag` marks the last algorithm: only then may the original
                    // asset be dropped from the bundle.
                    const flag = i === zlibs.length - 1;
                    const name = replaceFileName(fileName, z.filename, {
                        options: z.options,
                        algorithm: z.algorithm
                    });
                    const compressed = await compress(source, z.algorithm, z.options);
                    if (skipIfLargerOrEqual && len(compressed) >= size) {
                        // NOTE(review): this `return` also skips any remaining
                        // algorithms for this file — confirm that is intended when
                        // multiple algorithms are configured.
                        return;
                    }
                    // #issue 30 31
                    // https://rollupjs.org/plugin-development/#this-emitfile
                    if (flag) {
                        if (deleteOriginalAssets || fileName === name) {
                            Reflect.deleteProperty(bundles, fileName);
                        }
                    }
                    this.emitFile({
                        type: 'asset',
                        fileName: name,
                        source: compressed
                    });
                }
            });
        }
        await queue.wait().catch(this.error);
    };
    // done/resolve pair lets the tarball plugin (via the `api` field) await the
    // completion of this plugin's closeBundle work.
    const doneResolver = {
        resolve: noop
    };
    const pluginContext = {
        staticOutputs: new Set(),
        done: new Promise((resolve)=>{
            doneResolver.resolve = resolve;
        })
    };
    // Fixed two-decimal formatting for the size report (e.g. "12.34 kB").
    const numberFormatter = new Intl.NumberFormat('en', {
        maximumFractionDigits: 2,
        minimumFractionDigits: 2
    });
    const displaySize = (bytes)=>{
        return `${numberFormatter.format(bytes / 1000)} kB`;
    };
    const plugin = {
        name: VITE_COMPRESSION_PLUGIN,
        apply: 'build',
        enforce: 'post',
        api: pluginContext,
        async configResolved (config) {
            // hijack vite's internal `vite:build-import-analysis` plugin, so we won't
            // need to process the chunks at closeBundle anymore.
            // issue #26
            // https://github.com/vitejs/vite/blob/716286ef21f4d59786f21341a52a81ee5db58aba/packages/vite/src/node/build.ts#L566-L611
            // Vite follows the rollup option first and the configResolved hook doesn't
            // expose the merged conf to the user, so for someone using rollupOptions
            // `config.build.outDir` will not be as expected.
            outputs.push(...handleOutputOption(config));
            // Vite's public build: https://github.com/vitejs/vite/blob/HEAD/packages/vite/src/node/build.ts#L704-L709
            // copyPublicDir minimum version 3.2+
            // No need to check size here.
            await handleStaticFiles(config, (file)=>{
                statics.push(file);
            });
            const viteAnalyzerPlugin = config.plugins.find((p)=>p.name === VITE_INTERNAL_ANALYSIS_PLUGIN);
            if (!viteAnalyzerPlugin) {
                throw new Error("[vite-plugin-compression] Can't be work in versions lower than vite at 2.0.0");
            }
            hijackGenerateBundle(viteAnalyzerPlugin, generateBundle);
            logger = config.logger;
            root = config.root;
        },
        async closeBundle () {
            const compressedMessages = [];
            // Compress one static (publicDir) file with every configured
            // algorithm and write the results under `dest`.
            const compressAndHandleFile = async (filePath, file, dest)=>{
                const buf = await fsp.readFile(filePath);
                for(let i = 0; i < zlibs.length; i++){
                    const z = zlibs[i];
                    const flag = i === zlibs.length - 1;
                    const compressed = await compress(buf, z.algorithm, z.options);
                    if (skipIfLargerOrEqual && len(compressed) >= len(buf)) {
                        if (!pluginContext.staticOutputs.has(file)) {
                            pluginContext.staticOutputs.add(file);
                        }
                        // NOTE(review): returning here also skips the remaining
                        // algorithms for this file — confirm intended.
                        return;
                    }
                    const fileName = replaceFileName(file, z.filename, {
                        options: z.options,
                        algorithm: z.algorithm
                    });
                    if (!pluginContext.staticOutputs.has(fileName)) {
                        pluginContext.staticOutputs.add(fileName);
                    }
                    const outputPath = path.join(dest, fileName);
                    // Delete the source only on the last algorithm, and never when
                    // the compressed file would overwrite it in place.
                    if (flag) {
                        if (deleteOriginalAssets && outputPath !== filePath) {
                            await fsp.rm(filePath, {
                                recursive: true,
                                force: true
                            });
                        }
                    }
                    await fsp.writeFile(outputPath, compressed);
                    compressedMessages.push({
                        dest: path.relative(root, dest) + '/',
                        file: fileName,
                        size: len(compressed)
                    });
                }
            };
            const processFile = async (dest, file)=>{
                const filePath = path.join(dest, file);
                // Files excluded by the filter are still recorded so the tarball
                // plugin can pack them uncompressed.
                if (!filter(filePath) && !pluginContext.staticOutputs.has(file)) {
                    pluginContext.staticOutputs.add(file);
                    return;
                }
                const { size } = await fsp.stat(filePath);
                if (size < threshold) {
                    if (!pluginContext.staticOutputs.has(file)) {
                        pluginContext.staticOutputs.add(file);
                    }
                    return;
                }
                await compressAndHandleFile(filePath, file, dest);
            };
            // parallel run
            for (const dest of outputs){
                for (const file of statics){
                    queue.enqueue(()=>processFile(dest, file));
                }
            }
            // issue #18
            // In some cases (like vuepress) vite build is called with `Promise.all`,
            // which is concurrent: by the time we record the file fd it may already
            // have changed, so swallow the error instead of failing the build.
            await queue.wait().catch((e)=>e);
            doneResolver.resolve();
            cleanup();
            if (logger) {
                // Pad file names to the longest dest+file string so sizes align.
                const paddingSize = compressedMessages.reduce((acc, cur)=>{
                    const full = cur.dest + cur.file;
                    return Math.max(acc, full.length);
                }, 0);
                for (const { dest, file, size } of compressedMessages){
                    const paddedFile = file.padEnd(paddingSize);
                    logger.info(ansis.dim(dest) + ansis.green(paddedFile) + ansis.bold(ansis.dim(displaySize(size))));
                }
            }
            // Finally replay vite's captured "built in" summary.
            for (const msg of msgs){
                console.info(msg);
            }
        }
    };
    return plugin;
}
// Look up this plugin's public API object from a resolved plugin list.
// Returns undefined when the compression plugin is not installed.
compression.getPluginAPI = (plugins)=>{
    const found = plugins.find((p)=>p.name === VITE_COMPRESSION_PLUGIN);
    if (found === null || found === undefined) {
        return undefined;
    }
    return found.api;
};
// Build an [algorithm, options] tuple for the compression plugin.
// String algorithms must be one of the supported zlib names and get the
// library defaults merged with `options`; a custom compressor function is
// paired with `options` as-is (or an empty object).
function defineAlgorithm(algorithm, options) {
    if (typeof algorithm !== 'string') {
        return [
            algorithm,
            options || {}
        ];
    }
    if (!(algorithm in defaultCompressionOptions)) {
        throw new Error(`[vite-plugin-compression] Unsupported algorithm: ${algorithm}`);
    }
    // Object.assign skips undefined sources, so a missing `options` keeps the defaults.
    const merged = Object.assign({}, defaultCompressionOptions[algorithm], options);
    return [
        algorithm,
        merged
    ];
}
// CommonJS entry points: named export, default export, tuple helper and the
// companion tarball plugin.
exports.compression = compression;
exports.default = compression;
exports.defineAlgorithm = defineAlgorithm;
exports.tarball = tarball;
+553
-1
@@ -1,1 +0,553 @@ | ||
| import{createFilter as t}from"@rollup/pluginutils";import e from"fs";import n from"fs/promises";import i from"os";import o from"path";import{createPack as s}from"tar-mini";import r from"util";import a from"zlib";function c(t){return t&&t.__esModule&&Object.prototype.hasOwnProperty.call(t,"default")?t.default:t}var u,l={exports:{}};function p(){if(u)return l.exports;u=1;let t,e,n,{defineProperty:i,setPrototypeOf:o,create:s,keys:r}=Object,a="",{round:c,max:p}=Math,f=t=>{let e=/([a-f\d]{3,6})/i.exec(t)?.[1],n=e?.length,i=parseInt(6^n?3^n?"0":e[0]+e[0]+e[1]+e[1]+e[2]+e[2]:e,16);return[i>>16&255,i>>8&255,255&i]},d=(t,e,n)=>t^e||e^n?16+36*c(t/51)+6*c(e/51)+c(n/51):8>t?16:t>248?231:c(24*(t-8)/247)+232,h=t=>{let e,n,i,o,s;return 8>t?30+t:16>t?t-8+90:(232>t?(s=(t-=16)%36,e=(t/36|0)/5,n=(s/6|0)/5,i=s%6/5):e=n=i=(10*(t-232)+8)/255,o=2*p(e,n,i),o?30+(c(i)<<2|c(n)<<1|c(e))+(2^o?0:60):30)},m=(()=>{let n=t=>s.some((e=>t.test(e))),i=globalThis,o=i.process??{},s=o.argv??[],a=o.env??{},c=-1;try{t=","+r(a).join(",")}catch(t){a={},c=0}let u="FORCE_COLOR",l={false:0,0:0,1:1,2:2,3:3}[a[u]]??-1,p=u in a&&l||n(/^--color=?(true|always)?$/);return p&&(c=l),~c||(c=((n,i,o)=>(e=n.TERM,{"24bit":3,truecolor:3,ansi256:2,ansi:1}[n.COLORTERM]||(n.CI?/,GITHUB/.test(t)?3:1:i&&"dumb"!==e?o?3:/-256/.test(e)?2:1:0)))(a,!!a.PM2_HOME||a.NEXT_RUNTIME?.includes("edge")||!!o.stdout?.isTTY,"win32"===o.platform)),!l||a.NO_COLOR||n(/^--(no-color|color=(false|never))$/)?0:i.window?.chrome||p&&!c?3:c})(),g={open:a,close:a},w=39,y=49,b={},O=({p:t},{open:e,close:i})=>{let s=(t,...n)=>{if(!t){if(e&&e===i)return e;if((t??a)===a)return a}let o,r=t.raw?String.raw({raw:t},...n):a+t,c=s.p,u=c.o,l=c.c;if(r.includes(""))for(;c;c=c.p){let{open:t,close:e}=c,n=e.length,i=a,s=0;if(n)for(;~(o=r.indexOf(e,s));s=o+n)i+=r.slice(s,o)+t;r=i+r.slice(s)}return u+(r.includes("\n")?r.replace(/(\r?\n)/g,l+"$1"+u):r)+l},r=e,c=i;return t&&(r=t.o+e,c=i+t.c),o(s,n),s.p={open:e,close:i,o:r,c:c,p:t},s.open=r,s.close=c,s};const 
v=function(t=m){let e={Ansis:v,isSupported:()=>r,strip:t=>t.replace(/[][[()#;?]*(?:[0-9]{1,4}(?:;[0-9]{0,4})*)?[0-9A-ORZcf-nqry=><]/g,a),extend(t){for(let e in t){let n=t[e],o=(typeof n)[0],s="s"===o?P(...f(n)):n;b[e]="f"===o?{get(){return(...t)=>O(this,n(...t))}}:{get(){let t=O(this,s);return i(this,e,{value:t}),t}}}return n=s({},b),o(e,n),e}},r=t>0,c=(t,e)=>r?{open:`[${t}m`,close:`[${e}m`}:g,u=t=>e=>t(...f(e)),l=(t,e)=>(n,i,o)=>c(`${t}8;2;${n};${i};${o}`,e),p=(t,e)=>(n,i,o)=>c(((t,e,n)=>h(d(t,e,n)))(n,i,o)+t,e),x=t=>(e,n,i)=>t(d(e,n,i)),P=l(3,w),E=l(4,y),R=t=>c("38;5;"+t,w),j=t=>c("48;5;"+t,y);2===t?(P=x(R),E=x(j)):1===t&&(P=p(0,w),E=p(10,y),R=t=>c(h(t),w),j=t=>c(h(t)+10,y));let S,T={fg:R,bg:j,rgb:P,bgRgb:E,hex:u(P),bgHex:u(E),visible:g,reset:c(0,0),bold:c(1,22),dim:c(2,22),italic:c(3,23),underline:c(4,24),inverse:c(7,27),hidden:c(8,28),strikethrough:c(9,29)},z="Bright";return"black,red,green,yellow,blue,magenta,cyan,white,gray".split(",").map(((t,e)=>{S="bg"+t[0].toUpperCase()+t.slice(1),8>e?(T[t+z]=c(90+e,w),T[S+z]=c(100+e,y)):e=60,T[t]=c(30+e,w),T[S]=c(40+e,y)})),e.extend(T)},x=new v;return l.exports=x,x.default=x,l.exports}var f=c(p());function d(t){return t.length}function h(t,e){const n="function"==typeof e?e(t):e,{dir:i,base:s}=o.parse(t),r=i?i+"/":"";return n.replace(/\[path\]/,r).replace(/\[base\]/,s)}function m(t){return/^\\\\\?\\/.test(t)?t:t.replace(/\\/g,"/")}const g=new TextEncoder;function w(t){return"string"==typeof t?g.encode(t):t}function y(){}function b(t){const e=t in a?t:"gzip";return{algorithm:r.promisify(a[e])}}async function O(t,e,n){try{return await e(t,n)}catch(t){return Promise.reject(t)}}const v={gzip:{level:a.constants.Z_BEST_COMPRESSION},brotliCompress:{params:{[a.constants.BROTLI_PARAM_QUALITY]:a.constants.BROTLI_MAX_QUALITY}},deflate:{level:a.constants.Z_BEST_COMPRESSION},deflateRaw:{level:a.constants.Z_BEST_COMPRESSION}};class 
x{constructor(t){this.maxConcurrent=t,this.queue=[],this.errors=[],this.running=0}enqueue(t){this.queue.push(t),this.run()}async run(){for(;this.running<this.maxConcurrent&&this.queue.length;){const t=this.queue.shift();this.running++;try{await t()}catch(t){this.errors.push(t)}finally{this.running--,this.run()}}}async wait(){for(;this.running;)await new Promise((t=>setTimeout(t,0)));if(d(this.errors))throw new AggregateError(this.errors,"task failed")}}function P(t){return new x(t)}const E="vite-plugin-compression",R=(()=>{const t=i.cpus()||{length:1};return 1===t.length?10:Math.max(1,t.length-1)})();function j(t){const e=new Set,n=(t,e)=>m(o.resolve(t,e));if(t.build.rollupOptions?.output){(Array.isArray(t.build.rollupOptions.output)?t.build.rollupOptions.output:[t.build.rollupOptions.output]).forEach((i=>{("object"!=typeof i||d(Object.keys(i)))&&e.add(n(t.root,i.dir||t.build.outDir))}))}else e.add(n(t.root,t.build.outDir));return e}async function S(t,i){const s=!("copyPublicDir"in t.build)||t.build.copyPublicDir;if(t.publicDir&&s&&e.existsSync(t.publicDir)){const e=await async function(t){const e=await Promise.all((await n.readdir(t)).map((e=>o.join(t,e))));let i=0;const s=[];for(;i!==d(e);){const t=e[i],r=await n.stat(t);if(r.isDirectory()){const i=await n.readdir(t);e.push(...i.map((e=>o.join(t,e))))}r.isFile()&&s.push(t),i++}return s}(t.publicDir),s=o.join(t.root,o.relative(t.root,t.publicDir));e.forEach((t=>{const e=m(o.relative(s,t));i(e,t)}))}}function T(t={}){const{dest:i,gz:r=!1}=t,c=[],u=[];let l=[],p=process.cwd();const f=function(){const t=s(),n=[],i={dests:[],root:"",gz:!1};return{add:e=>{t.add(w(e.content),{filename:e.filename})},setup:t=>{Object.assign(i,t),i.dests.forEach((t=>{const s=m(o.resolve(i.root,t+".tar"+(i.gz?".gz":""))),r=m(o.dirname(s));m(i.root)!==r&&e.mkdirSync(r,{recursive:!0});const a=e.createWriteStream(s);n.push(a)}))},done:async()=>{t.done(),await Promise.all(n.map((e=>new 
Promise(((n,o)=>{e.on("error",o),e.on("finish",n),i.gz?t.receiver.pipe(a.createGzip()).pipe(e):t.receiver.pipe(e)}))))),n.length=0}}}(),d=P(R);let h;return{name:"vite-plugin-tarball",enforce:"post",async configResolved(t){u.push(...j(t)),p=t.root,l=i?[i]:u,h=z.getPluginAPI(t.plugins),h||await S(t,(t=>{c.push(t)})),f.setup({dests:l,root:p,gz:r})},writeBundle(t,e){for(const t in e){const n=e[t];f.add({filename:t,content:"asset"===n.type?n.source:n.code})}},async closeBundle(){h&&await h.done,!c.length&&h&&h.staticOutputs.size&&c.push(...h.staticOutputs);for(const t of u)for(const e of c)d.enqueue((async()=>{const i=o.join(t,e),s=await n.readFile(i);f.add({filename:e,content:s})}));await d.wait(),await f.done()}}}function z(e={}){const{include:i=/\.(html|xml|css|json|js|mjs|svg|yaml|yml|toml)$/,exclude:s,threshold:r=0,algorithm:a="gzip",filename:c,compressionOptions:u,deleteOriginalAssets:l=!1,skipIfLargerOrEqual:p=!0}=e,m=t(i,s),g=[],x=[],{msgs:T,cleanup:z}=function(){const t=[],e=process.stdout.write.bind(process.stdout);return process.stdout.write=function(...n){const[i]=n,o="string"==typeof i?i:i.toString();return o.includes("built in")?(t.push(o),!1):e.apply(this,n)},{cleanup:()=>process.stdout.write=e,msgs:t}}();let _,I=process.cwd();const C={algorithm:"string"==typeof a?b(a).algorithm:a,options:"function"==typeof a?u:Object.assign({},v[a],u),filename:c??("brotliCompress"===a?"[path][base].br":"[path][base].gz")},M=P(R),A=async function(t,e){for(const t in e){if(!m(t))continue;const n=e[t],i=w("asset"===n.type?n.source:n.code),o=d(i);o<r||M.enqueue((async()=>{const n=h(t,C.filename),s=await O(i,C.algorithm,C.options);p&&d(s)>=o||((l||t===n)&&Reflect.deleteProperty(e,t),this.emitFile({type:"asset",fileName:n,source:s}))}))}await M.wait().catch(this.error)},B={resolve:y},D={staticOutputs:new Set,done:new Promise((t=>{B.resolve=t}))},$=new 
Intl.NumberFormat("en",{maximumFractionDigits:2,minimumFractionDigits:2});return{name:E,apply:"build",enforce:"post",api:D,async configResolved(t){x.push(...j(t)),await S(t,(t=>{g.push(t)}));const e=t.plugins.find((t=>"vite:build-import-analysis"===t.name));if(!e)throw new Error("[vite-plugin-compression] Can't be work in versions lower than vite at 2.0.0");!function(t,e){const n=t.generateBundle;if("object"==typeof n&&n.handler){const t=n.handler;n.handler=async function(...n){await t.apply(this,n),await e.apply(this,n)}}"function"==typeof n&&(t.generateBundle=async function(...t){await n.apply(this,t),await e.apply(this,t)})}(e,A),_=t.logger,I=t.root},async closeBundle(){const t=[],e=async(e,i)=>{const s=o.join(e,i);if(!m(s)&&!D.staticOutputs.has(i))return void D.staticOutputs.add(i);const{size:a}=await n.stat(s);a<r?D.staticOutputs.has(i)||D.staticOutputs.add(i):await(async(e,i,s)=>{const r=await n.readFile(e),a=await O(r,C.algorithm,C.options);if(p&&d(a)>=d(r))return void(D.staticOutputs.has(i)||D.staticOutputs.add(i));const c=h(i,C.filename);D.staticOutputs.has(c)||D.staticOutputs.add(c);const u=o.join(s,c);l&&u!==e&&await n.rm(e,{recursive:!0,force:!0}),await n.writeFile(u,a),t.push({dest:o.relative(I,s)+"/",file:c,size:d(a)})})(s,i,e)};for(const t of x)for(const n of g)M.enqueue((()=>e(t,n)));if(await M.wait().catch((t=>t)),B.resolve(),z(),_){const e=t.reduce(((t,e)=>{const n=e.dest+e.file;return Math.max(t,n.length)}),0);for(const{dest:n,file:o,size:s}of t){const t=o.padEnd(e);_.info(f.dim(n)+f.green(t)+f.bold(f.dim((i=s,`${$.format(i/1e3)} kB`))))}}var i;for(const t of T)console.info(t)}}}function _(t){return t}z.getPluginAPI=t=>t.find((t=>t.name===E))?.api;export{z as compression,z as default,_ as defineCompressionOption,T as tarball}; | ||
| import { createFilter } from '@rollup/pluginutils'; | ||
| import ansis from 'ansis'; | ||
| import fs from 'fs'; | ||
| import fsp from 'node:fs/promises'; | ||
| import os from 'os'; | ||
| import path from 'path'; | ||
| import { createPack } from 'tar-mini'; | ||
| import util from 'util'; | ||
| import zlib from 'zlib'; | ||
// Length of a string, Buffer/Uint8Array, or array-like source.
function len(source) {
    const { length } = source;
    return length;
}
// Expand a filename rule for a compressed asset.
// [path] -> the asset's directory, with trailing slash (empty at root)
// [base] -> the asset's base name ([name] + [ext], e.g. image.png)
// `rule` may be a template string or a function producing one; functions
// receive the original path plus the algorithm/options metadata.
function replaceFileName(staticPath, rule, metadata) {
    let template;
    if (typeof rule === 'function') {
        template = rule(staticPath, metadata);
    } else {
        template = rule;
    }
    const parsed = path.parse(staticPath);
    const dirPrefix = parsed.dir === '' ? '' : parsed.dir + '/';
    return template.replace(/\[path\]/, dirPrefix).replace(/\[base\]/, parsed.base);
}
// Convert Windows backslash separators to forward slashes.
// Extended-length paths (\\?\...) are returned untouched, since rewriting
// their separators would change their meaning.
function slash(path) {
    if (/^\\\\\?\\/.test(path)) {
        return path;
    }
    return path.replace(/\\/g, '/');
}
// Recursively collect every regular file beneath `entry`.
// Breadth-first walk: `paths` doubles as the work queue — directories
// discovered during the scan are appended and visited in turn.
async function readAll(entry) {
    const paths = await Promise.all((await fsp.readdir(entry)).map((dir)=>path.join(entry, dir)));
    let pos = 0;
    const result = [];
    // `len(paths)` is re-evaluated each iteration, so entries pushed below
    // extend the walk until the queue is drained.
    while(pos !== len(paths)){
        const dir = paths[pos];
        const stat = await fsp.stat(dir);
        if (stat.isDirectory()) {
            const dirs = await fsp.readdir(dir);
            paths.push(...dirs.map((sub)=>path.join(dir, sub)));
        }
        if (stat.isFile()) {
            result.push(dir);
        }
        pos++;
    }
    return result;
}
// Shared UTF-8 encoder for string -> bytes conversion.
const encoder = new TextEncoder();
// Normalize bundle content to bytes: strings are UTF-8 encoded, binary
// sources (Uint8Array/Buffer) pass through unchanged.
function stringToBytes(b) {
    if (typeof b === 'string') {
        return encoder.encode(b);
    }
    return b;
}
| function noop() {} | ||
// Intercept process.stdout writes so that vite's final "built in Xs" report
// can be deferred until after this plugin prints its own summary. Matching
// messages are buffered in `msgs`; everything else is forwarded to the real
// stdout untouched. Call `cleanup()` to restore the original write.
function captureViteLogger() {
    const msgs = [];
    const originalStdWrite = process.stdout.write.bind(process.stdout);
    const cleanup = ()=>process.stdout.write = originalStdWrite;
    // @ts-expect-error overloaded methods
    process.stdout.write = function(...args) {
        const [output] = args;
        const str = typeof output === 'string' ? output : output.toString();
        // Vite's reporter summary contains "built in"; hold those lines back.
        if (str.includes('built in')) {
            msgs.push(str);
            return false;
        }
        // eslint-disable-next-line @typescript-eslint/no-unsafe-return
        return originalStdWrite.apply(this, args);
    };
    return {
        cleanup,
        msgs
    };
}
// Resolve a user-supplied algorithm name to a promisified zlib function.
// Names not present on the zlib module silently fall back to gzip.
function ensureAlgorithm(userAlgorithm) {
    const name = userAlgorithm in zlib ? userAlgorithm : 'gzip';
    const algorithm = util.promisify(zlib[name]);
    return { algorithm };
}
| async function compress(buf, compress, options) { | ||
| try { | ||
| const res = await compress(buf, options); | ||
| return res; | ||
| } catch (error) { | ||
| return Promise.reject(error); | ||
| } | ||
| } | ||
// Per-algorithm zlib defaults: always favor maximum compression ratio.
const defaultCompressionOptions = {
    gzip: {
        level: zlib.constants.Z_BEST_COMPRESSION
    },
    brotliCompress: {
        params: {
            // Brotli max quality — slowest but smallest output.
            [zlib.constants.BROTLI_PARAM_QUALITY]: zlib.constants.BROTLI_MAX_QUALITY
        }
    },
    deflate: {
        level: zlib.constants.Z_BEST_COMPRESSION
    },
    deflateRaw: {
        level: zlib.constants.Z_BEST_COMPRESSION
    }
};
// Stateful tar writer shared by the tarball plugin.
// `setup` opens one write stream per destination (creating parent dirs as
// needed), `add` appends an in-memory entry to the pack, and `done` finalizes
// the archive and flushes it to every destination stream.
function createTarBall() {
    const pack = createPack();
    // Open write streams, one per destination archive.
    const wss = [];
    const options = {
        dests: [],
        root: ''
    };
    const add = (meta)=>{
        pack.add(stringToBytes(meta.content), {
            filename: meta.filename
        });
    };
    const setup = (tarballOPtions)=>{
        Object.assign(options, tarballOPtions);
        options.dests.forEach((dest)=>{
            const expected = slash(path.resolve(options.root, dest + '.tar'));
            const parent = slash(path.dirname(expected));
            // Only create the parent directory when the archive lands outside root.
            if (slash(options.root) !== parent) {
                fs.mkdirSync(parent, {
                    recursive: true
                });
            }
            const w = fs.createWriteStream(expected);
            wss.push(w);
        });
    };
    const done = async ()=>{
        pack.done();
        // Pipe the finished pack into every destination and wait for all
        // streams to finish (or fail).
        await Promise.all(wss.map((w)=>new Promise((resolve, reject)=>{
                w.on('error', reject);
                w.on('finish', resolve);
                pack.receiver.pipe(w);
            })));
        wss.length = 0;
    };
    const context = {
        add,
        setup,
        done
    };
    return context;
}
| /* eslint-disable @typescript-eslint/no-floating-promises */ function _define_property(obj, key, value) { | ||
| if (key in obj) { | ||
| Object.defineProperty(obj, key, { | ||
| value: value, | ||
| enumerable: true, | ||
| configurable: true, | ||
| writable: true | ||
| }); | ||
| } else { | ||
| obj[key] = value; | ||
| } | ||
| return obj; | ||
| } | ||
// Bounded concurrent task runner. Tasks are async thunks; per-task failures
// are collected rather than thrown, and surfaced together from wait() as a
// single AggregateError.
class Queue {
    // Add a task and immediately try to start it (subject to the cap).
    enqueue(task) {
        this.queue.push(task);
        this.run();
    }
    async run() {
        // Pull tasks while below the concurrency cap. `running` is incremented
        // before the task is awaited and decremented in `finally`, where run()
        // is re-entered to pick up any queued work.
        while(this.running < this.maxConcurrent && this.queue.length){
            const task = this.queue.shift();
            this.running++;
            try {
                await task();
            } catch (error) {
                this.errors.push(error);
            } finally{
                this.running--;
                this.run();
            }
        }
    }
    // Poll (macrotask ticks) until every in-flight task settles, then rethrow
    // collected failures as one AggregateError.
    async wait() {
        while(this.running){
            await new Promise((resolve)=>setTimeout(resolve, 0));
        }
        if (len(this.errors)) {
            throw new AggregateError(this.errors, 'task failed');
        }
    }
    constructor(maxConcurrent){
        // Transpiled class-field initialization (see _define_property helper).
        _define_property(this, "maxConcurrent", void 0);
        _define_property(this, "queue", void 0);
        _define_property(this, "running", void 0);
        _define_property(this, "errors", void 0);
        this.maxConcurrent = maxConcurrent;
        this.queue = [];
        this.errors = [];
        this.running = 0;
    }
}
// Factory wrapper so call sites never use `new Queue(...)` directly.
function createConcurrentQueue(max) {
    const queue = new Queue(max);
    return queue;
}
// Name of vite's internal plugin whose generateBundle hook we piggy-back on.
const VITE_INTERNAL_ANALYSIS_PLUGIN = 'vite:build-import-analysis';
// This plugin's public name (also used by compression.getPluginAPI lookups).
const VITE_COMPRESSION_PLUGIN = 'vite-plugin-compression';
// Key probed on vite's build config (exists since vite 3.2).
const VITE_COPY_PUBLIC_DIR = 'copyPublicDir';
// Concurrency cap for the task queue: leave one cpu free on multi-core
// machines, but allow generous interleaving (10) on a single core.
const MAX_CONCURRENT = (()=>{
    const cpus = os.cpus() || {
        length: 1
    };
    if (cpus.length === 1) {
        return 10;
    }
    return Math.max(1, cpus.length - 1);
})();
// Collect the absolute output directories for this build.
// Rollup's `output` option wins over vite's `build.outDir`. Issue #39: some
// plugins (e.g. vite-plugin-legacy) inject an empty output entry which must
// be skipped. Absolute paths are required so the plugin behaves correctly
// inside monorepos (e.g. `yarn --cwd @pkg/website build`).
function handleOutputOption(conf) {
    const outputs = new Set();
    const prepareAbsPath = (root, sub)=>slash(path.resolve(root, sub));
    const rollupOptions = conf.build.rollupOptions;
    const output = rollupOptions === null || rollupOptions === undefined ? undefined : rollupOptions.output;
    if (output) {
        const outputOptions = Array.isArray(output) ? output : [output];
        for (const opt of outputOptions){
            // Skip empty placeholder entries injected by other plugins.
            if (typeof opt === 'object' && !len(Object.keys(opt))) {
                continue;
            }
            outputs.add(prepareAbsPath(conf.root, opt.dir || conf.build.outDir));
        }
    } else {
        outputs.add(prepareAbsPath(conf.root, conf.build.outDir));
    }
    return outputs;
}
// Mirror vite's public-dir copy. `copyPublicDir` exists since vite 3.2;
// older versions always copy, hence the fallback to true. For every file
// under publicDir, `callback` receives the outDir-relative (slashed) name
// and the absolute source path.
async function handleStaticFiles(config, callback) {
    const baseCondit = VITE_COPY_PUBLIC_DIR in config.build ? config.build.copyPublicDir : true;
    if (config.publicDir && baseCondit && fs.existsSync(config.publicDir)) {
        const staticAssets = await readAll(config.publicDir);
        const publicPath = path.join(config.root, path.relative(config.root, config.publicDir));
        staticAssets.forEach((assets)=>{
            const file = slash(path.relative(publicPath, assets));
            callback(file, assets);
        });
    }
}
// Create the tarball plugin: archives the build output (and publicDir
// statics) into <dest>.tar for every output directory. When the compression
// plugin is also installed, its API is used to wait for compression to
// finish and to reuse its static-file bookkeeping.
function tarball(opts = {}) {
    const { dest: userDest } = opts;
    const statics = [];
    const outputs = [];
    let dests = [];
    let root = process.cwd();
    const tarball1 = createTarBall();
    const queue = createConcurrentQueue(MAX_CONCURRENT);
    // API bridge to the compression plugin (undefined when it's absent).
    let ctx;
    return {
        name: 'vite-plugin-tarball',
        enforce: 'post',
        async configResolved (config) {
            outputs.push(...handleOutputOption(config));
            root = config.root;
            dests = userDest ? [
                userDest
            ] : outputs;
            // No need to add sources to the pack in the configResolved stage —
            // doing it this early would slow down the build task.
            ctx = compression.getPluginAPI(config.plugins);
            // Without the compression plugin we must scan publicDir ourselves.
            if (!ctx) {
                await handleStaticFiles(config, (file)=>{
                    statics.push(file);
                });
            }
            // create dest dir
            tarball1.setup({
                dests,
                root
            });
        },
        writeBundle (_, bundles) {
            // Stream every emitted asset/chunk straight into the pack.
            for(const fileName in bundles){
                const bundle = bundles[fileName];
                tarball1.add({
                    filename: fileName,
                    content: bundle.type === 'asset' ? bundle.source : bundle.code
                });
            }
        },
        async closeBundle () {
            // Wait for the compression plugin's closeBundle so the archive
            // picks up the compressed variants.
            if (ctx) {
                await ctx.done;
            }
            if (!statics.length && ctx && ctx.staticOutputs.size) {
                statics.push(...ctx.staticOutputs);
            }
            for (const dest of outputs){
                for (const file of statics){
                    queue.enqueue(async ()=>{
                        const p = path.join(dest, file);
                        const buf = await fsp.readFile(p);
                        tarball1.add({
                            filename: file,
                            content: buf
                        });
                    });
                }
            }
            await queue.wait();
            await tarball1.done();
        }
    };
}
| function hijackGenerateBundle(plugin, afterHook) { | ||
| const hook = plugin.generateBundle; | ||
| if (typeof hook === 'object' && hook.handler) { | ||
| const fn = hook.handler; | ||
| hook.handler = async function handler(...args) { | ||
| await fn.apply(this, args); | ||
| await afterHook.apply(this, args); | ||
| }; | ||
| } | ||
| if (typeof hook === 'function') { | ||
| plugin.generateBundle = async function handler(...args) { | ||
| await hook.apply(this, args); | ||
| await afterHook.apply(this, args); | ||
| }; | ||
| } | ||
| } | ||
// Create the vite compression plugin (ESM build).
// Two compression passes are performed: bundle assets are compressed during a
// hook appended to vite's internal analysis plugin's generateBundle, and files
// copied from publicDir are compressed at closeBundle. All compression work is
// funneled through a bounded concurrent queue.
function compression(opts = {}) {
    const { include = /\.(html|xml|css|json|js|mjs|svg|yaml|yml|toml)$/, exclude, threshold = 0, algorithms: userAlgorithms = [
        'gzip',
        'brotliCompress'
    ], filename, deleteOriginalAssets = false, skipIfLargerOrEqual = true } = opts;
    // Normalize the user list into [algorithm, options] tuples. Strings go
    // through defineAlgorithm (validation + default options); arrays are
    // assumed to already be tuples. Any other shape is silently ignored.
    const algorithms = [];
    userAlgorithms.forEach((algorithm)=>{
        if (typeof algorithm === 'string') {
            algorithms.push(defineAlgorithm(algorithm));
        } else if (typeof algorithm === 'object' && Array.isArray(algorithm)) {
            algorithms.push(algorithm);
        }
    });
    const filter = createFilter(include, exclude);
    const statics = [];
    const outputs = [];
    // vite's internal vite:reporter doesn't write any log info to stdout, so we only
    // capture the "built in" message and print the static-file progress to stdout.
    // The captured message is replayed unchanged at the end of closeBundle.
    const { msgs, cleanup } = captureViteLogger();
    let logger;
    let root = process.cwd();
    // Pre-resolve each tuple into a ready-to-call compressor plus its filename
    // template (default extension follows the algorithm: .br for brotli, else .gz).
    const zlibs = algorithms.map(([algorithm, options])=>({
            algorithm: typeof algorithm === 'string' ? ensureAlgorithm(algorithm).algorithm : algorithm,
            options,
            filename: filename !== null && filename !== void 0 ? filename : algorithm === 'brotliCompress' ? '[path][base].br' : '[path][base].gz'
        }));
    const queue = createConcurrentQueue(MAX_CONCURRENT);
    // Runs as an appended step of vite's own generateBundle (see the
    // hijackGenerateBundle call below); `this` is the rollup plugin context.
    const generateBundle = async function handler(_, bundles) {
        for(const fileName in bundles){
            if (!filter(fileName)) {
                continue;
            }
            const bundle = bundles[fileName];
            const source = stringToBytes(bundle.type === 'asset' ? bundle.source : bundle.code);
            const size = len(source);
            if (size < threshold) {
                continue;
            }
            queue.enqueue(async ()=>{
                for(let i = 0; i < zlibs.length; i++){
                    const z = zlibs[i];
                    // `flag` marks the last algorithm: only then may the original
                    // asset be dropped from the bundle.
                    const flag = i === zlibs.length - 1;
                    const name = replaceFileName(fileName, z.filename, {
                        options: z.options,
                        algorithm: z.algorithm
                    });
                    const compressed = await compress(source, z.algorithm, z.options);
                    if (skipIfLargerOrEqual && len(compressed) >= size) {
                        // NOTE(review): this `return` also skips any remaining
                        // algorithms for this file — confirm that is intended when
                        // multiple algorithms are configured.
                        return;
                    }
                    // #issue 30 31
                    // https://rollupjs.org/plugin-development/#this-emitfile
                    if (flag) {
                        if (deleteOriginalAssets || fileName === name) {
                            Reflect.deleteProperty(bundles, fileName);
                        }
                    }
                    this.emitFile({
                        type: 'asset',
                        fileName: name,
                        source: compressed
                    });
                }
            });
        }
        await queue.wait().catch(this.error);
    };
    // done/resolve pair lets the tarball plugin (via the `api` field) await the
    // completion of this plugin's closeBundle work.
    const doneResolver = {
        resolve: noop
    };
    const pluginContext = {
        staticOutputs: new Set(),
        done: new Promise((resolve)=>{
            doneResolver.resolve = resolve;
        })
    };
    // Fixed two-decimal formatting for the size report (e.g. "12.34 kB").
    const numberFormatter = new Intl.NumberFormat('en', {
        maximumFractionDigits: 2,
        minimumFractionDigits: 2
    });
    const displaySize = (bytes)=>{
        return `${numberFormatter.format(bytes / 1000)} kB`;
    };
    const plugin = {
        name: VITE_COMPRESSION_PLUGIN,
        apply: 'build',
        enforce: 'post',
        api: pluginContext,
        async configResolved (config) {
            // hijack vite's internal `vite:build-import-analysis` plugin, so we won't
            // need to process the chunks at closeBundle anymore.
            // issue #26
            // https://github.com/vitejs/vite/blob/716286ef21f4d59786f21341a52a81ee5db58aba/packages/vite/src/node/build.ts#L566-L611
            // Vite follows the rollup option first and the configResolved hook doesn't
            // expose the merged conf to the user, so for someone using rollupOptions
            // `config.build.outDir` will not be as expected.
            outputs.push(...handleOutputOption(config));
            // Vite's public build: https://github.com/vitejs/vite/blob/HEAD/packages/vite/src/node/build.ts#L704-L709
            // copyPublicDir minimum version 3.2+
            // No need to check size here.
            await handleStaticFiles(config, (file)=>{
                statics.push(file);
            });
            const viteAnalyzerPlugin = config.plugins.find((p)=>p.name === VITE_INTERNAL_ANALYSIS_PLUGIN);
            if (!viteAnalyzerPlugin) {
                throw new Error("[vite-plugin-compression] Can't be work in versions lower than vite at 2.0.0");
            }
            hijackGenerateBundle(viteAnalyzerPlugin, generateBundle);
            logger = config.logger;
            root = config.root;
        },
        async closeBundle () {
            const compressedMessages = [];
            // Compress one static (publicDir) file with every configured
            // algorithm and write the results under `dest`.
            const compressAndHandleFile = async (filePath, file, dest)=>{
                const buf = await fsp.readFile(filePath);
                for(let i = 0; i < zlibs.length; i++){
                    const z = zlibs[i];
                    const flag = i === zlibs.length - 1;
                    const compressed = await compress(buf, z.algorithm, z.options);
                    if (skipIfLargerOrEqual && len(compressed) >= len(buf)) {
                        if (!pluginContext.staticOutputs.has(file)) {
                            pluginContext.staticOutputs.add(file);
                        }
                        // NOTE(review): returning here also skips the remaining
                        // algorithms for this file — confirm intended.
                        return;
                    }
                    const fileName = replaceFileName(file, z.filename, {
                        options: z.options,
                        algorithm: z.algorithm
                    });
                    if (!pluginContext.staticOutputs.has(fileName)) {
                        pluginContext.staticOutputs.add(fileName);
                    }
                    const outputPath = path.join(dest, fileName);
                    // Delete the source only on the last algorithm, and never when
                    // the compressed file would overwrite it in place.
                    if (flag) {
                        if (deleteOriginalAssets && outputPath !== filePath) {
                            await fsp.rm(filePath, {
                                recursive: true,
                                force: true
                            });
                        }
                    }
                    await fsp.writeFile(outputPath, compressed);
                    compressedMessages.push({
                        dest: path.relative(root, dest) + '/',
                        file: fileName,
                        size: len(compressed)
                    });
                }
            };
            const processFile = async (dest, file)=>{
                const filePath = path.join(dest, file);
                // Files excluded by the filter are still recorded so the tarball
                // plugin can pack them uncompressed.
                if (!filter(filePath) && !pluginContext.staticOutputs.has(file)) {
                    pluginContext.staticOutputs.add(file);
                    return;
                }
                const { size } = await fsp.stat(filePath);
                if (size < threshold) {
                    if (!pluginContext.staticOutputs.has(file)) {
                        pluginContext.staticOutputs.add(file);
                    }
                    return;
                }
                await compressAndHandleFile(filePath, file, dest);
            };
            // parallel run
            for (const dest of outputs){
                for (const file of statics){
                    queue.enqueue(()=>processFile(dest, file));
                }
            }
            // issue #18
            // In some cases (like vuepress) vite build is called with `Promise.all`,
            // which is concurrent: by the time we record the file fd it may already
            // have changed, so swallow the error instead of failing the build.
            await queue.wait().catch((e)=>e);
            doneResolver.resolve();
            cleanup();
            if (logger) {
                // Pad file names to the longest dest+file string so sizes align.
                const paddingSize = compressedMessages.reduce((acc, cur)=>{
                    const full = cur.dest + cur.file;
                    return Math.max(acc, full.length);
                }, 0);
                for (const { dest, file, size } of compressedMessages){
                    const paddedFile = file.padEnd(paddingSize);
                    logger.info(ansis.dim(dest) + ansis.green(paddedFile) + ansis.bold(ansis.dim(displaySize(size))));
                }
            }
            // Finally replay vite's captured "built in" summary.
            for (const msg of msgs){
                console.info(msg);
            }
        }
    };
    return plugin;
}
// Look up this plugin's public API object from a resolved plugin list.
// Returns undefined when the compression plugin is not installed.
compression.getPluginAPI = (plugins)=>{
    const found = plugins.find((p)=>p.name === VITE_COMPRESSION_PLUGIN);
    if (found === null || found === undefined) {
        return undefined;
    }
    return found.api;
};
// Build an [algorithm, options] tuple for the compression plugin.
// String algorithms must be one of the supported zlib names and get the
// library defaults merged with `options`; a custom compressor function is
// paired with `options` as-is (or an empty object).
function defineAlgorithm(algorithm, options) {
    if (typeof algorithm !== 'string') {
        return [
            algorithm,
            options || {}
        ];
    }
    if (!(algorithm in defaultCompressionOptions)) {
        throw new Error(`[vite-plugin-compression] Unsupported algorithm: ${algorithm}`);
    }
    // Object.assign skips undefined sources, so a missing `options` keeps the defaults.
    const merged = Object.assign({}, defaultCompressionOptions[algorithm], options);
    return [
        algorithm,
        merged
    ];
}
| export { compression, compression as default, defineAlgorithm, tarball }; |
+27
-14
| { | ||
| "name": "vite-plugin-compression2", | ||
| "version": "1.4.0", | ||
| "version": "2.0.0", | ||
| "packageManager": "pnpm@9.4.0", | ||
| "description": "a fast vite compression plugin", | ||
| "main": "./dist/index.js", | ||
| "main": "dist/index.js", | ||
| "types": "dist/index.d.ts", | ||
| "module": "./dist/index.mjs", | ||
| "module": "dist/index.mjs", | ||
| "files": [ | ||
@@ -13,2 +14,12 @@ "dist", | ||
| ], | ||
| "exports": { | ||
| ".": { | ||
| "types": { | ||
| "import": "./dist/index.d.mts", | ||
| "require": "./dist/index.d.ts" | ||
| }, | ||
| "import": "./dist/index.mjs", | ||
| "require": "./dist/index.js" | ||
| } | ||
| }, | ||
| "repository": { | ||
@@ -29,23 +40,20 @@ "type": "git", | ||
| "homepage": "https://github.com/nonzzz/vite-plugin-compression#readme", | ||
| "exports": { | ||
| ".": { | ||
| "source": "./src/index.ts", | ||
| "import": "./dist/index.mjs", | ||
| "default": "./dist/index.js" | ||
| } | ||
| }, | ||
| "devDependencies": { | ||
| "@swc/core": "^1.11.31", | ||
| "@types/node": "^20.14.9", | ||
| "@vitest/coverage-v8": "^2.0.3", | ||
| "ansis": "^4.0.0", | ||
| "dprint": "^0.46.3", | ||
| "eslint": "^9.16.0", | ||
| "eslint-config-kagura": "^3.0.1", | ||
| "jiek": "^1.0.14", | ||
| "memdisk": "^1.2.1", | ||
| "playwright": "^1.32.3", | ||
| "rollup": "^4.41.1", | ||
| "rollup-plugin-dts": "^6.2.1", | ||
| "rollup-plugin-swc3": "^0.12.1", | ||
| "sirv": "^2.0.3", | ||
| "typescript": "^5.3.3", | ||
| "vitest": "^2.1.2", | ||
| "vite": "^6.3.5", | ||
| "ansis": "^4.0.0" | ||
| "vite-bundle-analyzer": "^0.22.0", | ||
| "vitest": "^2.1.2" | ||
| }, | ||
@@ -62,3 +70,8 @@ "dependencies": { | ||
| "sirv": "2.0.3" | ||
| }, | ||
| "pnpm": { | ||
| "patchedDependencies": { | ||
| "vite@2.9.18": "patches/vite@2.9.18.patch" | ||
| } | ||
| } | ||
| } | ||
| } |
+183
-11
@@ -19,5 +19,6 @@ <p align="center"> | ||
| ### Basic Usage | ||
| ```js | ||
| import { defineConfig } from 'vite' | ||
| import { compression } from 'vite-plugin-compression2' | ||
@@ -27,6 +28,4 @@ | ||
| plugins: [ | ||
| // ...your plugin | ||
| // ...your plugins | ||
| compression() | ||
| // If you want to create a tarball archive you can import tarball plugin from this package and use | ||
| // after compression. | ||
| ] | ||
@@ -36,4 +35,60 @@ }) | ||
| ### Multiple Algorithms | ||
| ```js | ||
| import { compression, defineAlgorithm } from 'vite-plugin-compression2' | ||
| export default defineConfig({ | ||
| plugins: [ | ||
| compression({ | ||
| algorithms: [ | ||
| 'gzip', | ||
| 'brotliCompress', | ||
| defineAlgorithm('deflate', { level: 9 }) | ||
| ] | ||
| }) | ||
| ] | ||
| }) | ||
| ``` | ||
| ### Custom Algorithm Function | ||
| ```js | ||
| import { compression, defineAlgorithm } from 'vite-plugin-compression2' | ||
| export default defineConfig({ | ||
| plugins: [ | ||
| compression({ | ||
| algorithms: [ | ||
| defineAlgorithm( | ||
| async (buffer, options) => { | ||
| // Your custom compression logic | ||
| return compressedBuffer | ||
| }, | ||
| { customOption: true } | ||
| ) | ||
| ] | ||
| }) | ||
| ] | ||
| }) | ||
| ``` | ||
| ### With Tarball | ||
| ```js | ||
| import { compression, tarball } from 'vite-plugin-compression2' | ||
| export default defineConfig({ | ||
| plugins: [ | ||
| compression(), | ||
| // If you want to create a tarball archive, use tarball plugin after compression | ||
| tarball({ dest: './dist/archive' }) | ||
| ] | ||
| }) | ||
| ``` | ||
| ## Options | ||
| ### Compression Plugin Options | ||
| | params | type | default | description | | ||
@@ -44,8 +99,75 @@ | ---------------------- | --------------------------------------------- | ------------------------------------------------------------ | ------------------------------------------------------------------------------------------ | | ||
| | `threshold` | `number` | `0` | Only assets bigger than this size are processed (in bytes) | | ||
| | `algorithm` | `string\| function` | `gzip` | The compression algorithm | | ||
| | `compressionOptions` | `Record<string,any>` | `{}` | Compression options for `algorithm`(details see `zlib module`) | | ||
| | `algorithms` | `Algorithms` | `['gzip', 'brotliCompress']` | Array of compression algorithms or defineAlgorithm results | | ||
| | `filename` | `string \| function` | `[path][base].gz` or `[path][base].br` | The target asset filename pattern | | ||
| | `deleteOriginalAssets` | `boolean` | `false` | Whether to delete the original assets or not | | ||
| | `skipIfLargerOrEqual` | `boolean` | `true` | Whether to skip the compression if the result is larger than or equal to the original file | | ||
| | `filename` | `string` | `[path][base].gz` | The target asset filename | | ||
| ### Tarball Plugin Options | ||
| | params | type | default | description | | ||
| | ------ | -------- | ------- | --------------------------------- | | ||
| | `dest` | `string` | `-` | Destination directory for tarball | | ||
| ## API | ||
| ### `defineAlgorithm(algorithm, options?)` | ||
| Define a compression algorithm with options. | ||
| **Parameters:** | ||
| - `algorithm`: Algorithm name (`'gzip' | 'brotliCompress' | 'deflate' | 'deflateRaw'`) or custom function | ||
| - `options`: Compression options for the algorithm | ||
| **Returns:** `[algorithm, options]` tuple | ||
| **Examples:** | ||
| ```js | ||
| // Built-in algorithm with default options | ||
| defineAlgorithm('gzip') | ||
| // Built-in algorithm with custom options | ||
| defineAlgorithm('gzip', { level: 9 }) | ||
| // Brotli with custom quality | ||
| defineAlgorithm('brotliCompress', { | ||
| params: { | ||
| [require('zlib').constants.BROTLI_PARAM_QUALITY]: 11 | ||
| } | ||
| }) | ||
| // Custom algorithm function | ||
| defineAlgorithm( | ||
| async (buffer, options) => { | ||
| // Your compression implementation | ||
| return compressedBuffer | ||
| }, | ||
| { customOption: 'value' } | ||
| ) | ||
| ``` | ||
| ### Supported Algorithms | ||
| - **gzip**: Standard gzip compression | ||
| - **brotliCompress**: Brotli compression (better compression ratio) | ||
| - **deflate**: Deflate compression | ||
| - **deflateRaw**: Raw deflate compression | ||
| - **Custom Function**: Your own compression algorithm | ||
| ### Algorithm Types | ||
| The `algorithms` option accepts: | ||
| ```typescript | ||
| type Algorithms = | ||
| | Algorithm[] // ['gzip', 'brotliCompress'] | ||
| | DefineAlgorithmResult[] // [defineAlgorithm('gzip'), ...] | ||
| | (Algorithm | DefineAlgorithmResult)[] // Mixed array | ||
| ``` | ||
| ## Migration | ||
| If you're upgrading from v1.x, please check the [Migration Guide](./MIGRATION-GUIDE.md). | ||
| ## Q & A | ||
@@ -55,10 +177,60 @@ | ||
| ### Examples | ||
| #### Basic Gzip Only | ||
| ```js | ||
| compression({ | ||
| algorithms: ['gzip'] | ||
| }) | ||
| ``` | ||
| #### Multiple Algorithms with Custom Options | ||
| ```js | ||
| compression({ | ||
| algorithms: [ | ||
| defineAlgorithm('gzip', { level: 9 }), | ||
| defineAlgorithm('brotliCompress', { | ||
| params: { | ||
| [require('zlib').constants.BROTLI_PARAM_QUALITY]: 11 | ||
| } | ||
| }) | ||
| ] | ||
| }) | ||
| ``` | ||
| #### Custom Filename Pattern | ||
| ```js | ||
| compression({ | ||
| algorithms: ['gzip'], | ||
| filename: '[path][base].[hash].gz' | ||
| }) | ||
| ``` | ||
| #### Delete Original Files | ||
| ```js | ||
| compression({ | ||
| algorithms: ['gzip'], | ||
| deleteOriginalAssets: true | ||
| }) | ||
| ``` | ||
| #### Size Threshold | ||
| ```js | ||
| compression({ | ||
| algorithms: ['gzip'], | ||
| threshold: 1000 // Only compress files larger than 1KB | ||
| }) | ||
| ``` | ||
| ### Others | ||
| - If you want to analysis your bundle assets. Maybe you can try [vite-bundle-analyzer](https://github.com/nonzzz/vite-bundle-analyzer) | ||
| - If you want to analyze your bundle assets, try [vite-bundle-analyzer](https://github.com/nonzzz/vite-bundle-analyzer) | ||
| - `tarball` option `dest` means to generate a tarball somewhere | ||
| - `tarball` is based on the `ustar` format. It should be compatible with all popular tar distributions (gnutar, bsdtar etc) | ||
| - `tarball` is based on the `ustar`. It should be compatible with all popular tar distributions out there (gnutar, bsdtar etc) | ||
| ### Sponsors | ||
@@ -65,0 +237,0 @@ |
Major refactor
Supply chain risk: Package has recently undergone a major refactor. It may be unstable, or the refactor may indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package
Filesystem access
Supply chain risk: Accesses the file system, and could potentially read sensitive data.
Found 1 instance in 1 package
Long strings
Supply chain risk: Contains long string literals, which may be a sign of obfuscated or packed code.
Found 1 instance in 1 package
URL strings
Supply chain risk: Package contains fragments of external URLs or IP addresses, which the package may be accessing at runtime.
Found 1 instance in 1 package
Filesystem access
Supply chain risk: Accesses the file system, and could potentially read sensitive data.
Found 2 instances in 1 package
Minified code
QualityThis package contains minified code. This may be harmless in some cases where minified code is included in packaged libraries, however packages on npm should not minify code.
Found 1 instance in 1 package
55826
68.42%1159
739.86%0
-100%250
220.51%17
30.77%9
350%1
Infinity%