
feat: remove the squoosh image service (#11770)

* feat: remove the squoosh image service

* fix: build

* chore: changeset
Erika 2024-08-20 15:12:48 +02:00 committed by GitHub
parent 787fed8504
commit cfa6a47ac7
60 changed files with 30 additions and 12922 deletions

View file

@ -0,0 +1,22 @@
---
'@astrojs/vercel': major
'@astrojs/node': major
'astro': major
---
Removed support for the Squoosh image service. As the underlying library `libsquoosh` is no longer maintained and the image service sees very little usage, we have decided to remove it from Astro.
Our recommendation is to use the base Sharp image service, which is more powerful, faster, and more actively maintained.
```diff
- import { squooshImageService } from "astro/config";
import { defineConfig } from "astro/config";
export default defineConfig({
- image: {
- service: squooshImageService()
- }
});
```
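If you simply remove this configuration, Astro falls back to the Sharp service automatically. To pass options to Sharp explicitly, here is a minimal sketch using the `sharpImageService()` helper exported from `astro/config` (the `limitInputPixels` option is shown only as an example of a service option):
```js
import { defineConfig, sharpImageService } from "astro/config";

export default defineConfig({
  image: {
    // Sharp is the default service; configuring it explicitly is only needed
    // to pass options, e.g. lifting the input pixel limit for large images.
    service: sharpImageService({ limitInputPixels: false }),
  },
});
```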
If you are using this service and cannot migrate to the base Sharp image service, a third-party extraction of the previous service is available here: https://github.com/Princesseuh/astro-image-service-squoosh

View file

@ -25,15 +25,6 @@ export function getViteConfig(
*/
export function sharpImageService(config?: SharpImageServiceConfig): ImageServiceConfig;
/**
* @deprecated The Squoosh image service is deprecated and will be removed in Astro 5.x.
* We suggest migrating to the default Sharp image service instead, as it is faster, more powerful and better maintained.
*
* Return the configuration needed to use the Squoosh-based image service
* See: https://docs.astro.build/en/guides/images/#configure-squoosh
*/
export function squooshImageService(): ImageServiceConfig;
/**
* Return the configuration needed to use the passthrough image service. This image service does not perform
* any image transformations, and is mainly useful when your platform does not support other image services, or you are

View file

@ -8,13 +8,6 @@ export function sharpImageService(config = {}) {
};
}
export function squooshImageService() {
return {
entrypoint: 'astro/assets/services/squoosh',
config: {},
};
}
export function passthroughImageService() {
return {
entrypoint: 'astro/assets/services/noop',

View file

@ -66,7 +66,6 @@
"./assets/utils/inferRemoteSize.js": "./dist/assets/utils/remoteProbe.js",
"./assets/endpoint/*": "./dist/assets/endpoint/*.js",
"./assets/services/sharp": "./dist/assets/services/sharp.js",
"./assets/services/squoosh": "./dist/assets/services/squoosh.js",
"./assets/services/noop": "./dist/assets/services/noop.js",
"./loaders": "./dist/content/loaders/index.js",
"./content/runtime": "./dist/content/runtime.js",

View file

@ -1,6 +1,6 @@
import { type LocalImageService, baseService } from './service.js';
// Empty service used for platforms that neither support Squoosh or Sharp.
// Empty service used for platforms that don't support Sharp / users who don't want transformations.
const noopService: LocalImageService = {
...baseService,
propertiesToHash: ['src'],
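For platforms where Sharp cannot run at all, this passthrough service can be selected with the `passthroughImageService()` helper from `astro/config`; a minimal sketch:
```js
import { defineConfig, passthroughImageService } from "astro/config";

export default defineConfig({
  image: {
    // The passthrough (noop) service performs no transformations;
    // images are emitted as-is.
    service: passthroughImageService(),
  },
});
```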

View file

@ -1,119 +0,0 @@
import { yellow } from 'kleur/colors';
import type { ImageOutputFormat, ImageQualityPreset } from '../types.js';
import { imageMetadata } from '../utils/metadata.js';
import {
type BaseServiceTransform,
type LocalImageService,
baseService,
parseQuality,
} from './service.js';
import { processBuffer } from './vendor/squoosh/image-pool.js';
import type { Operation } from './vendor/squoosh/image.js';
// eslint-disable-next-line no-console
console.warn(
yellow(
'The Squoosh image service is deprecated and will be removed in Astro 5.x. We suggest migrating to the default Sharp image service instead, as it is faster, more powerful and better maintained.',
),
);
const baseQuality = { low: 25, mid: 50, high: 80, max: 100 };
const qualityTable: Record<
Exclude<ImageOutputFormat, 'png' | 'svg'>,
Record<ImageQualityPreset, number>
> = {
avif: {
// Squoosh's AVIF encoder has a bit of a weird behavior where `62` is technically the maximum, and anything over is overkill
max: 62,
high: 45,
mid: 35,
low: 20,
},
jpeg: baseQuality,
jpg: baseQuality,
webp: baseQuality,
// Squoosh's PNG encoder does not support a quality setting, so we can skip that here
};
async function getRotationForEXIF(
inputBuffer: Uint8Array,
src?: string,
): Promise<Operation | undefined> {
const meta = await imageMetadata(inputBuffer, src);
if (!meta) return undefined;
// EXIF orientations are a bit hard to read, but the numbers are actually standard. See https://exiftool.org/TagNames/EXIF.html for a list.
// Various illustrations can also be found online for a more graphic representation, it's a bit old school.
switch (meta.orientation) {
case 3:
case 4:
return { type: 'rotate', numRotations: 2 };
case 5:
case 6:
return { type: 'rotate', numRotations: 1 };
case 7:
case 8:
return { type: 'rotate', numRotations: 3 };
default:
return undefined;
}
}
const service: LocalImageService = {
validateOptions: baseService.validateOptions,
getURL: baseService.getURL,
parseURL: baseService.parseURL,
getHTMLAttributes: baseService.getHTMLAttributes,
getSrcSet: baseService.getSrcSet,
async transform(inputBuffer, transformOptions) {
const transform: BaseServiceTransform = transformOptions as BaseServiceTransform;
let format = transform.format;
// Return SVGs as-is
if (format === 'svg') return { data: inputBuffer, format: 'svg' };
const operations: Operation[] = [];
const rotation = await getRotationForEXIF(inputBuffer, transform.src);
if (rotation) {
operations.push(rotation);
}
// Never resize using both width and height at the same time, prioritizing width.
if (transform.height && !transform.width) {
operations.push({
type: 'resize',
height: Math.round(transform.height),
});
} else if (transform.width) {
operations.push({
type: 'resize',
width: Math.round(transform.width),
});
}
let quality: number | string | undefined = undefined;
if (transform.quality) {
const parsedQuality = parseQuality(transform.quality);
if (typeof parsedQuality === 'number') {
quality = parsedQuality;
} else {
quality =
transform.quality in qualityTable[format]
? qualityTable[format][transform.quality]
: undefined;
}
}
const data = await processBuffer(inputBuffer, operations, format, quality);
return {
data: Buffer.from(data),
format: format,
};
},
};
export default service;

View file

@ -1,202 +0,0 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View file

@ -1,32 +0,0 @@
// eslint-disable-next-line no-shadow
export const enum AVIFTune {
auto,
psnr,
ssim,
}
export interface EncodeOptions {
cqLevel: number
denoiseLevel: number
cqAlphaLevel: number
tileRowsLog2: number
tileColsLog2: number
speed: number
subsample: number
chromaDeltaQ: boolean
sharpness: number
tune: AVIFTune
}
export interface AVIFModule extends EmscriptenWasm.Module {
encode(
data: BufferSource,
width: number,
height: number,
options: EncodeOptions
): Uint8Array
}
declare var moduleFactory: EmscriptenWasm.ModuleFactory<AVIFModule>
export default moduleFactory

File diff suppressed because it is too large

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large

File diff suppressed because one or more lines are too long

View file

@ -1,369 +0,0 @@
import { instantiateEmscriptenWasm } from './emscripten-utils.js'
interface DecodeModule extends EmscriptenWasm.Module {
decode: (data: Uint8Array) => ImageData
}
type DecodeModuleFactory = EmscriptenWasm.ModuleFactory<DecodeModule>
interface RotateModuleInstance {
exports: {
memory: WebAssembly.Memory
rotate(width: number, height: number, rotate: number): void
}
}
interface ResizeWithAspectParams {
input_width: number
input_height: number
target_width?: number
target_height?: number
}
export interface ResizeOptions {
width?: number
height?: number
method: 'triangle' | 'catrom' | 'mitchell' | 'lanczos3'
premultiply: boolean
linearRGB: boolean
}
export interface RotateOptions {
numRotations: number
}
// MozJPEG
import type { MozJPEGModule as MozJPEGEncodeModule } from './mozjpeg/mozjpeg_enc.js'
import mozDec from './mozjpeg/mozjpeg_node_dec.js'
import mozDecWasm from './mozjpeg/mozjpeg_node_dec.wasm.js'
import mozEnc from './mozjpeg/mozjpeg_node_enc.js'
import mozEncWasm from './mozjpeg/mozjpeg_node_enc.wasm.js'
// WebP
import type { WebPModule as WebPEncodeModule } from './webp/webp_enc.js'
import webpDec from './webp/webp_node_dec.js'
import webpDecWasm from './webp/webp_node_dec.wasm.js'
import webpEnc from './webp/webp_node_enc.js'
import webpEncWasm from './webp/webp_node_enc.wasm.js'
// AVIF
import type { AVIFModule as AVIFEncodeModule } from './avif/avif_enc.js'
import avifDec from './avif/avif_node_dec.js'
import avifDecWasm from './avif/avif_node_dec.wasm.js'
import avifEnc from './avif/avif_node_enc.js'
import avifEncWasm from './avif/avif_node_enc.wasm.js'
// PNG
import * as pngEncDec from './png/squoosh_png.js'
import pngEncDecWasm from './png/squoosh_png_bg.wasm.js'
const pngEncDecInit = () =>
pngEncDec.default(pngEncDecWasm)
// OxiPNG
import * as oxipng from './png/squoosh_oxipng.js'
import oxipngWasm from './png/squoosh_oxipng_bg.wasm.js'
const oxipngInit = () => oxipng.default(oxipngWasm)
// Resize
import * as resize from './resize/squoosh_resize.js'
import resizeWasm from './resize/squoosh_resize_bg.wasm.js'
const resizeInit = () => resize.default(resizeWasm)
// rotate
import rotateWasm from './rotate/rotate.wasm.js'
// Our decoders currently rely on a `ImageData` global.
import ImageData from './image_data.js'
(global as any).ImageData = ImageData
function resizeNameToIndex(
name: 'triangle' | 'catrom' | 'mitchell' | 'lanczos3'
) {
switch (name) {
case 'triangle':
return 0
case 'catrom':
return 1
case 'mitchell':
return 2
case 'lanczos3':
return 3
default:
throw Error(`Unknown resize algorithm "${name}"`)
}
}
function resizeWithAspect({
input_width,
input_height,
target_width,
target_height,
}: ResizeWithAspectParams): { width: number; height: number } {
if (!target_width && !target_height) {
throw Error('Need to specify at least width or height when resizing')
}
if (target_width && target_height) {
return { width: target_width, height: target_height }
}
if (!target_width) {
return {
width: Math.round((input_width / input_height) * target_height!),
height: target_height!,
}
}
return {
width: target_width,
height: Math.round((input_height / input_width) * target_width),
}
}
export const preprocessors = {
resize: {
name: 'Resize',
description: 'Resize the image before compressing',
instantiate: async () => {
await resizeInit()
return (
buffer: Uint8Array,
input_width: number,
input_height: number,
{ width, height, method, premultiply, linearRGB }: ResizeOptions
) => {
;({ width, height } = resizeWithAspect({
input_width,
input_height,
target_width: width,
target_height: height,
}))
const imageData = new ImageData(
resize.resize(
buffer,
input_width,
input_height,
width,
height,
resizeNameToIndex(method),
premultiply,
linearRGB
),
width,
height
)
resize.cleanup()
return imageData
}
},
defaultOptions: {
method: 'lanczos3',
fitMethod: 'stretch',
premultiply: true,
linearRGB: true,
},
},
rotate: {
name: 'Rotate',
description: 'Rotate image',
instantiate: async () => {
return async (
buffer: Uint8Array,
width: number,
height: number,
{ numRotations }: RotateOptions
) => {
const degrees = (numRotations * 90) % 360
const sameDimensions = degrees === 0 || degrees === 180
const size = width * height * 4
const instance = (
await WebAssembly.instantiate(rotateWasm)
).instance as RotateModuleInstance
const { memory } = instance.exports
const additionalPagesNeeded = Math.ceil(
(size * 2 - memory.buffer.byteLength + 8) / (64 * 1024)
)
if (additionalPagesNeeded > 0) {
memory.grow(additionalPagesNeeded)
}
const view = new Uint8ClampedArray(memory.buffer)
view.set(buffer, 8)
instance.exports.rotate(width, height, degrees)
return new ImageData(
view.slice(size + 8, size * 2 + 8),
sameDimensions ? width : height,
sameDimensions ? height : width
)
}
},
defaultOptions: {
numRotations: 0,
},
},
} as const
export const codecs = {
mozjpeg: {
name: 'MozJPEG',
extension: 'jpg',
detectors: [/^\xFF\xD8\xFF/],
dec: () =>
instantiateEmscriptenWasm(mozDec as DecodeModuleFactory, mozDecWasm),
enc: () =>
instantiateEmscriptenWasm(
mozEnc as EmscriptenWasm.ModuleFactory<MozJPEGEncodeModule>,
mozEncWasm
),
defaultEncoderOptions: {
quality: 75,
baseline: false,
arithmetic: false,
progressive: true,
optimize_coding: true,
smoothing: 0,
color_space: 3 /*YCbCr*/,
quant_table: 3,
trellis_multipass: false,
trellis_opt_zero: false,
trellis_opt_table: false,
trellis_loops: 1,
auto_subsample: true,
chroma_subsample: 2,
separate_chroma_quality: false,
chroma_quality: 75,
},
autoOptimize: {
option: 'quality',
min: 0,
max: 100,
},
},
webp: {
name: 'WebP',
extension: 'webp',
detectors: [/^RIFF....WEBPVP8[LX ]/s],
dec: () =>
instantiateEmscriptenWasm(webpDec as DecodeModuleFactory, webpDecWasm),
enc: () =>
instantiateEmscriptenWasm(
webpEnc as EmscriptenWasm.ModuleFactory<WebPEncodeModule>,
webpEncWasm
),
defaultEncoderOptions: {
quality: 75,
target_size: 0,
target_PSNR: 0,
method: 4,
sns_strength: 50,
filter_strength: 60,
filter_sharpness: 0,
filter_type: 1,
partitions: 0,
segments: 4,
pass: 1,
show_compressed: 0,
preprocessing: 0,
autofilter: 0,
partition_limit: 0,
alpha_compression: 1,
alpha_filtering: 1,
alpha_quality: 100,
lossless: 0,
exact: 0,
image_hint: 0,
emulate_jpeg_size: 0,
thread_level: 0,
low_memory: 0,
near_lossless: 100,
use_delta_palette: 0,
use_sharp_yuv: 0,
},
autoOptimize: {
option: 'quality',
min: 0,
max: 100,
},
},
avif: {
name: 'AVIF',
extension: 'avif',
// Disable eslint rule to not touch the original code
// eslint-disable-next-line no-control-regex, regexp/control-character-escape
detectors: [/^\x00\x00\x00 ftypavif\x00\x00\x00\x00/],
dec: () =>
instantiateEmscriptenWasm(avifDec as DecodeModuleFactory, avifDecWasm),
enc: async () => {
return instantiateEmscriptenWasm(
avifEnc as EmscriptenWasm.ModuleFactory<AVIFEncodeModule>,
avifEncWasm
)
},
defaultEncoderOptions: {
cqLevel: 33,
cqAlphaLevel: -1,
denoiseLevel: 0,
tileColsLog2: 0,
tileRowsLog2: 0,
speed: 6,
subsample: 1,
chromaDeltaQ: false,
sharpness: 0,
tune: 0 /* AVIFTune.auto */,
},
autoOptimize: {
option: 'cqLevel',
min: 62,
max: 0,
},
},
oxipng: {
name: 'OxiPNG',
extension: 'png',
// Disable eslint rule to not touch the original code
// eslint-disable-next-line no-control-regex, regexp/control-character-escape
detectors: [/^\x89PNG\x0D\x0A\x1A\x0A/],
dec: async () => {
await pngEncDecInit()
return {
decode: (buffer: Buffer | Uint8Array) => {
const imageData = pngEncDec.decode(buffer)
pngEncDec.cleanup()
return imageData
},
}
},
enc: async () => {
await pngEncDecInit()
await oxipngInit()
return {
encode: (
buffer: Uint8ClampedArray | ArrayBuffer,
width: number,
height: number,
opts: { level: number }
) => {
const simplePng = pngEncDec.encode(
new Uint8Array(buffer),
width,
height
)
const imageData = oxipng.optimise(simplePng, opts.level, false)
oxipng.cleanup()
return imageData
},
}
},
defaultEncoderOptions: {
level: 2,
},
autoOptimize: {
option: 'level',
min: 6,
max: 1,
},
},
} as const

View file

@ -1,121 +0,0 @@
// These types roughly model the object that the JS files generated by Emscripten define. Copied from https://github.com/DefinitelyTyped/DefinitelyTyped/blob/master/types/emscripten/index.d.ts and turned into a type definition rather than a global to support our way of using Emscripten.
declare namespace EmscriptenWasm {
type ModuleFactory<T extends Module = Module> = (
moduleOverrides?: ModuleOpts
) => Promise<T>
type EnvironmentType = 'WEB' | 'NODE' | 'SHELL' | 'WORKER'
// Options object for modularized Emscripten files. Shoe-horned by @surma.
// FIXME: This an incomplete definition!
interface ModuleOpts {
mainScriptUrlOrBlob?: string
noInitialRun?: boolean
locateFile?: (url: string) => string
onRuntimeInitialized?: () => void
}
interface Module {
print(str: string): void
printErr(str: string): void
arguments: string[]
environment: EnvironmentType
preInit: { (): void }[]
preRun: { (): void }[]
postRun: { (): void }[]
preinitializedWebGLContext: WebGLRenderingContext
noInitialRun: boolean
noExitRuntime: boolean
logReadFiles: boolean
filePackagePrefixURL: string
wasmBinary: ArrayBuffer
destroy(object: object): void
getPreloadedPackage(
remotePackageName: string,
remotePackageSize: number
): ArrayBuffer
instantiateWasm(
imports: WebAssembly.Imports,
successCallback: (module: WebAssembly.Module) => void
): WebAssembly.Exports
locateFile(url: string): string
onCustomMessage(event: MessageEvent): void
Runtime: any
ccall(
ident: string,
returnType: string | null,
argTypes: string[],
args: any[]
): any
cwrap(ident: string, returnType: string | null, argTypes: string[]): any
setValue(ptr: number, value: any, type: string, noSafe?: boolean): void
getValue(ptr: number, type: string, noSafe?: boolean): number
ALLOC_NORMAL: number
ALLOC_STACK: number
ALLOC_STATIC: number
ALLOC_DYNAMIC: number
ALLOC_NONE: number
allocate(slab: any, types: string, allocator: number, ptr: number): number
allocate(slab: any, types: string[], allocator: number, ptr: number): number
Pointer_stringify(ptr: number, length?: number): string
UTF16ToString(ptr: number): string
stringToUTF16(str: string, outPtr: number): void
UTF32ToString(ptr: number): string
stringToUTF32(str: string, outPtr: number): void
// USE_TYPED_ARRAYS == 1
HEAP: Int32Array
IHEAP: Int32Array
FHEAP: Float64Array
// USE_TYPED_ARRAYS == 2
HEAP8: Int8Array
HEAP16: Int16Array
HEAP32: Int32Array
HEAPU8: Uint8Array
HEAPU16: Uint16Array
HEAPU32: Uint32Array
HEAPF32: Float32Array
HEAPF64: Float64Array
TOTAL_STACK: number
TOTAL_MEMORY: number
FAST_MEMORY: number
addOnPreRun(cb: () => any): void
addOnInit(cb: () => any): void
addOnPreMain(cb: () => any): void
addOnExit(cb: () => any): void
addOnPostRun(cb: () => any): void
// Tools
intArrayFromString(
stringy: string,
dontAddNull?: boolean,
length?: number
): number[]
intArrayToString(array: number[]): string
writeStringToMemory(str: string, buffer: number, dontAddNull: boolean): void
writeArrayToMemory(array: number[], buffer: number): void
writeAsciiToMemory(str: string, buffer: number, dontAddNull: boolean): void
addRunDependency(id: any): void
removeRunDependency(id: any): void
preloadedImages: any
preloadedAudios: any
_malloc(size: number): number
_free(ptr: number): void
// Augmentations below by @surma.
onRuntimeInitialized: () => void | null
}
}

View file

@ -1,39 +0,0 @@
//
import { fileURLToPath, pathToFileURL } from 'node:url'
export function pathify(path: string): string {
if (path.startsWith('file://')) {
path = fileURLToPath(path)
}
return path
}
export function instantiateEmscriptenWasm<T extends EmscriptenWasm.Module>(
factory: EmscriptenWasm.ModuleFactory<T>,
bytes: Uint8Array,
): Promise<T> {
return factory({
// @ts-expect-error This is a valid Emscripten option, but the type definitions don't know about it
wasmBinary: bytes,
locateFile(file: string) {
return file
}
})
}
export function dirname(url: string) {
return url.substring(0, url.lastIndexOf('/'))
}
/**
* On certain serverless hosts, our ESM bundle is transpiled to CJS before being run, which means
* import.meta.url is undefined, so we'll fall back to __filename in those cases
* We should be able to remove this once https://github.com/netlify/zip-it-and-ship-it/issues/750 is fixed
*/
export function getModuleURL(url: string | undefined): string {
if (!url) {
return pathToFileURL(__filename).toString();
}
return url
}

View file

@ -1,150 +0,0 @@
import { cpus } from 'node:os';
import { fileURLToPath } from 'node:url';
import { isMainThread } from 'node:worker_threads';
import type { ImageOutputFormat } from '../../../types.js';
import { getModuleURL } from './emscripten-utils.js';
import type { Operation } from './image.js';
import * as impl from './impl.js';
import execOnce from './utils/execOnce.js';
import WorkerPool from './utils/workerPool.js';
const getWorker = execOnce(() => {
return new WorkerPool(
// There will be at most 7 workers needed since each worker will take
// at least 1 operation type.
Math.max(1, Math.min(cpus().length - 1, 7)),
fileURLToPath(getModuleURL(import.meta.url))
);
});
type DecodeParams = {
operation: 'decode';
buffer: Uint8Array;
};
type ResizeParams = {
operation: 'resize';
imageData: ImageData;
height?: number;
width?: number;
};
type RotateParams = {
operation: 'rotate';
imageData: ImageData;
numRotations: number;
};
type EncodeAvifParams = {
operation: 'encodeavif';
imageData: ImageData;
quality: number;
};
type EncodeJpegParams = {
operation: 'encodejpeg';
imageData: ImageData;
quality: number;
};
type EncodePngParams = {
operation: 'encodepng';
imageData: ImageData;
};
type EncodeWebpParams = {
operation: 'encodewebp';
imageData: ImageData;
quality: number;
};
type JobMessage =
| DecodeParams
| ResizeParams
| RotateParams
| EncodeAvifParams
| EncodeJpegParams
| EncodePngParams
| EncodeWebpParams;
function handleJob(params: JobMessage) {
switch (params.operation) {
case 'decode':
return impl.decodeBuffer(params.buffer);
case 'resize':
return impl.resize({
image: params.imageData as any,
width: params.width,
height: params.height,
});
case 'rotate':
return impl.rotate(params.imageData as any, params.numRotations);
case 'encodeavif':
return impl.encodeAvif(params.imageData as any, { quality: params.quality });
case 'encodejpeg':
return impl.encodeJpeg(params.imageData as any, { quality: params.quality });
case 'encodepng':
return impl.encodePng(params.imageData as any);
case 'encodewebp':
return impl.encodeWebp(params.imageData as any, { quality: params.quality });
default:
throw Error(`Invalid job "${(params as any).operation}"`);
}
}
export async function processBuffer(
buffer: Uint8Array,
operations: Operation[],
encoding: ImageOutputFormat,
quality?: number
): Promise<Uint8Array> {
// @ts-ignore
const worker = await getWorker();
let imageData = await worker.dispatchJob({
operation: 'decode',
buffer,
});
for (const operation of operations) {
if (operation.type === 'rotate') {
imageData = await worker.dispatchJob({
operation: 'rotate',
imageData,
numRotations: operation.numRotations,
});
} else if (operation.type === 'resize') {
imageData = await worker.dispatchJob({
operation: 'resize',
imageData,
height: operation.height,
width: operation.width,
});
}
}
switch (encoding) {
case 'avif':
return (await worker.dispatchJob({
operation: 'encodeavif',
imageData,
quality,
})) as Uint8Array;
case 'jpeg':
case 'jpg':
return (await worker.dispatchJob({
operation: 'encodejpeg',
imageData,
quality,
})) as Uint8Array;
case 'png':
return (await worker.dispatchJob({
operation: 'encodepng',
imageData,
})) as Uint8Array;
case 'webp':
return (await worker.dispatchJob({
operation: 'encodewebp',
imageData,
quality,
})) as Uint8Array;
default:
throw Error(`Unsupported encoding format`);
}
}
if (!isMainThread) {
WorkerPool.useThisThreadAsWorker(handleJob);
}

View file

@ -1,43 +0,0 @@
import type { ImageOutputFormat } from '../../../types.js';
import * as impl from './impl.js';
type RotateOperation = {
type: 'rotate'
numRotations: number
}
type ResizeOperation = {
type: 'resize'
width?: number
height?: number
}
export type Operation = RotateOperation | ResizeOperation
export async function processBuffer(
buffer: Buffer,
operations: Operation[],
encoding: ImageOutputFormat,
quality?: number
): Promise<Uint8Array> {
let imageData = await impl.decodeBuffer(buffer)
for (const operation of operations) {
if (operation.type === 'rotate') {
imageData = await impl.rotate(imageData, operation.numRotations);
} else if (operation.type === 'resize') {
imageData = await impl.resize({ image: imageData, width: operation.width, height: operation.height })
}
}
switch (encoding) {
case 'avif':
return await impl.encodeAvif(imageData, { quality });
case 'jpeg':
case 'jpg':
return await impl.encodeJpeg(imageData, { quality });
case 'png':
return await impl.encodePng(imageData);
case 'webp':
return await impl.encodeWebp(imageData, { quality });
default:
throw Error(`Unsupported encoding format`)
}
}

View file

@ -1,33 +0,0 @@
export default class ImageData {
static from(input: ImageData): ImageData {
return new ImageData(input.data || input._data, input.width, input.height)
}
private _data: Buffer | Uint8Array | Uint8ClampedArray
width: number
height: number
get data(): Buffer {
if (Object.prototype.toString.call(this._data) === '[object Object]') {
return Buffer.from(Object.values(this._data))
}
if (
this._data instanceof Buffer ||
this._data instanceof Uint8Array ||
this._data instanceof Uint8ClampedArray
) {
return Buffer.from(this._data)
}
throw new Error('invariant')
}
constructor(
data: Buffer | Uint8Array | Uint8ClampedArray,
width: number,
height: number
) {
this._data = data
this.width = width
this.height = height
}
}

View file

@ -1,143 +0,0 @@
import { preprocessors, codecs as supportedFormats } from './codecs.js'
import ImageData from './image_data.js'
type EncoderKey = keyof typeof supportedFormats
const DELAY_MS = 1000
let _promise: Promise<void> | undefined
function delayOnce(ms: number): Promise<void> {
if (!_promise) {
_promise = new Promise((resolve) => {
setTimeout(resolve, ms)
})
}
return _promise
}
function maybeDelay(): Promise<void> {
const isAppleM1 = process.arch === 'arm64' && process.platform === 'darwin'
if (isAppleM1) {
return delayOnce(DELAY_MS)
}
return Promise.resolve()
}
export async function decodeBuffer(
_buffer: Buffer | Uint8Array
): Promise<ImageData> {
const buffer = Buffer.from(_buffer)
const firstChunk = buffer.slice(0, 16)
const firstChunkString = Array.from(firstChunk)
.map((v) => String.fromCodePoint(v))
.join('')
// TODO (future PR): support more formats
if (firstChunkString.includes('GIF')) {
throw Error(`GIF images are not supported, please use the Sharp image service`)
}
const key = Object.entries(supportedFormats).find(([, { detectors }]) =>
detectors.some((detector) => detector.exec(firstChunkString))
)?.[0] as EncoderKey | undefined
if (!key) {
throw Error(`Buffer has an unsupported format`)
}
const encoder = supportedFormats[key]
const mod = await encoder.dec()
const rgba = mod.decode(new Uint8Array(buffer))
// @ts-ignore
return rgba
}
export async function rotate(
image: ImageData,
numRotations: number
): Promise<ImageData> {
image = ImageData.from(image)
const m = await preprocessors['rotate'].instantiate()
return await m(image.data, image.width, image.height, { numRotations })
}
type ResizeOpts = { image: ImageData } & { width?: number; height?: number }
export async function resize({ image, width, height }: ResizeOpts) {
image = ImageData.from(image)
const p = preprocessors['resize']
const m = await p.instantiate()
await maybeDelay()
return await m(image.data, image.width, image.height, {
...p.defaultOptions,
width,
height,
})
}
export async function encodeJpeg(
image: ImageData,
opts: { quality?: number }
): Promise<Uint8Array> {
image = ImageData.from(image)
const e = supportedFormats['mozjpeg']
const m = await e.enc()
await maybeDelay()
const quality = opts.quality || e.defaultEncoderOptions.quality
const r = await m.encode(image.data, image.width, image.height, {
...e.defaultEncoderOptions,
quality,
})
return r
}
export async function encodeWebp(
image: ImageData,
opts: { quality?: number }
): Promise<Uint8Array> {
image = ImageData.from(image)
const e = supportedFormats['webp']
const m = await e.enc()
await maybeDelay()
const quality = opts.quality || e.defaultEncoderOptions.quality
const r = await m.encode(image.data, image.width, image.height, {
...e.defaultEncoderOptions,
quality,
})
return r
}
export async function encodeAvif(
image: ImageData,
opts: { quality?: number }
): Promise<Uint8Array> {
image = ImageData.from(image)
const e = supportedFormats['avif']
const m = await e.enc()
await maybeDelay()
const val = e.autoOptimize.min
// AVIF doesn't use a 0-100 quality, default to 75 and convert to cqLevel below
const quality = opts.quality || 75
const r = await m.encode(image.data, image.width, image.height, {
...e.defaultEncoderOptions,
// Think of cqLevel as the "amount" of quantization (0 to 62),
// so a lower value yields higher quality (0 to 100).
cqLevel: quality === 0 ? val : Math.round(val - (quality / 100) * val),
})
return r
}
export async function encodePng(
image: ImageData
): Promise<Uint8Array> {
image = ImageData.from(image)
const e = supportedFormats['oxipng']
const m = await e.enc()
await maybeDelay()
const r = await m.encode(image.data, image.width, image.height, {
...e.defaultEncoderOptions,
})
return r
}

View file

@ -1,38 +0,0 @@
// eslint-disable-next-line no-shadow
export const enum MozJpegColorSpace {
GRAYSCALE = 1,
RGB,
YCbCr,
}
export interface EncodeOptions {
quality: number
baseline: boolean
arithmetic: boolean
progressive: boolean
optimize_coding: boolean
smoothing: number
color_space: MozJpegColorSpace
quant_table: number
trellis_multipass: boolean
trellis_opt_zero: boolean
trellis_opt_table: boolean
trellis_loops: number
auto_subsample: boolean
chroma_subsample: number
separate_chroma_quality: boolean
chroma_quality: number
}
export interface MozJPEGModule extends EmscriptenWasm.Module {
encode(
data: BufferSource,
width: number,
height: number,
options: EncodeOptions
): Uint8Array
}
declare var moduleFactory: EmscriptenWasm.ModuleFactory<MozJPEGModule>
export default moduleFactory

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View file

@ -1,120 +0,0 @@
// @ts-nocheck
let wasm
let cachedTextDecoder = new TextDecoder('utf-8', {
ignoreBOM: true,
fatal: true,
})
cachedTextDecoder.decode()
let cachegetUint8Memory0 = null
function getUint8Memory0() {
if (
cachegetUint8Memory0 === null ||
cachegetUint8Memory0.buffer !== wasm.memory.buffer
) {
cachegetUint8Memory0 = new Uint8Array(wasm.memory.buffer)
}
return cachegetUint8Memory0
}
function getStringFromWasm0(ptr, len) {
return cachedTextDecoder.decode(getUint8Memory0().subarray(ptr, ptr + len))
}
let WASM_VECTOR_LEN = 0
function passArray8ToWasm0(arg, malloc) {
const ptr = malloc(arg.length * 1)
getUint8Memory0().set(arg, ptr / 1)
WASM_VECTOR_LEN = arg.length
return ptr
}
let cachegetInt32Memory0 = null
function getInt32Memory0() {
if (
cachegetInt32Memory0 === null ||
cachegetInt32Memory0.buffer !== wasm.memory.buffer
) {
cachegetInt32Memory0 = new Int32Array(wasm.memory.buffer)
}
return cachegetInt32Memory0
}
function getArrayU8FromWasm0(ptr, len) {
return getUint8Memory0().subarray(ptr / 1, ptr / 1 + len)
}
/**
* @param {Uint8Array} data
* @param {number} level
* @param {boolean} interlace
* @returns {Uint8Array}
*/
export function optimise(data, level, interlace) {
try {
const retptr = wasm.__wbindgen_add_to_stack_pointer(-16)
const ptr0 = passArray8ToWasm0(data, wasm.__wbindgen_malloc)
const len0 = WASM_VECTOR_LEN
wasm.optimise(retptr, ptr0, len0, level, interlace)
const r0 = getInt32Memory0()[retptr / 4 + 0]
const r1 = getInt32Memory0()[retptr / 4 + 1]
const v1 = getArrayU8FromWasm0(r0, r1).slice()
wasm.__wbindgen_free(r0, r1 * 1)
return v1
} finally {
wasm.__wbindgen_add_to_stack_pointer(16)
}
}
async function load(module, imports) {
if (typeof Response === 'function' && module instanceof Response) {
if (typeof WebAssembly.instantiateStreaming === 'function') {
return await WebAssembly.instantiateStreaming(module, imports)
}
const bytes = await module.arrayBuffer()
return await WebAssembly.instantiate(bytes, imports)
} else {
const instance = await WebAssembly.instantiate(module, imports)
if (instance instanceof WebAssembly.Instance) {
return { instance, module }
} else {
return instance
}
}
}
async function init(input) {
const imports = {}
imports.wbg = {}
imports.wbg.__wbindgen_throw = function (arg0, arg1) {
throw new Error(getStringFromWasm0(arg0, arg1))
}
if (
typeof input === 'string' ||
(typeof Request === 'function' && input instanceof Request) ||
(typeof URL === 'function' && input instanceof URL)
) {
input = fetch(input)
}
const { instance, module } = await load(await input, imports)
wasm = instance.exports
init.__wbindgen_wasm_module = module
return wasm
}
export default init
// Manually remove the wasm and memory references to trigger GC
export function cleanup() {
wasm = null
cachegetUint8Memory0 = null
cachegetInt32Memory0 = null
}

File diff suppressed because one or more lines are too long

View file

@ -1,184 +0,0 @@
// @ts-nocheck
let wasm
let cachedTextDecoder = new TextDecoder('utf-8', {
ignoreBOM: true,
fatal: true,
})
cachedTextDecoder.decode()
let cachegetUint8Memory0 = null
function getUint8Memory0() {
if (
cachegetUint8Memory0 === null ||
cachegetUint8Memory0.buffer !== wasm.memory.buffer
) {
cachegetUint8Memory0 = new Uint8Array(wasm.memory.buffer)
}
return cachegetUint8Memory0
}
function getStringFromWasm0(ptr, len) {
return cachedTextDecoder.decode(getUint8Memory0().subarray(ptr, ptr + len))
}
let cachegetUint8ClampedMemory0 = null
function getUint8ClampedMemory0() {
if (
cachegetUint8ClampedMemory0 === null ||
cachegetUint8ClampedMemory0.buffer !== wasm.memory.buffer
) {
cachegetUint8ClampedMemory0 = new Uint8ClampedArray(wasm.memory.buffer)
}
return cachegetUint8ClampedMemory0
}
function getClampedArrayU8FromWasm0(ptr, len) {
return getUint8ClampedMemory0().subarray(ptr / 1, ptr / 1 + len)
}
const heap = new Array(32).fill(undefined)
heap.push(undefined, null, true, false)
let heap_next = heap.length
function addHeapObject(obj) {
if (heap_next === heap.length) heap.push(heap.length + 1)
const idx = heap_next
heap_next = heap[idx]
heap[idx] = obj
return idx
}
let WASM_VECTOR_LEN = 0
function passArray8ToWasm0(arg, malloc) {
const ptr = malloc(arg.length * 1)
getUint8Memory0().set(arg, ptr / 1)
WASM_VECTOR_LEN = arg.length
return ptr
}
let cachegetInt32Memory0 = null
function getInt32Memory0() {
if (
cachegetInt32Memory0 === null ||
cachegetInt32Memory0.buffer !== wasm.memory.buffer
) {
cachegetInt32Memory0 = new Int32Array(wasm.memory.buffer)
}
return cachegetInt32Memory0
}
function getArrayU8FromWasm0(ptr, len) {
return getUint8Memory0().subarray(ptr / 1, ptr / 1 + len)
}
/**
* @param {Uint8Array} data
* @param {number} width
* @param {number} height
* @returns {Uint8Array}
*/
export function encode(data, width, height) {
try {
const retptr = wasm.__wbindgen_add_to_stack_pointer(-16)
const ptr0 = passArray8ToWasm0(data, wasm.__wbindgen_malloc)
const len0 = WASM_VECTOR_LEN
wasm.encode(retptr, ptr0, len0, width, height)
const r0 = getInt32Memory0()[retptr / 4 + 0]
const r1 = getInt32Memory0()[retptr / 4 + 1]
const v1 = getArrayU8FromWasm0(r0, r1).slice()
wasm.__wbindgen_free(r0, r1 * 1)
return v1
} finally {
wasm.__wbindgen_add_to_stack_pointer(16)
}
}
function getObject(idx) {
return heap[idx]
}
function dropObject(idx) {
if (idx < 36) return
heap[idx] = heap_next
heap_next = idx
}
function takeObject(idx) {
const ret = getObject(idx)
dropObject(idx)
return ret
}
/**
* @param {Uint8Array} data
* @returns {ImageData}
*/
export function decode(data) {
const ptr0 = passArray8ToWasm0(data, wasm.__wbindgen_malloc)
const len0 = WASM_VECTOR_LEN
const ret = wasm.decode(ptr0, len0)
return takeObject(ret)
}
async function load(module, imports) {
if (typeof Response === 'function' && module instanceof Response) {
if (typeof WebAssembly.instantiateStreaming === 'function') {
return await WebAssembly.instantiateStreaming(module, imports)
}
const bytes = await module.arrayBuffer()
return await WebAssembly.instantiate(bytes, imports)
} else {
const instance = await WebAssembly.instantiate(module, imports)
if (instance instanceof WebAssembly.Instance) {
return { instance, module }
} else {
return instance
}
}
}
async function init(input) {
const imports = {}
imports.wbg = {}
imports.wbg.__wbg_newwithownedu8clampedarrayandsh_787b2db8ea6bfd62 =
function (arg0, arg1, arg2, arg3) {
const v0 = getClampedArrayU8FromWasm0(arg0, arg1).slice()
wasm.__wbindgen_free(arg0, arg1 * 1)
const ret = new ImageData(v0, arg2 >>> 0, arg3 >>> 0)
return addHeapObject(ret)
}
imports.wbg.__wbindgen_throw = function (arg0, arg1) {
throw new Error(getStringFromWasm0(arg0, arg1))
}
if (
typeof input === 'string' ||
(typeof Request === 'function' && input instanceof Request) ||
(typeof URL === 'function' && input instanceof URL)
) {
input = fetch(input)
}
const { instance, module } = await load(await input, imports)
wasm = instance.exports
init.__wbindgen_wasm_module = module
return wasm
}
export default init
// Manually remove the wasm and memory references to trigger GC
export function cleanup() {
wasm = null
cachegetUint8ClampedMemory0 = null
cachegetUint8Memory0 = null
cachegetInt32Memory0 = null
}

File diff suppressed because one or more lines are too long

View file

@ -1,141 +0,0 @@
// @ts-nocheck
let wasm
let cachegetUint8Memory0 = null
function getUint8Memory0() {
if (
cachegetUint8Memory0 === null ||
cachegetUint8Memory0.buffer !== wasm.memory.buffer
) {
cachegetUint8Memory0 = new Uint8Array(wasm.memory.buffer)
}
return cachegetUint8Memory0
}
let WASM_VECTOR_LEN = 0
function passArray8ToWasm0(arg, malloc) {
const ptr = malloc(arg.length * 1)
getUint8Memory0().set(arg, ptr / 1)
WASM_VECTOR_LEN = arg.length
return ptr
}
let cachegetInt32Memory0 = null
function getInt32Memory0() {
if (
cachegetInt32Memory0 === null ||
cachegetInt32Memory0.buffer !== wasm.memory.buffer
) {
cachegetInt32Memory0 = new Int32Array(wasm.memory.buffer)
}
return cachegetInt32Memory0
}
let cachegetUint8ClampedMemory0 = null
function getUint8ClampedMemory0() {
if (
cachegetUint8ClampedMemory0 === null ||
cachegetUint8ClampedMemory0.buffer !== wasm.memory.buffer
) {
cachegetUint8ClampedMemory0 = new Uint8ClampedArray(wasm.memory.buffer)
}
return cachegetUint8ClampedMemory0
}
function getClampedArrayU8FromWasm0(ptr, len) {
return getUint8ClampedMemory0().subarray(ptr / 1, ptr / 1 + len)
}
/**
* @param {Uint8Array} input_image
* @param {number} input_width
* @param {number} input_height
* @param {number} output_width
* @param {number} output_height
* @param {number} typ_idx
* @param {boolean} premultiply
* @param {boolean} color_space_conversion
* @returns {Uint8ClampedArray}
*/
export function resize(
input_image,
input_width,
input_height,
output_width,
output_height,
typ_idx,
premultiply,
color_space_conversion
) {
try {
const retptr = wasm.__wbindgen_add_to_stack_pointer(-16)
const ptr0 = passArray8ToWasm0(input_image, wasm.__wbindgen_malloc)
const len0 = WASM_VECTOR_LEN
wasm.resize(
retptr,
ptr0,
len0,
input_width,
input_height,
output_width,
output_height,
typ_idx,
premultiply,
color_space_conversion
)
const r0 = getInt32Memory0()[retptr / 4 + 0]
const r1 = getInt32Memory0()[retptr / 4 + 1]
const v1 = getClampedArrayU8FromWasm0(r0, r1).slice()
wasm.__wbindgen_free(r0, r1 * 1)
return v1
} finally {
wasm.__wbindgen_add_to_stack_pointer(16)
}
}
async function load(module, imports) {
if (typeof Response === 'function' && module instanceof Response) {
if (typeof WebAssembly.instantiateStreaming === 'function') {
return await WebAssembly.instantiateStreaming(module, imports)
}
const bytes = await module.arrayBuffer()
return await WebAssembly.instantiate(bytes, imports)
} else {
const instance = await WebAssembly.instantiate(module, imports)
if (instance instanceof WebAssembly.Instance) {
return { instance, module }
} else {
return instance
}
}
}
async function init(input) {
const imports = {}
if (
typeof input === 'string' ||
(typeof Request === 'function' && input instanceof Request) ||
(typeof URL === 'function' && input instanceof URL)
) {
input = fetch(input)
}
const { instance, module } = await load(await input, imports)
wasm = instance.exports
init.__wbindgen_wasm_module = module
return wasm
}
export default init
// Manually remove the wasm and memory references to trigger GC
export function cleanup() {
wasm = null
cachegetUint8Memory0 = null
cachegetInt32Memory0 = null
}

File diff suppressed because one or more lines are too long

View file

@ -1 +0,0 @@
export default Buffer.from("AGFzbQEAAAABDAJgAn9/AGADf39/AAMGBQAAAAABBQMBABAGEQJ/AEGAgMAAC38AQYCAwAALBy4EBm1lbW9yeQIABnJvdGF0ZQAECl9fZGF0YV9lbmQDAAtfX2hlYXBfYmFzZQMBCpsJBUkBAX8gACABbCIAQf////8DcSICBEBBCCEBIABBAnRBCGohAANAIAAgASgCADYCACABQQRqIQEgAEEEaiEAIAJBf2oiAg0ACwsLzQMBFH8gAUECdCERIAAgAWwiDEECdEEEaiESA0ACQAJAAkACQCAEQQFxRQRAIAMgAU8NAiADQQFqIQgMAQsgA0EPaiICIANJIggNASACIAFJIgVFDQEgASADQRBqIAgbIAEgBRshCCACIQMLIAEgA0EQaiICIAIgAUsbIQ0gA0F/cyETIBIgA0ECdGshFEEAIQVBACEOA0ACQAJAIA5FBEAgBSAASQ0BQQEhBAwGCyAAIAVBEGogBUEPaiICIAVJIgcbIAAgAiAASRshBUEBIQQgByACIABPcg0FDAELIAUiAkEBaiEFC0EBIQ4gAyANTw0AIAAgAkEQaiIPIAAgD0kbQQJ0IAJBAnRrIRUgEyABIAJsaiEHIBQgASACQQFqbEECdGohCSADIQoDQCAAIApsIgYgAmoiBEEQaiAAIAZqIA8gAEkbIgYgBEkgDCAGSXINAyAEIAZHBEAgBEECdEEIaiELIBUhBiAHIRAgCSEEA0AgDCABIBBqIhBNDQUgBCALKAIANgIAIAQgEWohBCALQQRqIQsgBkF8aiIGDQALCyAHQX9qIQcgCUF8aiEJIA0gCkEBaiIKRw0ACwwACwALDwsACyAIIQMMAAsAC1MBAX8CQCAAIAFsQQJ0IgJBCGoiAEEIRg0AIAAgAmpBfGohAEEAIQEDQCABIAJGDQEgACABQQhqKAIANgIAIABBfGohACACIAFBBGoiAUcNAAsLC9oDARN/IABBf2ohEEEAIAFBAnRrIREgACABbCIMQQJ0QQhqIRIDQAJAAkACQAJAIARBAXFFBEAgAyABTw0CIANBAWohCQwBCyADQQ9qIgIgA0kiCQ0BIAIgAUkiBUUNASABIANBEGogCRsgASAFGyEJIAIhAwsgASADQRBqIgIgAiABSxshDSASIANBAnRqIRNBACEFQQAhBgNAAkACQCAGQQFxRQRAIAUgAEkNAUEBIQQMBgsgACAFQRBqIAVBD2oiAiAFSSIIGyAAIAIgAEkbIQVBASEEIAggAiAAT3INBQwBCyAFIgJBAWohBQtBASEGIAMgDU8NACAAIAJBEGoiDiAAIA5JG0ECdCACQQJ0ayEUIAMgASAAIAJrbGohCCATIAEgECACa2xBAnRqIQogAyELA0AgACALbCIHIAJqIgRBEGogACAHaiAOIABJGyIHIARJIAwgB0lyDQMgBCAHRwRAIARBAnRBCGohBiAUIQcgCCEPIAohBANAIAwgDyABayIPTQ0FIAQgBigCADYCACAEIBFqIQQgBkEEaiEGIAdBfGoiBw0ACwtBASEGIAhBAWohCCAKQQRqIQogDSALQQFqIgtHDQALDAALAAsPCwALIAkhAwwACwALUAACQAJAAkACQCACQbMBTARAIAJFDQIgAkHaAEcNASAAIAEQAQ8LIAJBtAFGDQIgAkGOAkYNAwsACyAAIAEQAA8LIAAgARACDwsgACABEAMLAE0JcHJvZHVjZXJzAghsYW5ndWFnZQEEUnVzdAAMcHJvY2Vzc2VkLWJ5AQVydXN0Yx0xLjQ3LjAgKDE4YmY2YjRmMCAyMDIwLTEwLTA3KQ==", 'base64');

View file

@ -1,12 +0,0 @@
export default function execOnce<T extends (...args: any[]) => ReturnType<T>>(fn: T): T {
let used = false;
let result: ReturnType<T>;
return ((...args: any[]) => {
if (!used) {
used = true;
result = fn(...args);
}
return result;
}) as T;
}

View file

@ -1,121 +0,0 @@
import { parentPort, Worker } from 'worker_threads';
function uuid() {
return Array.from({ length: 16 }, () => Math.floor(Math.random() * 256).toString(16)).join('');
}
interface Job<I> {
msg: I;
resolve: (result: any) => void;
reject: (reason: any) => void;
}
export default class WorkerPool<I, O> {
public numWorkers: number;
public jobQueue: TransformStream<Job<I>, Job<I>>;
public workerQueue: TransformStream<Worker, Worker>;
public done: Promise<void>;
constructor(numWorkers: number, workerFile: string) {
this.numWorkers = numWorkers;
this.jobQueue = new TransformStream();
this.workerQueue = new TransformStream();
const writer = this.workerQueue.writable.getWriter();
for (let i = 0; i < numWorkers; i++) {
writer.write(new Worker(workerFile));
}
writer.releaseLock();
this.done = this._readLoop();
}
async _readLoop() {
const reader = this.jobQueue.readable.getReader();
while (true) {
const { value, done } = await reader.read();
if (done) {
await this._terminateAll();
return;
}
if (!value) {
throw new Error('Reader did not return any value');
}
const { msg, resolve, reject } = value;
const worker = await this._nextWorker();
this.jobPromise(worker, msg)
.then((result) => resolve(result))
.catch((reason) => reject(reason))
.finally(() => {
// Return the worker to the pool
const writer = this.workerQueue.writable.getWriter();
writer.write(worker);
writer.releaseLock();
});
}
}
async _nextWorker() {
const reader = this.workerQueue.readable.getReader();
const { value } = await reader.read();
reader.releaseLock();
if (!value) {
throw new Error('No worker left');
}
return value;
}
async _terminateAll() {
for (let n = 0; n < this.numWorkers; n++) {
const worker = await this._nextWorker();
worker.terminate();
}
this.workerQueue.writable.close();
}
async join() {
this.jobQueue.writable.getWriter().close();
await this.done;
}
dispatchJob(msg: I): Promise<O> {
return new Promise((resolve, reject) => {
const writer = this.jobQueue.writable.getWriter();
writer.write({ msg, resolve, reject });
writer.releaseLock();
});
}
private jobPromise(worker: Worker, msg: I) {
return new Promise((resolve, reject) => {
const id = uuid();
worker.postMessage({ msg, id });
worker.on('message', function f({ error, result, id: rid }) {
if (rid !== id) {
return;
}
if (error) {
reject(error);
return;
}
worker.off('message', f);
resolve(result);
});
});
}
static useThisThreadAsWorker<I, O>(cb: (msg: I) => O) {
parentPort!.on('message', async (data) => {
const { msg, id } = data;
try {
const result = await cb(msg);
parentPort!.postMessage({ result, id });
} catch (e: any) {
parentPort!.postMessage({ error: e.message, id });
}
});
}
}

View file

@ -1,42 +0,0 @@
export interface EncodeOptions {
quality: number
target_size: number
target_PSNR: number
method: number
sns_strength: number
filter_strength: number
filter_sharpness: number
filter_type: number
partitions: number
segments: number
pass: number
show_compressed: number
preprocessing: number
autofilter: number
partition_limit: number
alpha_compression: number
alpha_filtering: number
alpha_quality: number
lossless: number
exact: number
image_hint: number
emulate_jpeg_size: number
thread_level: number
low_memory: number
near_lossless: number
use_delta_palette: number
use_sharp_yuv: number
}
export interface WebPModule extends EmscriptenWasm.Module {
encode(
data: BufferSource,
width: number,
height: number,
options: EncodeOptions
): Uint8Array
}
declare var moduleFactory: EmscriptenWasm.ModuleFactory<WebPModule>
export default moduleFactory

File diff suppressed because it is too large

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large

File diff suppressed because one or more lines are too long

View file

@ -245,11 +245,7 @@ export const AstroConfigSchema = z.object({
service: z
.object({
entrypoint: z
.union([
z.literal('astro/assets/services/sharp'),
z.literal('astro/assets/services/squoosh'),
z.string(),
])
.union([z.literal('astro/assets/services/sharp'), z.string()])
.default(ASTRO_CONFIG_DEFAULTS.image.service.entrypoint),
config: z.record(z.any()).default({}),
})

View file

@ -14,7 +14,6 @@ const EXPERIMENTAL = 'experimental';
const UNSUPPORTED_ASSETS_FEATURE: AstroAssetsFeature = {
supportKind: UNSUPPORTED,
isSquooshCompatible: false,
isSharpCompatible: false,
};
@ -139,7 +138,6 @@ function featureIsDeprecated(adapterName: string, logger: Logger, featureName: s
}
const SHARP_SERVICE = 'astro/assets/services/sharp';
const SQUOOSH_SERVICE = 'astro/assets/services/squoosh';
function validateAssetsFeature(
assets: AstroAssetsFeature,
@ -147,11 +145,7 @@ function validateAssetsFeature(
config: AstroConfig,
logger: Logger,
): boolean {
const {
supportKind = UNSUPPORTED,
isSharpCompatible = false,
isSquooshCompatible = false,
} = assets;
const { supportKind = UNSUPPORTED, isSharpCompatible = false } = assets;
if (config?.image?.service?.entrypoint === SHARP_SERVICE && !isSharpCompatible) {
logger.warn(
null,
@ -160,13 +154,5 @@ function validateAssetsFeature(
return false;
}
if (config?.image?.service?.entrypoint === SQUOOSH_SERVICE && !isSquooshCompatible) {
logger.warn(
null,
`The currently selected adapter \`${adapterName}\` is not compatible with the image service "Squoosh".`,
);
return false;
}
return validateSupportKind(supportKind, adapterName, logger, 'assets', () => true);
}

View file

@ -16,7 +16,7 @@ import type { AstroIntegration, RoutePriorityOverride } from './integrations.js'
export type Locales = (string | { codes: string[]; path: string })[];
export interface ImageServiceConfig<T extends Record<string, any> = Record<string, any>> {
entrypoint: 'astro/assets/services/sharp' | 'astro/assets/services/squoosh' | (string & {});
entrypoint: 'astro/assets/services/sharp' | (string & {});
config?: T;
}
@ -927,7 +927,7 @@ export interface AstroUserConfig {
/**
* @docs
* @name image.service
* @type {{entrypoint: 'astro/assets/services/sharp' | 'astro/assets/services/squoosh' | string, config: Record<string, any>}}
* @type {{entrypoint: 'astro/assets/services/sharp' | string, config: Record<string, any>}}
* @default `{entrypoint: 'astro/assets/services/sharp', config?: {}}`
* @version 2.1.0
* @description

View file

@ -118,10 +118,6 @@ export interface AstroAssetsFeature {
* Whether this adapter deploys files in an environment that is compatible with the library `sharp`
*/
isSharpCompatible?: boolean;
/**
* Whether this adapter deploys files in an environment that is compatible with the library `squoosh`
*/
isSquooshCompatible?: boolean;
}
export interface AstroInternationalizationFeature {

View file

@ -270,27 +270,6 @@ describe('Astro feature map', function () {
);
assert.equal(result['assets'], true);
});
it('should be supported when it is squoosh compatible', () => {
let result = validateSupportedFeatures(
'test',
{
assets: {
supportKind: 'stable',
isSquooshCompatible: true,
},
},
{
image: {
service: {
entrypoint: 'astro/assets/services/squoosh',
},
},
},
{},
defaultLogger,
);
assert.equal(result['assets'], true);
});
it("should not be valid if the config is correct, but the it's unsupported", () => {
let result = validateSupportedFeatures(

View file

@ -16,7 +16,6 @@ export function getAdapter(options: Options): AstroAdapter {
assets: {
supportKind: 'stable',
isSharpCompatible: true,
isSquooshCompatible: true,
},
i18nDomains: 'experimental',
envGetSecret: 'experimental',

View file

@ -26,7 +26,6 @@
"./speed-insights": "./dist/speed-insights.js",
"./build-image-service": "./dist/image/build-service.js",
"./dev-image-service": "./dist/image/dev-service.js",
"./squoosh-dev-service": "./dist/image/squoosh-dev-service.js",
"./package.json": "./package.json"
},
"typesVersions": {

View file

@ -13,7 +13,7 @@ export function isESMImportedImage(src: ImageMetadata | string): src is ImageMet
return typeof src === 'object';
}
export type DevImageService = 'sharp' | 'squoosh' | (string & {});
export type DevImageService = 'sharp' | (string & {});
// https://vercel.com/docs/build-output-api/v3/configuration#images
type ImageFormat = 'image/avif' | 'image/webp';
@ -76,9 +76,6 @@ export function getAstroImageConfig(
case 'sharp':
devService = '@astrojs/vercel/dev-image-service';
break;
case 'squoosh':
devService = '@astrojs/vercel/squoosh-dev-image-service';
break;
default:
if (typeof devImageService === 'string') {
devService = devImageService;

View file

@ -1,31 +0,0 @@
import type { LocalImageService } from 'astro';
import squooshService from 'astro/assets/services/squoosh';
import { baseDevService } from './shared-dev-service.js';
const service: LocalImageService = {
...baseDevService,
getHTMLAttributes(options, serviceOptions) {
const { inputtedWidth, ...props } = options;
// If `validateOptions` returned a different width than the one of the image, use it for attributes
if (inputtedWidth) {
props.width = inputtedWidth;
}
return squooshService.getHTMLAttributes
? squooshService.getHTMLAttributes(props, serviceOptions)
: {};
},
transform(inputBuffer, transform, serviceOptions) {
// NOTE: Hardcoding webp here isn't accurate to how the Vercel Image Optimization API works, normally what we should
// do is setup a custom endpoint that sniff the user's accept-content header and serve the proper format based on the
// user's Vercel config. However, that's: a lot of work for: not much. The dev service is inaccurate to the prod service
// in many more ways, this is one of the less offending cases and is, imo, okay, erika - 2023-04-27
transform.format = transform.src.endsWith('svg') ? 'svg' : 'webp';
// The base squoosh service works the same way as the Vercel Image Optimization API, so it's a safe fallback in local
return squooshService.transform(inputBuffer, transform, serviceOptions);
},
};
export default service;

View file

@ -92,7 +92,6 @@ function getAdapter({
assets: {
supportKind: 'stable',
isSharpCompatible: true,
isSquooshCompatible: true,
},
i18nDomains: 'experimental',
envGetSecret: 'experimental',

View file

@ -26,7 +26,6 @@ function getAdapter(): AstroAdapter {
supportedAstroFeatures: {
assets: {
supportKind: 'stable',
isSquooshCompatible: true,
isSharpCompatible: true,
},
staticOutput: 'stable',

View file

@ -65,16 +65,6 @@ export default async function build(...args) {
await clean(outdir);
}
const copyPlugin = copyWASM
? copy({
resolveFrom: 'cwd',
assets: {
from: ['./src/assets/services/vendor/squoosh/**/*.wasm'],
to: ['./dist/assets/services/vendor/squoosh'],
},
})
: null;
if (!isDev) {
await esbuild.build({
...config,
@ -83,8 +73,7 @@ export default async function build(...args) {
entryPoints,
outdir,
outExtension: forceCJS ? { '.js': '.cjs' } : {},
format,
plugins: [copyPlugin].filter(Boolean),
format
});
return;
}
@ -117,7 +106,7 @@ export default async function build(...args) {
outdir,
format,
sourcemap: 'linked',
plugins: [rebuildPlugin, copyPlugin].filter(Boolean),
plugins: [rebuildPlugin],
});
await builder.watch();