npm package diff

Package: @cloudflare/workers-types

Versions: 4.20241218.0 - 4.20241224.0

File: package/2022-08-04/index.ts

Index: package/2022-08-04/index.ts
===================================================================
--- package/2022-08-04/index.ts
+++ package/2022-08-04/index.ts
@@ -1444,22 +1444,46 @@
   getAttribute(name: string): string | null;
   hasAttribute(name: string): boolean;
   setAttribute(name: string, value: string): Element;
   removeAttribute(name: string): Element;
-  before(content: string, options?: ContentOptions): Element;
-  after(content: string, options?: ContentOptions): Element;
-  prepend(content: string, options?: ContentOptions): Element;
-  append(content: string, options?: ContentOptions): Element;
-  replace(content: string, options?: ContentOptions): Element;
+  before(
+    content: string | ReadableStream | Response,
+    options?: ContentOptions,
+  ): Element;
+  after(
+    content: string | ReadableStream | Response,
+    options?: ContentOptions,
+  ): Element;
+  prepend(
+    content: string | ReadableStream | Response,
+    options?: ContentOptions,
+  ): Element;
+  append(
+    content: string | ReadableStream | Response,
+    options?: ContentOptions,
+  ): Element;
+  replace(
+    content: string | ReadableStream | Response,
+    options?: ContentOptions,
+  ): Element;
   remove(): Element;
   removeAndKeepContent(): Element;
-  setInnerContent(content: string, options?: ContentOptions): Element;
+  setInnerContent(
+    content: string | ReadableStream | Response,
+    options?: ContentOptions,
+  ): Element;
   onEndTag(handler: (tag: EndTag) => void | Promise<void>): void;
 }
 export interface EndTag {
   name: string;
-  before(content: string, options?: ContentOptions): EndTag;
-  after(content: string, options?: ContentOptions): EndTag;
+  before(
+    content: string | ReadableStream | Response,
+    options?: ContentOptions,
+  ): EndTag;
+  after(
+    content: string | ReadableStream | Response,
+    options?: ContentOptions,
+  ): EndTag;
   remove(): EndTag;
 }
 export interface Comment {
   text: string;
@@ -1472,11 +1496,20 @@
 export interface Text {
   readonly text: string;
   readonly lastInTextNode: boolean;
   readonly removed: boolean;
-  before(content: string, options?: ContentOptions): Text;
-  after(content: string, options?: ContentOptions): Text;
-  replace(content: string, options?: ContentOptions): Text;
+  before(
+    content: string | ReadableStream | Response,
+    options?: ContentOptions,
+  ): Text;
+  after(
+    content: string | ReadableStream | Response,
+    options?: ContentOptions,
+  ): Text;
+  replace(
+    content: string | ReadableStream | Response,
+    options?: ContentOptions,
+  ): Text;
   remove(): Text;
 }
 export interface DocumentEnd {
   append(content: string, options?: ContentOptions): DocumentEnd;
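
The two hunks above widen every content-insertion method on Element, EndTag, and Text (before, after, prepend, append, replace, setInnerContent) from string-only content to string | ReadableStream | Response; DocumentEnd.append is unchanged. A minimal sketch of what the widened signatures allow inside an HTMLRewriter handler; the fragment URL is a placeholder:

// Sketch only: besides a string, a ReadableStream or a whole Response can now
// be passed as content. Assumes a Workers runtime with fetch and HTMLRewriter.
async function injectFragment(page: Response): Promise<Response> {
  const fragment = await fetch("https://example.com/fragment.html"); // hypothetical URL
  return new HTMLRewriter()
    .on("body", {
      element(el) {
        el.append(fragment, { html: true }); // previously string-only
      },
    })
    .transform(page);
}
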
@@ -3483,23 +3516,23 @@
 export declare abstract class BaseAiSentenceSimilarity {
   inputs: AiSentenceSimilarityInput;
   postProcessedOutputs: AiSentenceSimilarityOutput;
 }
-export type AiSpeechRecognitionInput = {
+export type AiAutomaticSpeechRecognitionInput = {
   audio: number[];
 };
-export type AiSpeechRecognitionOutput = {
+export type AiAutomaticSpeechRecognitionOutput = {
   text?: string;
   words?: {
     word: string;
     start: number;
     end: number;
   }[];
   vtt?: string;
 };
-export declare abstract class BaseAiSpeechRecognition {
-  inputs: AiSpeechRecognitionInput;
-  postProcessedOutputs: AiSpeechRecognitionOutput;
+export declare abstract class BaseAiAutomaticSpeechRecognition {
+  inputs: AiAutomaticSpeechRecognitionInput;
+  postProcessedOutputs: AiAutomaticSpeechRecognitionOutput;
 }
 export type AiSummarizationInput = {
   input_text: string;
   max_length?: number;
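
The hunk above is a pure rename: AiSpeechRecognitionInput/Output and BaseAiSpeechRecognition become AiAutomaticSpeechRecognitionInput/Output and BaseAiAutomaticSpeechRecognition with identical shapes, so references to the old names only need a mechanical update. Illustrative values only:

// Before: AiSpeechRecognitionInput / AiSpeechRecognitionOutput
// After:  AiAutomaticSpeechRecognitionInput / AiAutomaticSpeechRecognitionOutput
const asrInput: AiAutomaticSpeechRecognitionInput = {
  audio: [...new Uint8Array(8)], // raw audio bytes as number[], per the type
};
const asrOutput: AiAutomaticSpeechRecognitionOutput = {
  text: "hello world",
  words: [{ word: "hello", start: 0, end: 0.4 }],
  vtt: "WEBVTT",
};
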
@@ -3533,18 +3566,38 @@
   inputs: AiTextEmbeddingsInput;
   postProcessedOutputs: AiTextEmbeddingsOutput;
 }
 export type RoleScopedChatInput = {
-  role: "user" | "assistant" | "system" | "tool";
+  role:
+    | "user"
+    | "assistant"
+    | "system"
+    | "tool"
+    | (string & NonNullable<unknown>);
   content: string;
+  name?: string;
 };
+export type AiTextGenerationToolLegacyInput = {
+  name: string;
+  description: string;
+  parameters?: {
+    type: "object" | (string & NonNullable<unknown>);
+    properties: {
+      [key: string]: {
+        type: string;
+        description?: string;
+      };
+    };
+    required: string[];
+  };
+};
 export type AiTextGenerationToolInput = {
-  type: "function";
+  type: "function" | (string & NonNullable<unknown>);
   function: {
     name: string;
     description: string;
     parameters?: {
-      type: "object";
+      type: "object" | (string & NonNullable<unknown>);
       properties: {
         [key: string]: {
           type: string;
           description?: string;
@@ -3553,8 +3606,12 @@
       required: string[];
     };
   };
 };
+export type AiTextGenerationFunctionsInput = {
+  name: string;
+  code: string;
+};
 export type AiTextGenerationInput = {
   prompt?: string;
   raw?: boolean;
   stream?: boolean;
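
Two things change in the hunk above: RoleScopedChatInput's role union gains the (string & NonNullable<unknown>) escape hatch, which keeps autocompletion for the known roles while accepting arbitrary strings, plus an optional name field; and AiTextGenerationToolLegacyInput captures the older flat tool shape (name/description/parameters at the top level, no function wrapper). A small sketch of values these types now accept; the tool itself is made up:

const messages: RoleScopedChatInput[] = [
  { role: "system", content: "You are a helpful assistant." },
  // A non-standard role string now type-checks thanks to the widened union.
  { role: "critic", content: "Check the answer for mistakes.", name: "reviewer" },
];

const legacyTool: AiTextGenerationToolLegacyInput = {
  name: "get_weather", // hypothetical tool
  description: "Look up the current weather for a city.",
  parameters: {
    type: "object",
    properties: {
      city: { type: "string", description: "City name" },
    },
    required: ["city"],
  },
};
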
@@ -3566,9 +3623,10 @@
   repetition_penalty?: number;
   frequency_penalty?: number;
   presence_penalty?: number;
   messages?: RoleScopedChatInput[];
-  tools?: AiTextGenerationToolInput[];
+  tools?: AiTextGenerationToolInput[] | AiTextGenerationToolLegacyInput[];
+  functions?: AiTextGenerationFunctionsInput[];
 };
 export type AiTextGenerationOutput =
   | {
       response?: string;
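
AiTextGenerationInput now accepts either tool shape under tools (nested AiTextGenerationToolInput[] or flat AiTextGenerationToolLegacyInput[]) and gains a separate functions array of name/code pairs. A hedged sketch using only fields visible in this diff; the tool and code body are placeholders:

const genInput: AiTextGenerationInput = {
  messages: [{ role: "user", content: "What's the weather in Lisbon?" }],
  // Nested shape shown here; a flat AiTextGenerationToolLegacyInput[] is equally valid.
  tools: [
    {
      type: "function",
      function: {
        name: "get_weather", // hypothetical tool
        description: "Look up the current weather for a city.",
        parameters: {
          type: "object",
          properties: { city: { type: "string" } },
          required: ["city"],
        },
      },
    },
  ],
  // New: plain name/code pairs alongside (or instead of) tools.
  functions: [{ name: "lookup", code: "/* hypothetical function body */" }],
  stream: false,
};
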
@@ -3581,15 +3639,33 @@
 export declare abstract class BaseAiTextGeneration {
   inputs: AiTextGenerationInput;
   postProcessedOutputs: AiTextGenerationOutput;
 }
+export type AiTextToSpeechInput = {
+  prompt: string;
+  lang?: string;
+};
+export type AiTextToSpeechOutput =
+  | Uint8Array
+  | {
+      audio: string;
+    };
+export declare abstract class BaseAiTextToSpeech {
+  inputs: AiTextToSpeechInput;
+  postProcessedOutputs: AiTextToSpeechOutput;
+}
 export type AiTextToImageInput = {
   prompt: string;
+  negative_prompt?: string;
+  height?: number;
+  width?: number;
   image?: number[];
+  image_b64?: string;
   mask?: number[];
   num_steps?: number;
   strength?: number;
   guidance?: number;
+  seed?: number;
 };
 export type AiTextToImageOutput = ReadableStream<Uint8Array>;
 export declare abstract class BaseAiTextToImage {
   inputs: AiTextToImageInput;
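
This hunk introduces text-to-speech types and adds several optional text-to-image parameters (negative_prompt, width/height, image_b64, seed). A sketch of inputs that now type-check; the prompt text and numbers are arbitrary:

const speechInput: AiTextToSpeechInput = {
  prompt: "Read this sentence aloud.",
  lang: "en",
};
// AiTextToSpeechOutput is either raw bytes (Uint8Array) or { audio: string }.

const imageInput: AiTextToImageInput = {
  prompt: "A watercolor lighthouse at dusk",
  negative_prompt: "blurry, low contrast",
  width: 768,
  height: 768,
  num_steps: 20,
  guidance: 7.5,
  seed: 42,
};
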
@@ -3611,121 +3687,75 @@
   gateway?: GatewayOptions;
   prefix?: string;
   extraHeaders?: object;
 };
-export type BaseAiTextClassificationModels =
-  "@cf/huggingface/distilbert-sst-2-int8";
-export type BaseAiTextToImageModels =
-  | "@cf/stabilityai/stable-diffusion-xl-base-1.0"
-  | "@cf/runwayml/stable-diffusion-v1-5-inpainting"
-  | "@cf/runwayml/stable-diffusion-v1-5-img2img"
-  | "@cf/lykon/dreamshaper-8-lcm"
-  | "@cf/bytedance/stable-diffusion-xl-lightning";
-export type BaseAiTextEmbeddingsModels =
-  | "@cf/baai/bge-small-en-v1.5"
-  | "@cf/baai/bge-base-en-v1.5"
-  | "@cf/baai/bge-large-en-v1.5";
-export type BaseAiSpeechRecognitionModels =
-  | "@cf/openai/whisper"
-  | "@cf/openai/whisper-tiny-en"
-  | "@cf/openai/whisper-sherpa";
-export type BaseAiImageClassificationModels = "@cf/microsoft/resnet-50";
-export type BaseAiObjectDetectionModels = "@cf/facebook/detr-resnet-50";
-export type BaseAiTextGenerationModels =
-  | "@cf/meta/llama-3.1-8b-instruct"
-  | "@cf/meta/llama-3-8b-instruct"
-  | "@cf/meta/llama-3-8b-instruct-awq"
-  | "@cf/meta/llama-2-7b-chat-int8"
-  | "@cf/mistral/mistral-7b-instruct-v0.1"
-  | "@cf/mistral/mistral-7b-instruct-v0.2-lora"
-  | "@cf/meta/llama-2-7b-chat-fp16"
-  | "@hf/thebloke/llama-2-13b-chat-awq"
-  | "@hf/thebloke/zephyr-7b-beta-awq"
-  | "@hf/thebloke/mistral-7b-instruct-v0.1-awq"
-  | "@hf/thebloke/codellama-7b-instruct-awq"
-  | "@hf/thebloke/openhermes-2.5-mistral-7b-awq"
-  | "@hf/thebloke/neural-chat-7b-v3-1-awq"
-  | "@hf/thebloke/llamaguard-7b-awq"
-  | "@hf/thebloke/deepseek-coder-6.7b-base-awq"
-  | "@hf/thebloke/deepseek-coder-6.7b-instruct-awq"
-  | "@hf/nousresearch/hermes-2-pro-mistral-7b"
-  | "@hf/mistral/mistral-7b-instruct-v0.2"
-  | "@hf/google/gemma-7b-it"
-  | "@hf/nexusflow/starling-lm-7b-beta"
-  | "@cf/deepseek-ai/deepseek-math-7b-instruct"
-  | "@cf/defog/sqlcoder-7b-2"
-  | "@cf/openchat/openchat-3.5-0106"
-  | "@cf/tiiuae/falcon-7b-instruct"
-  | "@cf/thebloke/discolm-german-7b-v1-awq"
-  | "@cf/qwen/qwen1.5-0.5b-chat"
-  | "@cf/qwen/qwen1.5-1.8b-chat"
-  | "@cf/qwen/qwen1.5-7b-chat-awq"
-  | "@cf/qwen/qwen1.5-14b-chat-awq"
-  | "@cf/tinyllama/tinyllama-1.1b-chat-v1.0"
-  | "@cf/microsoft/phi-2"
-  | "@cf/google/gemma-2b-it-lora"
-  | "@cf/google/gemma-7b-it-lora"
-  | "@cf/meta-llama/llama-2-7b-chat-hf-lora"
-  | "@cf/fblgit/una-cybertron-7b-v2-bf16"
-  | "@cf/fblgit/una-cybertron-7b-v2-awq";
-export type BaseAiTranslationModels = "@cf/meta/m2m100-1.2b";
-export type BaseAiSummarizationModels = "@cf/facebook/bart-large-cnn";
-export type BaseAiImageToTextModels =
-  | "@cf/unum/uform-gen2-qwen-500m"
-  | "@cf/llava-hf/llava-1.5-7b-hf";
-export declare abstract class Ai {
-  public aiGatewayLogId: string | null;
-  public gateway(gatewayId: string): AiGateway;
-  run(
-    model: BaseAiTextClassificationModels,
-    inputs: BaseAiTextClassification["inputs"],
+export type ModelType<Name extends keyof AiModels> = AiModels[Name];
+export interface AiModels {
+  "@cf/huggingface/distilbert-sst-2-int8": BaseAiTextClassification;
+  "@cf/stabilityai/stable-diffusion-xl-base-1.0": BaseAiTextToImage;
+  "@cf/runwayml/stable-diffusion-v1-5-inpainting": BaseAiTextToImage;
+  "@cf/runwayml/stable-diffusion-v1-5-img2img": BaseAiTextToImage;
+  "@cf/lykon/dreamshaper-8-lcm": BaseAiTextToImage;
+  "@cf/bytedance/stable-diffusion-xl-lightning": BaseAiTextToImage;
+  "@cf/baai/bge-base-en-v1.5": BaseAiTextEmbeddings;
+  "@cf/baai/bge-small-en-v1.5": BaseAiTextEmbeddings;
+  "@cf/baai/bge-large-en-v1.5": BaseAiTextEmbeddings;
+  "@cf/microsoft/resnet-50": BaseAiImageClassification;
+  "@cf/facebook/detr-resnet-50": BaseAiObjectDetection;
+  "@cf/meta/llama-2-7b-chat-int8": BaseAiTextGeneration;
+  "@cf/mistral/mistral-7b-instruct-v0.1": BaseAiTextGeneration;
+  "@cf/meta/llama-2-7b-chat-fp16": BaseAiTextGeneration;
+  "@hf/thebloke/llama-2-13b-chat-awq": BaseAiTextGeneration;
+  "@hf/thebloke/mistral-7b-instruct-v0.1-awq": BaseAiTextGeneration;
+  "@hf/thebloke/zephyr-7b-beta-awq": BaseAiTextGeneration;
+  "@hf/thebloke/openhermes-2.5-mistral-7b-awq": BaseAiTextGeneration;
+  "@hf/thebloke/neural-chat-7b-v3-1-awq": BaseAiTextGeneration;
+  "@hf/thebloke/llamaguard-7b-awq": BaseAiTextGeneration;
+  "@hf/thebloke/deepseek-coder-6.7b-base-awq": BaseAiTextGeneration;
+  "@hf/thebloke/deepseek-coder-6.7b-instruct-awq": BaseAiTextGeneration;
+  "@cf/deepseek-ai/deepseek-math-7b-instruct": BaseAiTextGeneration;
+  "@cf/defog/sqlcoder-7b-2": BaseAiTextGeneration;
+  "@cf/openchat/openchat-3.5-0106": BaseAiTextGeneration;
+  "@cf/tiiuae/falcon-7b-instruct": BaseAiTextGeneration;
+  "@cf/thebloke/discolm-german-7b-v1-awq": BaseAiTextGeneration;
+  "@cf/qwen/qwen1.5-0.5b-chat": BaseAiTextGeneration;
+  "@cf/qwen/qwen1.5-7b-chat-awq": BaseAiTextGeneration;
+  "@cf/qwen/qwen1.5-14b-chat-awq": BaseAiTextGeneration;
+  "@cf/tinyllama/tinyllama-1.1b-chat-v1.0": BaseAiTextGeneration;
+  "@cf/microsoft/phi-2": BaseAiTextGeneration;
+  "@cf/qwen/qwen1.5-1.8b-chat": BaseAiTextGeneration;
+  "@cf/mistral/mistral-7b-instruct-v0.2-lora": BaseAiTextGeneration;
+  "@hf/nousresearch/hermes-2-pro-mistral-7b": BaseAiTextGeneration;
+  "@hf/nexusflow/starling-lm-7b-beta": BaseAiTextGeneration;
+  "@hf/google/gemma-7b-it": BaseAiTextGeneration;
+  "@cf/meta-llama/llama-2-7b-chat-hf-lora": BaseAiTextGeneration;
+  "@cf/google/gemma-2b-it-lora": BaseAiTextGeneration;
+  "@cf/google/gemma-7b-it-lora": BaseAiTextGeneration;
+  "@hf/mistral/mistral-7b-instruct-v0.2": BaseAiTextGeneration;
+  "@cf/meta/llama-3-8b-instruct": BaseAiTextGeneration;
+  "@cf/fblgit/una-cybertron-7b-v2-bf16": BaseAiTextGeneration;
+  "@cf/meta/llama-3-8b-instruct-awq": BaseAiTextGeneration;
+  "@hf/meta-llama/meta-llama-3-8b-instruct": BaseAiTextGeneration;
+  "@cf/meta/llama-3.1-8b-instruct": BaseAiTextGeneration;
+  "@cf/meta/llama-3.1-8b-instruct-fp8": BaseAiTextGeneration;
+  "@cf/meta/llama-3.1-8b-instruct-awq": BaseAiTextGeneration;
+  "@cf/meta/llama-3.2-3b-instruct": BaseAiTextGeneration;
+  "@cf/meta/llama-3.2-1b-instruct": BaseAiTextGeneration;
+  "@cf/meta/llama-3.3-70b-instruct-fp8-fast": BaseAiTextGeneration;
+  "@cf/meta/m2m100-1.2b": BaseAiTranslation;
+  "@cf/facebook/bart-large-cnn": BaseAiSummarization;
+  "@cf/unum/uform-gen2-qwen-500m": BaseAiImageToText;
+  "@cf/llava-hf/llava-1.5-7b-hf": BaseAiImageToText;
+}
+export type ModelListType = Record<string, any>;
+export declare abstract class Ai<ModelList extends ModelListType = AiModels> {
+  aiGatewayLogId: string | null;
+  gateway(gatewayId: string): AiGateway;
+  run<Name extends keyof ModelList>(
+    model: Name,
+    inputs: ModelList[Name]["inputs"],
     options?: AiOptions,
-  ): Promise<BaseAiTextClassification["postProcessedOutputs"]>;
-  run(
-    model: BaseAiTextToImageModels,
-    inputs: BaseAiTextToImage["inputs"],
-    options?: AiOptions,
-  ): Promise<BaseAiTextToImage["postProcessedOutputs"]>;
-  run(
-    model: BaseAiTextEmbeddingsModels,
-    inputs: BaseAiTextEmbeddings["inputs"],
-    options?: AiOptions,
-  ): Promise<BaseAiTextEmbeddings["postProcessedOutputs"]>;
-  run(
-    model: BaseAiSpeechRecognitionModels,
-    inputs: BaseAiSpeechRecognition["inputs"],
-    options?: AiOptions,
-  ): Promise<BaseAiSpeechRecognition["postProcessedOutputs"]>;
-  run(
-    model: BaseAiImageClassificationModels,
-    inputs: BaseAiImageClassification["inputs"],
-    options?: AiOptions,
-  ): Promise<BaseAiImageClassification["postProcessedOutputs"]>;
-  run(
-    model: BaseAiObjectDetectionModels,
-    inputs: BaseAiObjectDetection["inputs"],
-    options?: AiOptions,
-  ): Promise<BaseAiObjectDetection["postProcessedOutputs"]>;
-  run(
-    model: BaseAiTextGenerationModels,
-    inputs: BaseAiTextGeneration["inputs"],
-    options?: AiOptions,
-  ): Promise<BaseAiTextGeneration["postProcessedOutputs"]>;
-  run(
-    model: BaseAiTranslationModels,
-    inputs: BaseAiTranslation["inputs"],
-    options?: AiOptions,
-  ): Promise<BaseAiTranslation["postProcessedOutputs"]>;
-  run(
-    model: BaseAiSummarizationModels,
-    inputs: BaseAiSummarization["inputs"],
-    options?: AiOptions,
-  ): Promise<BaseAiSummarization["postProcessedOutputs"]>;
-  run(
-    model: BaseAiImageToTextModels,
-    inputs: BaseAiImageToText["inputs"],
-    options?: AiOptions,
-  ): Promise<BaseAiImageToText["postProcessedOutputs"]>;
+  ): Promise<ModelList[Name]["postProcessedOutputs"]>;
 }
 export type GatewayOptions = {
   id: string;
   cacheKey?: string;
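
The final hunk is the largest change to the typings: the per-task model-name unions and the stack of run() overloads are replaced by a single AiModels interface keyed by model name, and Ai becomes generic over a model list (defaulting to AiModels), so run() infers both the input and the output type from the model string. A hedged sketch of what that looks like from a Worker; the AI binding name and the extra model key are assumptions:

interface Env {
  AI: Ai; // equivalent to Ai<AiModels>
}

export default {
  async fetch(_req: Request, env: Env): Promise<Response> {
    // The model key selects the entry in AiModels, so the inputs and the result
    // are typed as BaseAiTextGeneration["inputs"] / ["postProcessedOutputs"]
    // without per-task overloads.
    const out = await env.AI.run("@cf/meta/llama-3.1-8b-instruct", {
      prompt: "Say hello in one short sentence.",
    });
    return new Response(JSON.stringify(out), {
      headers: { "content-type": "application/json" },
    });
  },
};

// Because the generic only has to satisfy Record<string, any>, a model that is
// not yet listed can be added by widening the map; this key is hypothetical.
type MyModels = AiModels & {
  "@cf/example/not-yet-typed-model": BaseAiTextGeneration;
};
declare const aiWithExtras: Ai<MyModels>;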