// Cloudflare dashboard setting required: Runtime -> Compatibility Flags -> nodejs_compat_populate_process_env
// File: worker.js

/**
 * Combined and Type-Safe Worker Script
 *
 * This script is a merger of multiple project files,
 * refactored to be type-safe and pass TypeScript static analysis.
 *
 * Fixes include:
 * - Resolved type conflicts (e.g., ReadableStream vs. String).
 * - Refactored TransformStream implementation to use closures for state.
 * - Corrected object shapes for API calls and internal functions.
 * - Added guards against potential runtime errors (e.g., undefined checks).
 * - Restored robust Buffer-based Base64 encoding for images.
 *
 * You can directly paste this code into the Cloudflare Worker editor.
 * The `nodejs_compat` flag is required.
 */

// Due to the `nodejs_compat` flag, the environment provides the 'Buffer' global.
// We do not need an explicit import statement in the final bundled worker.

// ++++++++++[ START: Merged and Fixed openai.mjs ]++++++++++
const openai = (() => {
  /**
   * Error subclass carrying an HTTP status code, so thrown errors can be
   * turned into meaningful HTTP responses by the top-level handler.
   */
  class HttpError extends Error {
    /**
     * @param {string} message - human-readable error description
     * @param {number} status - HTTP status code to respond with
     */
    constructor(message, status) {
      super(message);
      this.status = status;
      this.name = this.constructor.name;
    }
  }

  // Clones a response's init (headers/status/statusText) and forces a
  // permissive CORS policy onto the copied headers.
  const fixCors = ({ headers, status, statusText }) => {
    const corsHeaders = new Headers(headers);
    for (const name of ["Access-Control-Allow-Origin", "Access-Control-Allow-Methods", "Access-Control-Allow-Headers"]) {
      corsHeaders.set(name, "*");
    }
    return { headers: corsHeaders, status, statusText };
  };

  // Replies to CORS preflight requests with a blanket allow-everything policy.
  const handleOPTIONS = () => {
    const corsHeaders = {
      "Access-Control-Allow-Origin": "*",
      "Access-Control-Allow-Methods": "*",
      "Access-Control-Allow-Headers": "*",
    };
    return new Response(null, { headers: corsHeaders });
  };

  // Upstream Gemini endpoint and client identification string.
  const BASE_URL = "https://generativelanguage.googleapis.com";
  const API_VERSION = "v1beta";
  const API_CLIENT = "genai-js/0.21.0";

  /**
   * Builds the common request headers for Gemini API calls.
   * The API key header is included only when a key is provided;
   * `more` is merged last and may add/override entries.
   * @param {string|undefined} apiKey
   * @param {Object} [more]
   */
  const makeHeaders = (apiKey, more) => {
    const headers = { "x-goog-api-client": API_CLIENT };
    if (apiKey) {
      headers["x-goog-api-key"] = apiKey;
    }
    return { ...headers, ...more };
  };

  /**
   * GET /models — fetches the Gemini model list and reshapes it into
   * OpenAI's "list" response format. Upstream errors pass through
   * unchanged (with CORS headers added).
   * @param {string|undefined} apiKey
   * @returns {Promise<Response>}
   */
  async function handleModels(apiKey) {
    const upstream = await fetch(`${BASE_URL}/${API_VERSION}/models`, {
      headers: makeHeaders(apiKey),
    });
    if (!upstream.ok) {
      return new Response(upstream.body, fixCors(upstream));
    }
    const { models } = await upstream.json();
    const data = models.map(({ name }) => ({
      id: name.replace("models/", ""),
      object: "model",
      created: 0,
      owned_by: "",
    }));
    const payload = JSON.stringify({ object: "list", data }, null, "  ");
    return new Response(payload, fixCors(upstream));
  }


  // Fallback model used when the requested one is not a Gemini model.
  const DEFAULT_EMBEDDINGS_MODEL = "text-embedding-004";
  /**
   * POST /embeddings — translates an OpenAI embeddings request into a Gemini
   * batchEmbedContents call and converts the result back to OpenAI's shape.
   * NOTE: mutates `req` in place (model fallback, input normalization).
   * @param {Object} req - parsed OpenAI embeddings request body
   * @param {string|undefined} apiKey - Gemini API key
   * @returns {Promise<Response>}
   * @throws {HttpError} when `model` is missing
   */
  async function handleEmbeddings(req, apiKey) {
    if (typeof req.model !== "string") {
      throw new HttpError("model is not specified", 400);
    }
    let model;
    if (req.model.startsWith("models/")) {
      model = req.model;
    } else {
      // Anything that isn't a gemini-* model falls back to the default embedder.
      if (!req.model.startsWith("gemini-")) {
        req.model = DEFAULT_EMBEDDINGS_MODEL;
      }
      model = "models/" + req.model;
    }
    // OpenAI allows a single string input; the batch endpoint wants an array.
    if (!Array.isArray(req.input)) {
      req.input = [req.input];
    }
    const response = await fetch(`${BASE_URL}/${API_VERSION}/${model}:batchEmbedContents`, {
      method: "POST",
      headers: makeHeaders(apiKey, { "Content-Type": "application/json" }),
      body: JSON.stringify({
        "requests": req.input.map(text => ({
          model,
          // NOTE(review): `parts` is a single object here, not an array —
          // verify against the batchEmbedContents schema (a repeated Part
          // would normally be `parts: [{ text }]`).
          content: { parts: { text } },
          outputDimensionality: req.dimensions,
        }))
      })
    });

    if (response.ok) {
        const { embeddings } = await response.json();
        const responseBody = JSON.stringify({
            object: "list",
            data: embeddings.map(({ values }, index) => ({
                object: "embedding",
                index,
                embedding: values,
            })),
            model: req.model,
        }, null, "  ");
        return new Response(responseBody, fixCors(response));
    }
    // Upstream errors pass through unchanged (with CORS headers added).
    return new Response(response.body, fixCors(response));
  }


  const DEFAULT_MODEL = "gemini-2.5-flash";
  /**
   * POST /chat/completions — translates an OpenAI chat completion request into
   * a Gemini generateContent / streamGenerateContent call and converts the
   * reply (streaming or not) back into OpenAI format.
   * @param {Object} req - parsed OpenAI-style request body
   * @param {string|undefined} apiKey - Gemini API key from the Authorization header
   * @returns {Promise<Response>}
   */
  async function handleCompletions(req, apiKey) {
    // Map the requested model name onto a Gemini model id.
    let model = DEFAULT_MODEL;
    switch (true) {
      case typeof req.model !== "string":
        break;
      case req.model.startsWith("models/"):
        model = req.model.substring(7);
        break;
      case req.model.startsWith("gemini-"):
      case req.model.startsWith("gemma-"):
      case req.model.startsWith("learnlm-"):
        model = req.model;
    }

    let body = await transformRequest(req);
    // Vendor-specific passthrough options under extra_body.google.
    const extra = req.extra_body?.google;
    if (extra) {
      if (extra.safety_settings) {
        body.safetySettings = extra.safety_settings;
      }
      if (extra.cached_content) {
        body.cachedContent = extra.cached_content;
      }
      if (extra.thinking_config) {
        body.generationConfig.thinkingConfig = extra.thinking_config;
      }
    }
    // Enable the googleSearch tool for ":search" / "-search-preview" model
    // aliases or an explicit googleSearch pseudo-tool. The missing break after
    // the first case is intentional fallthrough.
    switch (true) {
      case model.endsWith(":search"):
        model = model.substring(0, model.length - 7);
      // FIX: req.model may be undefined here (default-model path above);
      // guard the string check to avoid a TypeError on `.endsWith`.
      case typeof req.model === "string" && req.model.endsWith("-search-preview"):
      case req.tools?.some(tool => tool.function?.name === 'googleSearch'):
        body.tools = body.tools || [];
        body.tools.push({ "googleSearch": {} });
        break;
    }

    const TASK = req.stream ? "streamGenerateContent" : "generateContent";
    let url = `${BASE_URL}/${API_VERSION}/models/${model}:${TASK}`;
    if (req.stream) { url += "?alt=sse"; }

    const response = await fetch(url, {
      method: "POST",
      headers: makeHeaders(apiKey, { "Content-Type": "application/json" }),
      body: JSON.stringify(body),
    });

    let responseBody = response.body;
    if (response.ok) {
      let id = "chatcmpl-" + generateId();
      if (req.stream) {
        // SSE pipeline: bytes -> text -> Gemini JSON events -> OpenAI chunks -> bytes.
        const streamParser = createStreamParser();
        const openaiTransformer = createOpenAiStreamTransformer(id, model, req.stream_options?.include_usage);
        responseBody = response.body
          .pipeThrough(new TextDecoderStream())
          .pipeThrough(new TransformStream(streamParser))
          .pipeThrough(new TransformStream(openaiTransformer))
          .pipeThrough(new TextEncoderStream());
      } else {
        const responseText = await response.text();
        let jsonData;
        try {
          jsonData = JSON.parse(responseText);
          if (!jsonData.candidates) {
            throw new Error("Invalid completion object");
          }
        } catch (err) {
          // Unusable upstream payload: return it verbatim for debugging.
          console.error("Error parsing response:", err);
          return new Response(responseText, fixCors(response));
        }
        responseBody = processCompletionsResponse(jsonData, model, id);
      }
    }
    return new Response(responseBody, fixCors(response));
  }

  /**
   * Recursively strips `additionalProperties: false` from object schemas
   * (a JSON-Schema strictness marker Gemini doesn't accept). Mutates in place.
   */
  const adjustProps = (schemaPart) => {
    if (typeof schemaPart !== "object" || schemaPart === null) {
      return;
    }
    if (Array.isArray(schemaPart)) {
      for (const element of schemaPart) {
        adjustProps(element);
      }
      return;
    }
    const isStrictObjectSchema =
      schemaPart.type === "object" &&
      schemaPart.properties &&
      schemaPart.additionalProperties === false;
    if (isStrictObjectSchema) {
      delete schemaPart.additionalProperties;
    }
    for (const value of Object.values(schemaPart)) {
      adjustProps(value);
    }
  };

  /**
   * Cleans an OpenAI tool / response_format wrapper for Gemini:
   * removes the OpenAI-only `strict` flag from the inner object
   * (looked up via `schema[schema.type]`), then scrubs the whole schema.
   */
  const adjustSchema = (schema) => {
    const inner = schema[schema.type];
    if (inner) {
      delete inner.strict;
    }
    return adjustProps(schema);
  };

  // Every harm category is set to BLOCK_NONE, i.e. client-side safety
  // filtering is disabled for all proxied requests.
  const harmCategory = [
    "HARM_CATEGORY_HATE_SPEECH", "HARM_CATEGORY_SEXUALLY_EXPLICIT",
    "HARM_CATEGORY_DANGEROUS_CONTENT", "HARM_CATEGORY_HARASSMENT", "HARM_CATEGORY_CIVIC_INTEGRITY"
  ];
  const safetySettings = harmCategory.map(category => ({ category, threshold: "BLOCK_NONE" }));
  // OpenAI request parameter -> Gemini generationConfig field name.
  // Note both max_completion_tokens and max_tokens map to maxOutputTokens.
  const fieldsMap = {
    frequency_penalty: "frequencyPenalty", max_completion_tokens: "maxOutputTokens",
    max_tokens: "maxOutputTokens", n: "candidateCount", presence_penalty: "presencePenalty",
    seed: "seed", stop: "stopSequences", temperature: "temperature", top_k: "topK", top_p: "topP"
  };
  // OpenAI reasoning_effort level -> Gemini thinkingBudget (token count).
  const thinkingBudgetMap = { low: 1024, medium: 8192, high: 24576 };

  /**
   * Builds a Gemini generationConfig from OpenAI request parameters using
   * fieldsMap, plus response_format and reasoning_effort handling.
   * @param {Object} req - OpenAI chat completion request
   * @returns {Object} Gemini generationConfig
   * @throws {HttpError} on an unsupported response_format.type
   */
  const transformConfig = (req) => {
    let cfg = {};
    for (let key in req) {
      const matchedKey = fieldsMap[key];
      if (matchedKey) cfg[matchedKey] = req[key];
    }
    if (req.response_format) {
      switch (req.response_format.type) {
        case "json_schema":
          adjustSchema(req.response_format);
          cfg.responseSchema = req.response_format.json_schema?.schema;
          if (cfg.responseSchema && "enum" in cfg.responseSchema) {
            cfg.responseMimeType = "text/x.enum";
            break;
          }
          // Intentional fallthrough: a non-enum json_schema is served as JSON.
        case "json_object":
          cfg.responseMimeType = "application/json";
          break;
        case "text":
          cfg.responseMimeType = "text/plain";
          break;
        default:
          throw new HttpError("Unsupported response_format.type", 400);
      }
    }
    if (req.reasoning_effort) {
      // Unknown effort levels map to undefined — upstream default applies.
      cfg.thinkingConfig = { thinkingBudget: thinkingBudgetMap[req.reasoning_effort] };
    }
    return cfg;
  };

  /**
   * Converts an OpenAI image_url (http(s) URL or data: URI) into a Gemini
   * inlineData part carrying base64 data.
   * @param {string} url
   * @throws {Error} when fetching a remote image fails
   * @throws {HttpError} when a data: URI cannot be parsed
   */
  const parseImg = async (url) => {
    let mimeType, data;
    if (url.startsWith("http://") || url.startsWith("https://")) {
        const response = await fetch(url);
        if (!response.ok) throw new Error(`${response.status} ${response.statusText} (${url})`);
        mimeType = response.headers.get("content-type");
        const arrayBuffer = await response.arrayBuffer();
        // Buffer is provided by the nodejs_compat flag (see file header).
        data = Buffer.from(arrayBuffer).toString('base64');
    } else {
        // data: URI — capture mime type and payload as-is.
        // NOTE(review): a non-base64 data URI passes through undecoded;
        // the payload is assumed to already be base64.
        const match = url.match(/^data:(?<mimeType>.*?)(;base64)?,(?<data>.*)$/);
        if (!match || !match.groups) throw new HttpError("Invalid image data: " + url, 400);
        ({ mimeType, data } = match.groups);
    }
    return { inlineData: { mimeType, data } };
  };

  /**
   * Folds one OpenAI "tool" role message into the pending Gemini
   * functionResponse parts array. `parts.calls` is an expando index
   * (tool_call_id -> { i, name }) attached by transformFnCalls; slot `i`
   * of `parts` is filled with the matching functionResponse. Mutates `parts`.
   * @throws {HttpError} on missing/unknown/duplicate ids or unparsable content
   */
  const transformFnResponse = ({ content, tool_call_id }, parts) => {
    if (!parts.calls) throw new HttpError("No function calls found in the previous message", 400);
    let response;
    try {
      response = JSON.parse(content);
    } catch (err) {
      console.error("Error parsing function response content:", err);
      throw new HttpError("Invalid function response: " + content, 400);
    }
    // Wrap primitives/arrays/null so the response is always a JSON object.
    if (typeof response !== "object" || response === null || Array.isArray(response)) {
      response = { result: response };
    }
    if (!tool_call_id) throw new HttpError("tool_call_id not specified", 400);
    const { i, name } = parts.calls[tool_call_id] ?? {};
    if (!name) throw new HttpError("Unknown tool_call_id: " + tool_call_id, 400);
    if (parts[i]) throw new HttpError("Duplicated tool_call_id: " + tool_call_id, 400);
    // Locally synthesized "call_*" ids are not echoed back upstream (id: null).
    parts[i] = { functionResponse: { id: tool_call_id.startsWith("call_") ? null : tool_call_id, name, response } };
  };

  /**
   * Converts an assistant message's OpenAI tool_calls into Gemini functionCall
   * parts. Also attaches a `calls` expando index (tool_call_id -> { i, name })
   * on the returned array, which transformFnResponse uses to match up
   * subsequent "tool" messages by id.
   * @throws {HttpError} on non-"function" tool calls or unparsable arguments
   */
  const transformFnCalls = ({ tool_calls }) => {
    const calls = {};
    const parts = tool_calls.map(({ function: { arguments: argstr, name }, id, type }, i) => {
      if (type !== "function") throw new HttpError(`Unsupported tool_call type: "${type}"`, 400);
      let args;
      try {
        args = JSON.parse(argstr);
      } catch (err) {
        console.error("Error parsing function arguments:", err);
        throw new HttpError("Invalid function arguments: " + argstr, 400);
      }
      calls[id] = { i, name };
      // Locally synthesized "call_*" ids are not echoed back upstream (id: null).
      return { functionCall: { id: id.startsWith("call_") ? null : id, name, args } };
    });
    // Non-standard expando property consumed by transformFnResponse.
    parts.calls = calls;
    return parts;
  };

  /**
   * Converts one OpenAI message's `content` into an array of Gemini parts.
   * Accepts null/undefined (empty parts), a plain string, or the
   * array-of-items form (text / image_url / input_audio).
   * @throws {HttpError} on an unknown content item type
   */
  const transformMsg = async ({ content }) => {
    const parts = [];
    if (content === null || typeof content === 'undefined') {
        return parts;
    }
    if (!Array.isArray(content)) {
      // Plain string content becomes a single text part.
      parts.push({ text: content });
      return parts;
    }
    for (const item of content) {
      switch (item.type) {
        case "text":
          parts.push({ text: item.text });
          break;
        case "image_url":
          parts.push(await parseImg(item.image_url.url));
          break;
        case "input_audio":
          parts.push({ inlineData: { mimeType: "audio/" + item.input_audio.format, data: item.input_audio.data } });
          break;
        default:
          throw new HttpError(`Unknown "content" item type: "${item.type}"`, 400);
      }
    }
    // Image-only content gets an empty text part appended — presumably the
    // upstream API expects a text part alongside images; TODO confirm.
    // (Note: an empty array also hits this branch, since .every() is
    // vacuously true.)
    if (content.every(item => item.type === "image_url")) {
      parts.push({ text: "" });
    }
    return parts;
  };

  /**
   * Converts the OpenAI `messages` array into Gemini `contents` plus an
   * optional `system_instruction`.
   * - "system" messages become the system instruction (last one wins).
   * - consecutive "tool" messages are folded into one "function" role turn.
   * - "assistant" is renamed to Gemini's "model" role (mutates the item).
   * @throws {HttpError} on an unknown message role
   */
  const transformMessages = async (messages) => {
    if (!messages) return {};
    const contents = [];
    let system_instruction;
    for (const item of messages) {
      switch (item.role) {
        case "system":
          system_instruction = { parts: await transformMsg(item) };
          continue;
        case "tool":
          // Reuse the trailing "function" turn if present; otherwise start
          // one, carrying over the `calls` index from the previous turn's
          // parts (set by transformFnCalls) for tool_call_id matching.
          let { role, parts } = contents[contents.length - 1] ?? {};
          if (role !== "function") {
            const calls = parts?.calls;
            parts = []; parts.calls = calls;
            contents.push({ role: "function", parts });
          }
          transformFnResponse(item, parts);
          continue;
        case "assistant":
          item.role = "model";
          break;
        case "user":
          break;
        default:
          throw new HttpError(`Unknown message role: "${item.role}"`, 400);
      }
      contents.push({
        role: item.role,
        parts: item.tool_calls ? transformFnCalls(item) : await transformMsg(item)
      });
    }
    if (system_instruction) {
        // If the first turn has no non-blank text, insert a placeholder user
        // turn — presumably so the system instruction has a user message to
        // attach to upstream; TODO confirm against the API.
        const firstUserContent = contents[0];
        if (!firstUserContent || !firstUserContent.parts.some(part => typeof part.text === 'string' && part.text.trim() !== '')) {
            contents.unshift({ role: "user", parts: [{ text: " " }] });
        }
    }
    return { system_instruction, contents };
  };

  /**
   * Maps OpenAI `tools` / `tool_choice` onto Gemini's `tools` / `tool_config`
   * request fields. The "googleSearch" pseudo-tool is skipped here — it is
   * handled separately in handleCompletions.
   */
  const transformTools = (req) => {
    let tools;
    let tool_config;
    if (req.tools) {
      const declarations = req.tools
        .filter(tool => tool.type === "function" && tool.function?.name !== 'googleSearch');
      if (declarations.length > 0) {
        for (const decl of declarations) {
          adjustSchema(decl);
        }
        tools = [{ function_declarations: declarations.map(decl => decl.function) }];
      }
    }
    if (req.tool_choice) {
      const isNamedFunction = req.tool_choice?.type === "function";
      const allowed_function_names = isNamedFunction ? [req.tool_choice?.function?.name] : undefined;
      if (allowed_function_names || typeof req.tool_choice === "string") {
        const mode = allowed_function_names ? "ANY" : req.tool_choice.toUpperCase();
        tool_config = { function_calling_config: { mode, allowed_function_names } };
      }
    }
    return { tools, tool_config };
  };

  /**
   * Assembles the complete Gemini request body from an OpenAI chat request:
   * messages (-> contents / system_instruction), the blanket safetySettings,
   * generation config, and tool declarations.
   * @param {Object} req - OpenAI chat completion request
   * @returns {Promise<Object>} Gemini generateContent request body
   */
  const transformRequest = async (req) => {
    const { system_instruction, contents } = await transformMessages(req.messages);
    const { tools, tool_config } = transformTools(req);
    const generationConfig = transformConfig(req);
    return {
      system_instruction,
      contents,
      safetySettings,
      generationConfig,
      tools,
      tool_config,
    };
  };


  // Generates a 29-character alphanumeric suffix for OpenAI-style ids
  // (e.g. "chatcmpl-<id>", "call_<id>"). Not cryptographically secure.
  const generateId = () => {
    const ALPHABET = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
    let id = "";
    for (let i = 0; i < 29; i++) {
      id += ALPHABET[Math.floor(Math.random() * ALPHABET.length)];
    }
    return id;
  };

  // Gemini finishReason -> OpenAI finish_reason (unknown values pass through).
  const reasonsMap = { "STOP": "stop", "MAX_TOKENS": "length", "SAFETY": "content_filter", "RECITATION": "content_filter" };
  // Separator joining multiple text parts of one candidate.
  const SEP = "\n\n|>";

  /**
   * Converts one Gemini candidate into an OpenAI choice object.
   * @param {"message"|"delta"} key - payload property name (non-stream vs. stream)
   * @param {Object} cand - Gemini candidate
   */
  const transformCandidates = (key, cand) => {
    const message = { role: "assistant", tool_calls: undefined, content: null };
    const texts = [];
    for (const part of cand.content?.parts ?? []) {
      const fc = part.functionCall;
      if (fc) {
        // Keep upstream ids; synthesize an OpenAI-style "call_*" id otherwise.
        message.tool_calls = message.tool_calls ?? [];
        message.tool_calls.push({
          id: fc.id ?? "call_" + generateId(),
          type: "function",
          function: { name: fc.name, arguments: JSON.stringify(fc.args) },
        });
      } else if (part.text) {
        texts.push(part.text);
      }
    }
    if (texts.length > 0) {
      message.content = texts.join(SEP);
    }
    const finish_reason = message.tool_calls
      ? "tool_calls"
      : reasonsMap[cand.finishReason] || cand.finishReason;
    return { index: cand.index || 0, [key]: message, logprobs: null, finish_reason };
  };
  const transformCandidatesMessage = transformCandidates.bind(null, "message");
  const transformCandidatesDelta = transformCandidates.bind(null, "delta");

  // Gemini usageMetadata -> OpenAI usage object.
  const transformUsage = (data) => ({
    completion_tokens: data.candidatesTokenCount,
    prompt_tokens: data.promptTokenCount,
    total_tokens: data.totalTokenCount,
  });

  /**
   * When Gemini returned no candidates because the prompt itself was blocked,
   * appends a synthetic "content_filter" choice so clients get a finish reason.
   * @param {Array} choices - mutated in place when a block is detected
   * @param {Object|undefined} promptFeedback - Gemini promptFeedback object
   * @param {"message"|"delta"} key - property name for the (null) payload
   * @returns {boolean} true if a prompt block was detected
   */
  const checkPromptBlock = (choices, promptFeedback, key) => {
    if (choices.length) {
      return false;
    }
    if (!promptFeedback?.blockReason) {
      return false;
    }
    console.log("Prompt block reason:", promptFeedback.blockReason);
    if (promptFeedback.blockReason === "SAFETY") {
      promptFeedback.safetyRatings
        .filter(r => r.blocked)
        .forEach(r => console.log(r));
    }
    choices.push({ index: 0, [key]: null, finish_reason: "content_filter" });
    return true;
  };

  /**
   * Builds the OpenAI non-streaming "chat.completion" JSON string from a
   * complete Gemini generateContent response.
   * @param {Object} data - parsed Gemini response (must have .candidates)
   * @param {string} model - fallback model name when modelVersion is absent
   * @param {string} id - chat completion id
   * @returns {string} serialized OpenAI response body
   */
  const processCompletionsResponse = (data, model, id) => {
    const completion = {
      id,
      choices: data.candidates.map(transformCandidatesMessage),
      created: Math.floor(Date.now() / 1000),
      model: data.modelVersion ?? model,
      object: "chat.completion",
      usage: data.usageMetadata && transformUsage(data.usageMetadata),
    };
    if (completion.choices.length === 0) {
      // No candidates at all: surface a prompt block as a content_filter choice.
      checkPromptBlock(completion.choices, data.promptFeedback, "message");
    }
    return JSON.stringify(completion);
  };

  /**
   * TransformStream logic that re-chunks a decoded Gemini SSE stream into
   * complete "data:" payload strings — one enqueue per event. The partial
   * buffer lives in the closure (no `this` state).
   */
  function createStreamParser() {
    // One complete SSE event at the start of the buffer:
    // "data: <payload>" followed by a blank-line delimiter.
    const eventRE = /^data: (.*)(?:\n\n|\r\r|\r\n\r\n)/;
    let pending = "";
    return {
      transform(chunk, controller) {
        pending += chunk;
        for (let m = pending.match(eventRE); m; m = pending.match(eventRE)) {
          controller.enqueue(m[1]);
          pending = pending.substring(m[0].length);
        }
      },
      flush(controller) {
        if (pending) {
          // Stream ended mid-event; pass the remainder through for visibility.
          console.error("Invalid data in stream buffer:", pending);
          controller.enqueue(pending);
        }
      }
    };
  }
  
  /**
   * TransformStream logic converting parsed Gemini stream events (JSON strings,
   * one per SSE event) into OpenAI "chat.completion.chunk" SSE lines.
   * Closure state: `last[index]` holds the most recent chunk per choice with
   * its finish_reason withheld; flush() replays those final chunks and then
   * emits the "[DONE]" sentinel.
   * @param {string} id - chat completion id shared by all chunks
   * @param {string} model - fallback model name when a chunk lacks modelVersion
   * @param {boolean|undefined} streamIncludeUsage - stream_options.include_usage
   */
  function createOpenAiStreamTransformer(id, model, streamIncludeUsage) {
      let last = [];
      const delimiter = "\n\n";
      // Serializes one chunk object as an SSE "data:" line, stamping `created`.
      const sseline = (obj) => {
          obj.created = Math.floor(Date.now() / 1000);
          return "data: " + JSON.stringify(obj) + delimiter;
      };

      return {
          transform(line, controller) {
              let data;
              try {
                  data = JSON.parse(line);
                  if (!data.candidates && !data.promptFeedback) throw new Error("Invalid completion chunk object");
              } catch (err) {
                  // Unparsable event: forward it verbatim so the client sees the raw error.
                  console.error("Error parsing response stream line:", err);
                  controller.enqueue(line + delimiter);
                  return;
              }

              const obj = {
                  id: id,
                  choices: data.candidates ? data.candidates.map(transformCandidatesDelta) : [],
                  model: data.modelVersion ?? model,
                  object: "chat.completion.chunk",
                  usage: data.usageMetadata && streamIncludeUsage ? transformUsage(data.usageMetadata) : undefined,
              };

              // Prompt-level block: emit a single content_filter chunk and stop.
              if (checkPromptBlock(obj.choices, data.promptFeedback, "delta")) {
                  controller.enqueue(sseline(obj));
                  return;
              }
              
              if(!obj.choices || obj.choices.length === 0){
                   return; // Nothing to process
              }

              // NOTE(review): only the first choice per event is handled; this
              // assumes one candidate per stream event — verify for n > 1.
              const cand = obj.choices[0];
              if (cand === undefined) return;

              cand.index = cand.index || 0;
              // Withhold finish_reason; it is restored below and emitted at flush.
              const finish_reason = cand.finish_reason;
              cand.finish_reason = null;

              // First event for this choice: emit the role-announcement delta
              // expected at the start of an OpenAI stream.
              if (!last[cand.index]) {
                  controller.enqueue(sseline({ ...obj, choices: [{ ...cand, delta: { role: "assistant", content: "" } }] }));
              }
              
              if (cand.delta) delete cand.delta.role;

              if (cand.delta && "content" in cand.delta) {
                  controller.enqueue(sseline(obj));
              }

              // Remember this chunk (finish_reason restored, delta emptied,
              // usage refreshed) so flush() can emit the closing chunk.
              cand.finish_reason = finish_reason;
              if (data.usageMetadata && streamIncludeUsage) {
                  obj.usage = transformUsage(data.usageMetadata);
              }
              if(cand.delta) cand.delta = {};
              last[cand.index] = obj;
          },

          flush(controller) {
              if (last.length > 0) {
                  for (const obj of last) {
                      if (obj) controller.enqueue(sseline(obj));
                  }
                  controller.enqueue("data: [DONE]" + delimiter);
              }
          }
      };
  }

  // The main fetch method for this module
  return {
    async fetch(request) {
      if (request.method === "OPTIONS") return handleOPTIONS();
      const errHandler = (err) => {
        console.error(err);
        const status = err.status ?? 500;
        return new Response(err.message, fixCors({ headers: new Headers(), status, statusText: err.name || `Error ${status}` }));
      };
      try {
        const auth = request.headers.get("Authorization");
        let apiKey = auth?.split(" ")[1];
        if (apiKey && apiKey.includes(',')) {
          const apiKeys = apiKey.split(',').map(k => k.trim()).filter(k => k);
          apiKey = apiKeys[Math.floor(Math.random() * apiKeys.length)];
          console.log(`OpenAI Selected API Key: ${apiKey}`);
        }
        const assert = (success) => {
          if (!success) throw new HttpError("The specified HTTP method is not allowed for the requested resource", 400);
        };
        const { pathname } = new URL(request.url);
        switch (true) {
          case pathname.endsWith("/chat/completions"):
            assert(request.method === "POST");
            return handleCompletions(await request.json(), apiKey).catch(errHandler);
          case pathname.endsWith("/embeddings"):
            assert(request.method === "POST");
            return handleEmbeddings(await request.json(), apiKey).catch(errHandler);
          case pathname.endsWith("/models"):
            assert(request.method === "GET");
            return handleModels(apiKey).catch(errHandler);
          default:
            throw new HttpError("404 Not Found", 404);
        }
      } catch (err) {
        return errHandler(err);
      }
    }
  };
})();
// ++++++++++[ END: Merged and Fixed openai.mjs ]++++++++++


// ++++++++++[ START: Merged from verify_keys.js ]++++++++++
/**
 * Probes one Gemini API key with a minimal generateContent request and
 * enqueues a single SSE "data:" line describing the outcome
 * ({ key: <masked>, status: GOOD | BAD | ERROR, error? }).
 * @param {string} key - API key to verify
 * @param {ReadableStreamDefaultController} controller - SSE output sink
 */
async function verifyKey(key, controller) {
  const endpoint = 'https://generativelanguage.googleapis.com/v1beta/models/gemini-2.5-flash:generateContent';
  const probe = { "contents": [{ "role": "user", "parts": [{ "text": "Hello" }] }] };
  const masked = `${key.slice(0, 7)}......${key.slice(-7)}`;
  let outcome;
  try {
    const response = await fetch(endpoint, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json', 'X-goog-api-key': key },
      body: JSON.stringify(probe),
    });
    if (response.ok) {
      await response.text(); // Consume body
      outcome = { key: masked, status: 'GOOD' };
    } else {
      const errorData = await response.json().catch(() => ({ error: { message: 'Unknown error' } }));
      const errorMessage = errorData.error?.message || 'Unknown error';
      outcome = { key: masked, status: 'BAD', error: errorMessage };
    }
  } catch (e) {
    outcome = { key: masked, status: 'ERROR', error: e.message };
  }
  controller.enqueue(new TextEncoder().encode('data: ' + JSON.stringify(outcome) + '\n\n'));
}

/**
 * POST /verify — checks every comma-separated key from the x-goog-api-key
 * header against the Gemini API and streams one SSE result line per key.
 * @param {Request} request
 * @returns {Promise<Response>} text/event-stream of { key, status, error? }
 */
async function handleVerification(request) {
  try {
    const authHeader = request.headers.get('x-goog-api-key');
    if (!authHeader) {
      return new Response(JSON.stringify({ error: 'Missing x-goog-api-key header.' }), {
        status: 400, headers: { 'Content-Type': 'application/json' },
      });
    }
    const keys = authHeader.split(',').map(k => k.trim()).filter(Boolean);
    const stream = new ReadableStream({
      async start(controller) {
        // All keys are probed in parallel; results arrive in completion order.
        const verificationPromises = keys.map(key => verifyKey(key, controller));
        await Promise.all(verificationPromises);
        controller.close();
      }
    });
    return new Response(stream, {
      status: 200,
      headers: {
        'Content-Type': 'text/event-stream',
        'Cache-Control': 'no-cache',
        'Connection': 'keep-alive',
      }
    });
  } catch (e) {
    return new Response(JSON.stringify({ error: 'An unexpected error occurred: ' + e.message }), { status: 500, headers: { 'Content-Type': 'application/json' } });
  }
}
// ++++++++++[ END: Merged from verify_keys.js ]++++++++++


// ++++++++++[ START: Merged from handle_request.js ]++++++++++
/**
 * Top-level router:
 * - "/" and "/index.html": health/landing page
 * - POST /verify: API key verification stream
 * - OpenAI-compatible paths: delegated to the openai module
 * - everything else: proxied verbatim to the Gemini API
 * @param {Request} request
 * @returns {Promise<Response>}
 */
async function handleRequest(request) {
  const url = new URL(request.url);
  const pathname = url.pathname;
  const search = url.search;

  if (pathname === '/' || pathname === '/index.html') {
    return new Response('Proxy is Running!  More Details: https://github.com/tech-shrimp/gemini-balance-lite', {
      status: 200,
      headers: { 'Content-Type': 'text/html' }
    });
  }

  if (pathname === '/verify' && request.method === 'POST') {
    return handleVerification(request);
  }

  if (url.pathname.endsWith("/chat/completions") || url.pathname.endsWith("/completions") || url.pathname.endsWith("/embeddings") || url.pathname.endsWith("/models")) {
    return openai.fetch(request);
  }

  // Transparent proxy to the Gemini API for all other paths.
  const targetUrl = `https://generativelanguage.googleapis.com${pathname}${search}`;
  try {
    const headers = new Headers();
    for (const [key, value] of request.headers.entries()) {
      if (key.trim().toLowerCase() === 'x-goog-api-key') {
        // Comma-separated keys: pick one at random (load balancing).
        const apiKeys = value.split(',').map(k => k.trim()).filter(k => k);
        if (apiKeys.length > 0) {
          const selectedKey = apiKeys[Math.floor(Math.random() * apiKeys.length)];
          // SECURITY FIX: log only a masked form of the key, never the secret.
          console.log(`Gemini Selected API Key: ${selectedKey.slice(0, 7)}......${selectedKey.slice(-7)}`);
          headers.set('x-goog-api-key', selectedKey);
        }
      } else {
        // Only Content-Type is forwarded; other client/CF headers are
        // intentionally dropped and not sent upstream.
        if (key.trim().toLowerCase() === 'content-type') {
          headers.set(key, value);
        }
      }
    }

    console.log('Request Sending to Gemini');
    console.log('targetUrl:' + targetUrl);

    const response = await fetch(targetUrl, {
      method: request.method,
      headers: headers,
      body: request.body
    });

    console.log("Call Gemini Success");
    const responseHeaders = new Headers(response.headers);

    // Strip hop-by-hop / encoding headers that don't survive re-streaming,
    // then attach the proxy's own policy headers.
    responseHeaders.delete('transfer-encoding');
    responseHeaders.delete('connection');
    responseHeaders.delete('keep-alive');
    responseHeaders.delete('content-encoding');
    responseHeaders.set('Referrer-Policy', 'no-referrer');
    responseHeaders.set('Access-Control-Allow-Origin', '*');

    return new Response(response.body, {
      status: response.status,
      statusText: response.statusText,
      headers: responseHeaders
    });

  } catch (error) {
    console.error('Failed to fetch:', error);
    return new Response('Internal Server Error\n' + error?.stack, {
      status: 500,
      headers: { 'Content-Type': 'text/plain' }
    });
  }
}
// ++++++++++[ END: Merged from handle_request.js ]++++++++++


// ++++++++++[ START: Merged from index.js ]++++++++++
export default {
  /**
   * Cloudflare Worker entry point: logs the incoming URL and delegates
   * everything to handleRequest. `env` and `ctx` are currently unused.
   * @param {Request} request
   * @param {unknown} env
   * @param {unknown} ctx
   */
  async fetch(request, env, ctx) {
    console.log('Request URL:', request.url);
    return handleRequest(request);
  }
}
// ++++++++++[ END: Merged from index.js ]++++++++++

// (pasted-page artifact: "添加新评论" / "Add new comment" — not part of the worker code)