Turn a Cloudflare AI worker into Ollama REST API-compatible endpoint (migrate from Ollama to Cloudflare AI)
I’ve recently had to switch from a self-hosted Ollama instance to a Cloudflare Worker for development.
As I’m using API calls (with Tines), I wanted the transition to go as smoothly as possible, so I needed my existing Ollama API calls to work against my Cloudflare Worker.
This is the result of the worker.
import { Ai } from './vendor/@cloudflare/ai.js';

/**
 * Decode a Base64 string to text.
 *
 * Uses `atob()`, which is a native global in the Cloudflare Workers runtime.
 * (The previous `Buffer.from(...)` implementation only works in Workers with
 * the `nodejs_compat` flag; without it, `Buffer` is undefined and decoding
 * silently failed.) Note: `atob()` decodes to Latin-1; this is fine for the
 * typical ASCII credentials carried in a Basic auth header.
 *
 * @param {string} str - Base64-encoded input.
 * @returns {string|null} Decoded text, or null if the input is malformed.
 */
function decodeBase64(str) {
  try {
    return atob(str);
  } catch (error) {
    console.error('Error decoding Base64 string:', error);
    return null;
  }
}

/**
 * Constant-time string comparison.
 *
 * `crypto.timingSafeEqual` is a Node API; the Workers runtime only exposes
 * `crypto.subtle.timingSafeEqual`, and both throw on unequal-length inputs.
 * This portable variant XOR-folds every character plus the length difference,
 * so runtime does not depend on where the strings first differ.
 *
 * @param {string} a
 * @param {string} b
 * @returns {boolean} true if the strings are identical.
 */
function timingSafeEqualStr(a, b) {
  const len = Math.max(a.length, b.length);
  let diff = a.length ^ b.length;
  for (let i = 0; i < len; i += 1) {
    // `| 0` maps the NaN from an out-of-range charCodeAt to 0.
    diff |= (a.charCodeAt(i) | 0) ^ (b.charCodeAt(i) | 0);
  }
  return diff === 0;
}

/**
 * Validate an HTTP Basic `Authorization` header against expected credentials.
 *
 * Per RFC 7617 the credentials are `base64(username ":" password)` and only
 * the FIRST colon separates the two — a password may itself contain colons.
 * (The previous `split(':')` truncated such passwords.)
 *
 * @param {string|null} header - Raw `Authorization` header value, or null.
 * @param {string} expectedUsername
 * @param {string} expectedPassword
 * @returns {boolean} true only when both username and password match.
 */
function isValidAuth(header, expectedUsername, expectedPassword) {
  if (!header) return false;
  const [scheme, credentials] = header.split(' ');
  if (scheme !== 'Basic' || !credentials) return false;

  const decoded = decodeBase64(credentials);
  if (!decoded) return false;

  const separator = decoded.indexOf(':');
  if (separator === -1) return false;
  const username = decoded.slice(0, separator);
  const password = decoded.slice(separator + 1);

  // Evaluate both comparisons unconditionally so a username mismatch does not
  // short-circuit and leak timing information about which field was wrong.
  const usernameOk = timingSafeEqualStr(username, String(expectedUsername));
  const passwordOk = timingSafeEqualStr(password, String(expectedPassword));
  return usernameOk && passwordOk;
}

export default {
  /**
   * Ollama-compatible front for Cloudflare Workers AI.
   *
   * Accepts POSTs to `/api/chat` (Ollama chat endpoint) and `/api/generate`
   * (Ollama completion endpoint), forwards them to Workers AI via `ai.run()`
   * using the caller-supplied `model`, and returns the accumulated
   * input/response pairs as JSON.
   *
   * @param {Request} request - Incoming HTTP request.
   * @param {object} env - Worker bindings; uses `env.AI`, and optionally
   *   `env.AUTH_USERNAME` / `env.AUTH_PASSWORD` for Basic auth.
   * @returns {Promise<Response>}
   */
  async fetch(request, env) {
    if (request.method !== 'POST') {
      return new Response('Method Not Allowed', {
        status: 405,
        headers: { Allow: 'POST' },
      });
    }

    // The auth helper above was previously dead code — it was never invoked,
    // so the worker was effectively unauthenticated. Enforce Basic auth only
    // when credentials are configured, which keeps existing deployments
    // (without AUTH_USERNAME/AUTH_PASSWORD bindings) working unchanged.
    if (env.AUTH_USERNAME && env.AUTH_PASSWORD) {
      const authHeader = request.headers.get('Authorization');
      if (!isValidAuth(authHeader, env.AUTH_USERNAME, env.AUTH_PASSWORD)) {
        return new Response('Unauthorized', {
          status: 401,
          headers: { 'WWW-Authenticate': 'Basic realm="worker"' },
        });
      }
    }

    const tasks = []; // Accumulates { inputs, response } pairs for the reply body.
    try {
      const ai = new Ai(env.AI);
      const url = new URL(request.url);
      const requestBody = await request.json();

      // `model` is mandatory for every endpoint.
      if (!requestBody.model) {
        return new Response('Missing required input', {
          status: 400,
          headers: { 'Content-Type': 'text/plain' },
        });
      }
      const modelRef = requestBody.model; // Caller-selected Workers AI model.
      console.log(`Requested model is ${modelRef}`);

      let response;
      switch (url.pathname) {
        case '/api/chat': {
          // Braces scope the `const` declarations to this case
          // (lexical declarations in an unbraced case leak switch-wide).
          if (!requestBody.messages || !Array.isArray(requestBody.messages)) {
            return new Response('Missing or invalid "messages" field', {
              status: 400,
              headers: { 'Content-Type': 'text/plain' },
            });
          }
          const chat = {
            model: modelRef,
            messages: requestBody.messages,
            stream: requestBody.stream,
          };
          console.log(`Chat initiated with model ${modelRef}`);
          response = await ai.run(modelRef, chat);
          tasks.push({ inputs: chat, response });
          break;
        }
        case '/api/generate': {
          if (!requestBody.prompt) {
            return new Response('Missing "prompt" field', {
              status: 400,
              headers: { 'Content-Type': 'text/plain' },
            });
          }
          const simple = {
            model: modelRef,
            prompt: requestBody.prompt,
          };
          console.log(`Generating response for model ${modelRef}`);
          response = await ai.run(modelRef, simple);
          tasks.push({ inputs: simple, response });
          break;
        }
        default:
          return new Response('Unsupported endpoint', { status: 404 });
      }

      return new Response(JSON.stringify(tasks), {
        headers: { 'Content-Type': 'application/json' },
      });
    } catch (error) {
      console.error('An unexpected error occurred:', error);
      return new Response('An unexpected error occurred', {
        status: 500,
        headers: { 'Content-Type': 'text/plain' },
      });
    }
  },
};