AI Chat Worker with Llama-2
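
The Worker below accepts a POST request whose JSON body contains a messages array, prepends a system prompt, runs the @cf/meta/llama-2-7b-chat-int8 model through the Workers AI binding (env.AI), and streams the model's reply back to the caller as server-sent events.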
// Bindings configured for the Worker; AI is the Workers AI binding.
export interface Env {
  AI: any;
}

// Shape of a single chat message in the request body.
interface ChatMessage {
  role: string;
  content: string;
}

export default {
  async fetch(request: Request, env: Env): Promise<Response> {
    // Only chat completions via POST are supported.
    if (request.method !== 'POST') {
      return new Response('Method not allowed', { status: 405 });
    }

    // Expect a JSON body of the form { "messages": [{ "role": "...", "content": "..." }] }.
    const { messages } = await request.json<{ messages: ChatMessage[] }>();
    if (!Array.isArray(messages)) {
      return new Response('Expected a "messages" array', { status: 400 });
    }

    // Prepend a system prompt and ask the model to stream tokens as they are generated.
    const response = await env.AI.run('@cf/meta/llama-2-7b-chat-int8', {
      messages: [
        { role: 'system', content: 'You are a helpful assistant.' },
        ...messages,
      ],
      stream: true,
    });

    // With stream: true the binding returns a ReadableStream of server-sent events,
    // which can be passed straight through as the response body.
    return new Response(response, {
      headers: {
        'content-type': 'text/event-stream',
        'cache-control': 'no-cache',
      },
    });
  },
};
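
For reference, here is a minimal client-side sketch of how the streamed reply might be consumed. The Worker URL is a hypothetical placeholder, and the sketch assumes the stream arrives as SSE lines of the form data: {"response":"..."} terminated by data: [DONE], which is how Workers AI text-generation streams are commonly formatted; adjust the parsing if your model's output differs.

// Sketch: POST a user message to the Worker and accumulate the streamed reply.
async function chat(userMessage: string): Promise<string> {
  const res = await fetch('https://my-ai-worker.example.workers.dev', { // hypothetical URL
    method: 'POST',
    headers: { 'content-type': 'application/json' },
    body: JSON.stringify({ messages: [{ role: 'user', content: userMessage }] }),
  });

  if (!res.ok || !res.body) {
    throw new Error(`Request failed: ${res.status}`);
  }

  // Decode the byte stream to text and split it into SSE lines.
  const reader = res.body.pipeThrough(new TextDecoderStream()).getReader();
  let full = '';
  let buffer = '';

  while (true) {
    const { value, done } = await reader.read();
    if (done) break;
    buffer += value;

    // Each SSE payload line starts with "data: "; keep any trailing partial line in the buffer.
    const lines = buffer.split('\n');
    buffer = lines.pop() ?? '';
    for (const line of lines) {
      if (!line.startsWith('data: ')) continue;
      const payload = line.slice('data: '.length).trim();
      if (payload === '[DONE]') return full;
      try {
        // Assumed chunk shape: {"response":"<token text>"}.
        full += JSON.parse(payload).response ?? '';
      } catch {
        // Ignore partial or non-JSON lines.
      }
    }
  }
  return full;
}

Accumulating into a string keeps the example short; in a UI you would typically append each chunk to the page as it arrives rather than waiting for the full reply.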