Streaming Real-Time Results with nectar.js, OpenAI, and Server Sent Events (SSE): A Step-by-Step Guide
- September 19, 2023
Build brilliant AI-powered websites effortlessly with Nectar.js 🚀: your gateway to crafting stunning AI websites and applications! Nectar.js is an open-source repository that empowers developers to seamlessly create AI-driven web experiences, leveraging Server-Sent Events (SSE), React, OpenAI's GPT models, and Vite to build dynamic, real-time interfaces.
🚀 Quick start

1. Clone the repo:

```sh
git clone https://github.com/socialtribexyz/nectar.js
```

2. Install dependencies:

```sh
npm install
# or
pnpm install
```

3. Rename the `.env.example` file to `.env` and add your API keys.

4. Start developing:

```sh
npm run dev
# or
pnpm run dev
```

5. Open the source code and start editing! Your site is now running at http://localhost:5173.
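What goes in `.env` depends on the variables listed in `.env.example`. As a sketch, assuming an OpenAI API key is what's required, it might look like this (the variable name here is hypothetical; copy the real names from `.env.example`):

```sh
# .env — hypothetical variable name, for illustration only.
# Note: Vite only exposes variables prefixed with VITE_ to client-side code.
VITE_OPENAI_API_KEY=sk-...
```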
First, create a backend directory inside your project, then add a `fetch-sse.mjs` file to it:
```js
// fetch-sse.mjs
import { createParser } from "eventsource-parser";
import { streamAsyncIterable } from "./stream-async-iterable.mjs";

// Server-Sent Events (SSE) is a technology for pushing data from a server
// to a web client in real time over a single long-lived HTTP connection.
export async function fetchSSE(resource, options) {
  const { onMessage, ...fetchOptions } = options;
  const resp = await fetch(resource, fetchOptions);
  // The parser splits the raw text stream into individual SSE events.
  const parser = createParser((event) => {
    if (event.type === "event") {
      onMessage(event.data);
    }
  });
  // Read the response body chunk by chunk, decode it, and feed the parser.
  for await (const chunk of streamAsyncIterable(resp.body)) {
    const str = new TextDecoder().decode(chunk);
    parser.feed(str);
  }
}
```
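To see how `fetchSSE` is meant to be called, here is a minimal usage sketch; the endpoint URL and request body are hypothetical stand-ins for your own streaming route:

```js
import { fetchSSE } from "./fetch-sse.mjs";

// Hypothetical endpoint and payload, purely for illustration.
await fetchSSE("https://example.com/api/stream", {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({ prompt: "Hello" }),
  // onMessage fires once per SSE event with the raw `data:` payload.
  onMessage(message) {
    console.log("event data:", message);
  },
});
```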
Next, create an `index.mjs` file:
```js
// index.mjs
import ExpiryMap from "expiry-map";
import { v4 as uuidv4 } from "uuid";
import Browser from "webextension-polyfill";
import { fetchSSE } from "./fetch-sse.mjs";

const KEY_ACCESS_TOKEN = "accessToken";

// Cache the access token for 10 seconds to avoid refetching it on every request.
const cache = new ExpiryMap(10 * 1000);

async function getAccessToken() {
  if (cache.get(KEY_ACCESS_TOKEN)) {
    return cache.get(KEY_ACCESS_TOKEN);
  }
  const resp = await fetch("https://chat.openai.com/api/auth/session")
    .then((r) => r.json())
    .catch(() => ({}));
  if (!resp.accessToken) {
    throw new Error("UNAUTHORIZED");
  }
  console.log(resp.accessToken);
  cache.set(KEY_ACCESS_TOKEN, resp.accessToken);
  return resp.accessToken;
}

async function getAnswer(question, callback) {
  const accessToken = await getAccessToken();
  await fetchSSE("https://chat.openai.com/backend-api/conversation", {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      Authorization: `Bearer ${accessToken}`,
    },
    body: JSON.stringify({
      action: "next",
      messages: [
        {
          id: uuidv4(),
          role: "user",
          content: {
            content_type: "text",
            parts: [question],
          },
        },
      ],
      model: "text-davinci-002-render",
      parent_message_id: uuidv4(),
    }),
    // Called once per SSE event; the stream ends with a literal "[DONE]".
    onMessage(message) {
      console.debug("sse message", message);
      if (message === "[DONE]") {
        return;
      }
      const data = JSON.parse(message);
      const text = data.message?.content?.parts?.[0];
      if (text) {
        callback(text);
      }
    },
  });
}

// Listen for connections from the frontend and stream answers back over the port.
Browser.runtime.onConnect.addListener((port) => {
  port.onMessage.addListener(async (msg) => {
    console.debug("received msg", msg);
    try {
      await getAnswer(msg.question, (answer) => {
        console.log("answer: " + answer);
        port.postMessage({ answer });
      });
    } catch (err) {
      console.error(err);
      port.postMessage({ error: err.message });
      // The token may have expired; drop it so the next request refetches it.
      cache.delete(KEY_ACCESS_TOKEN);
    }
  });
});
```
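A note on the `onMessage` handler above: each event's `data:` payload is a JSON document, and the text lives at `message.content.parts[0]`. Each event appears to carry the full answer generated so far, which is why the callback can simply replace any previously received text. As an illustration (field values invented), one event might decode to something like:

```js
// Illustrative payload of a single SSE event from the conversation endpoint.
// Only the fields that onMessage reads are shown; the values are made up.
const exampleEvent = {
  message: {
    content: {
      content_type: "text",
      parts: ["Server-Sent Events let a server push data to..."],
    },
  },
};
```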
Finally, create a `stream-async-iterable.mjs` file, which `fetchSSE` uses to consume the response body, and connect your backend to your frontend code:
```js
// stream-async-iterable.mjs
// Wraps a ReadableStream in an async iterator so it can be consumed
// with `for await...of`, as fetchSSE does above.
export async function* streamAsyncIterable(stream) {
  const reader = stream.getReader();
  try {
    while (true) {
      const { done, value } = await reader.read();
      if (done) {
        return;
      }
      yield value;
    }
  } finally {
    // Always release the lock so the stream can be used or cancelled elsewhere.
    reader.releaseLock();
  }
}
```
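The backend above talks to the page over a `webextension-polyfill` port, so the frontend side only needs to connect, send a `question`, and render each `answer` as it streams in. Here is a minimal sketch mirroring the port protocol in `index.mjs`; the `#answer` element is a hypothetical placeholder for your own UI:

```js
// Frontend sketch: connect to the background script and render streamed answers.
import Browser from "webextension-polyfill";

const port = Browser.runtime.connect();

port.onMessage.addListener((msg) => {
  if (msg.error) {
    console.error("backend error:", msg.error);
    return;
  }
  // Each message carries the answer generated so far, so replace the text
  // rather than appending to it.
  document.querySelector("#answer").textContent = msg.answer;
});

port.postMessage({ question: "Explain Server-Sent Events in one sentence." });
```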