@IlmariKu
Last active January 8, 2024 07:52
Mock Azure OpenAI Node Server For Genie (VSCode)
// Mock server for Azure OpenAI.
// We use this as a basis for a reverse proxy with our own API system,
// so that different teams / people / billing can use OpenAI in our company.
// This server is specifically built to produce the streamed responses that the
// VSCode Genie extension expects:
// https://github.com/ai-genie/chatgpt-vscode
// which uses the Node GPT wrapper underneath:
// https://github.com/transitive-bullshit/chatgpt-api
// Code is not cleaned up. Chunks are streamed with an artificial delay of 100 ms.
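// For reference, each element of streamedData below is written to the response as
// one server-sent event, roughly:
//   data: {"object":"chat.completion.chunk","model":"gpt-35-turbo","choices":[{"delta":{"content":"I"}, ...}]}
// and the stream is terminated with:
//   data: [DONE]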
const http = require("http");
const streamedData = [
  {
    id: "chatcmpl-8de5ti2ZHbhEIpC2miolFmfz3pHPe",
    object: "chat.completion.chunk",
    created: 1704459605,
    model: "gpt-35-turbo",
    choices: [{ finish_reason: null, index: 0, delta: { role: "assistant" } }],
  },
  {
    id: "chatcmpl-8de5ti2ZHbhEIpC2miolFmfz3pHPe",
    object: "chat.completion.chunk",
    created: 1704459605,
    model: "gpt-35-turbo",
    choices: [{ finish_reason: null, index: 0, delta: { content: "I" } }],
  },
  {
    id: "chatcmpl-8de5ti2ZHbhEIpC2miolFmfz3pHPe",
    object: "chat.completion.chunk",
    created: 1704459605,
    model: "gpt-35-turbo",
    choices: [{ finish_reason: null, index: 0, delta: { content: "'m" } }],
  },
  {
    id: "chatcmpl-8de5ti2ZHbhEIpC2miolFmfz3pHPe",
    object: "chat.completion.chunk",
    created: 1704459605,
    model: "gpt-35-turbo",
    choices: [{ finish_reason: null, index: 0, delta: { content: " sorry" } }],
  },
  {
    id: "chatcmpl-8de5ti2ZHbhEIpC2miolFmfz3pHPe",
    object: "chat.completion.chunk",
    created: 1704459605,
    model: "gpt-35-turbo",
    choices: [{ finish_reason: null, index: 0, delta: { content: "," } }],
  },
  {
    id: "chatcmpl-8de5ti2ZHbhEIpC2miolFmfz3pHPe",
    object: "chat.completion.chunk",
    created: 1704459605,
    model: "gpt-35-turbo",
    choices: [{ finish_reason: null, index: 0, delta: { content: " but" } }],
  },
  {
    id: "chatcmpl-8de5ti2ZHbhEIpC2miolFmfz3pHPe",
    object: "chat.completion.chunk",
    created: 1704459605,
    model: "gpt-35-turbo",
    choices: [{ finish_reason: null, index: 0, delta: { content: " I" } }],
  },
  {
    id: "chatcmpl-8de5ti2ZHbhEIpC2miolFmfz3pHPe",
    object: "chat.completion.chunk",
    created: 1704459605,
    model: "gpt-35-turbo",
    choices: [{ finish_reason: null, index: 0, delta: { content: " do" } }],
  },
  {
    id: "chatcmpl-8edmrM9XPTp9Co0XOhzZUPmWekf91",
    object: "chat.completion.chunk",
    created: 1704696753,
    model: "gpt-35-turbo",
    choices: [{ finish_reason: "stop", index: 0, delta: {} }],
  },
];
const server = http.createServer((req, res) => {
  req.on("error", (err) => {
    console.error(err);
  });

  // Respond with headers matching an Azure OpenAI streaming response,
  // so the client treats this as a server-sent-events stream.
  res.writeHead(200, {
    "access-control-allow-origin": "*",
    "cache-control": "no-cache, must-revalidate",
    "transfer-encoding": "chunked",
    "content-type": "text/event-stream",
    "apim-request-id": "5397caa6-778d-41c3-9435-1d80c158c5e9",
    "strict-transport-security": "max-age=31536000; includeSubDomains; preload",
    "azureml-model-session": "turbo-0314-8acdca95",
    date: "Fri, 05 Jan 2024 14:36:24 GMT",
  });

  let datanumero = 0; // index of the next chunk to send
  let intervallinumero; // interval handle, so the timer can be cleared

  // Stream one pre-recorded chunk every 100 ms as an SSE event,
  // then close the stream with the [DONE] sentinel.
  function callbackFunc() {
    const streamDataString = JSON.stringify(streamedData[datanumero]);
    res.write(`data: ${streamDataString}\n\n`);
    datanumero += 1;
    if (datanumero === streamedData.length) {
      clearInterval(intervallinumero);
      res.end("data: [DONE]\n\n");
    }
  }
  intervallinumero = setInterval(callbackFunc, 100);
});

server.listen(8003, () => {
  console.log("Server started at 8003");
});
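For a quick local check, any HTTP client will do; a minimal sketch in plain Node (saved as a separate file, port 8003 assumed from the listing above) that prints the streamed events as they arrive:

const http = require("http");

http.get({ host: "localhost", port: 8003, path: "/" }, (res) => {
  res.setEncoding("utf8");
  // Each SSE event ("data: ...") arrives roughly 100 ms apart.
  res.on("data", (chunk) => process.stdout.write(chunk));
  res.on("end", () => console.log("\n--- stream closed ---"));
});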
IlmariKu commented Jan 8, 2024

You probably need to add "http.proxyStrictSSL": false to your VS Code settings JSON for it to work.
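For reference, that entry goes into your user or workspace settings.json, roughly like this (merged with whatever settings you already have):

{
  "http.proxyStrictSSL": false
}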
