You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

110 lines
3.1 KiB

11 months ago
  1. const { v4: uuidv4 } = require("uuid");
  2. const { reqBody, multiUserMode } = require("../../utils/http");
  3. const { Telemetry } = require("../../models/telemetry");
  4. const { streamChatWithForEmbed } = require("../../utils/chats/embed");
  5. const { EmbedChats } = require("../../models/embedChats");
  6. const {
  7. validEmbedConfig,
  8. canRespond,
  9. setConnectionMeta,
  10. } = require("../../utils/middleware/embedMiddleware");
  11. const {
  12. convertToChatHistory,
  13. writeResponseChunk,
  14. } = require("../../utils/helpers/chat/responses");
  15. function embeddedEndpoints(app) {
  16. if (!app) return;
  17. app.post(
  18. "/embed/:embedId/stream-chat",
  19. [validEmbedConfig, setConnectionMeta, canRespond],
  20. async (request, response) => {
  21. try {
  22. const embed = response.locals.embedConfig;
  23. const {
  24. sessionId,
  25. message,
  26. // optional keys for override of defaults if enabled.
  27. prompt = null,
  28. model = null,
  29. temperature = null,
  30. username = null,
  31. } = reqBody(request);
  32. response.setHeader("Cache-Control", "no-cache");
  33. response.setHeader("Content-Type", "text/event-stream");
  34. response.setHeader("Access-Control-Allow-Origin", "*");
  35. response.setHeader("Connection", "keep-alive");
  36. response.flushHeaders();
  37. await streamChatWithForEmbed(response, embed, message, sessionId, {
  38. prompt,
  39. model,
  40. temperature,
  41. username,
  42. });
  43. await Telemetry.sendTelemetry("embed_sent_chat", {
  44. multiUserMode: multiUserMode(response),
  45. LLMSelection: process.env.LLM_PROVIDER || "openai",
  46. Embedder: process.env.EMBEDDING_ENGINE || "inherit",
  47. VectorDbSelection: process.env.VECTOR_DB || "lancedb",
  48. });
  49. response.end();
  50. } catch (e) {
  51. console.error(e);
  52. writeResponseChunk(response, {
  53. id: uuidv4(),
  54. type: "abort",
  55. sources: [],
  56. textResponse: null,
  57. close: true,
  58. error: e.message,
  59. });
  60. response.end();
  61. }
  62. }
  63. );
  64. app.get(
  65. "/embed/:embedId/:sessionId",
  66. [validEmbedConfig],
  67. async (request, response) => {
  68. try {
  69. const { sessionId } = request.params;
  70. const embed = response.locals.embedConfig;
  71. const history = await EmbedChats.forEmbedByUser(
  72. embed.id,
  73. sessionId,
  74. null,
  75. null,
  76. true
  77. );
  78. response.status(200).json({ history: convertToChatHistory(history) });
  79. } catch (e) {
  80. console.error(e.message, e);
  81. response.sendStatus(500).end();
  82. }
  83. }
  84. );
  85. app.delete(
  86. "/embed/:embedId/:sessionId",
  87. [validEmbedConfig],
  88. async (request, response) => {
  89. try {
  90. const { sessionId } = request.params;
  91. const embed = response.locals.embedConfig;
  92. await EmbedChats.markHistoryInvalid(embed.id, sessionId);
  93. response.status(200).end();
  94. } catch (e) {
  95. console.error(e.message, e);
  96. response.sendStatus(500).end();
  97. }
  98. }
  99. );
  100. }
  101. module.exports = { embeddedEndpoints };