You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

210 lines
6.4 KiB

11 months ago
  1. const { v4: uuidv4 } = require("uuid");
  2. const { reqBody, userFromSession, multiUserMode } = require("../utils/http");
  3. const { validatedRequest } = require("../utils/middleware/validatedRequest");
  4. const { Telemetry } = require("../models/telemetry");
  5. const { streamChatWithWorkspace } = require("../utils/chats/stream");
  6. const {
  7. ROLES,
  8. flexUserRoleValid,
  9. } = require("../utils/middleware/multiUserProtected");
  10. const { EventLogs } = require("../models/eventLogs");
  11. const {
  12. validWorkspaceAndThreadSlug,
  13. validWorkspaceSlug,
  14. } = require("../utils/middleware/validWorkspace");
  15. const { writeResponseChunk } = require("../utils/helpers/chat/responses");
  16. const { WorkspaceThread } = require("../models/workspaceThread");
  17. const { User } = require("../models/user");
  18. const truncate = require("truncate");
  19. function chatEndpoints(app) {
  20. if (!app) return;
  21. app.post(
  22. "/workspace/:slug/stream-chat",
  23. [validatedRequest, flexUserRoleValid([ROLES.all]), validWorkspaceSlug],
  24. async (request, response) => {
  25. try {
  26. const user = await userFromSession(request, response);
  27. const { message, attachments = [] } = reqBody(request);
  28. const workspace = response.locals.workspace;
  29. if (!message?.length) {
  30. response.status(400).json({
  31. id: uuidv4(),
  32. type: "abort",
  33. textResponse: null,
  34. sources: [],
  35. close: true,
  36. error: !message?.length ? "Message is empty." : null,
  37. });
  38. return;
  39. }
  40. response.setHeader("Cache-Control", "no-cache");
  41. response.setHeader("Content-Type", "text/event-stream");
  42. response.setHeader("Access-Control-Allow-Origin", "*");
  43. response.setHeader("Connection", "keep-alive");
  44. response.flushHeaders();
  45. if (multiUserMode(response) && !(await User.canSendChat(user))) {
  46. writeResponseChunk(response, {
  47. id: uuidv4(),
  48. type: "abort",
  49. textResponse: null,
  50. sources: [],
  51. close: true,
  52. error: `You have met your maximum 24 hour chat quota of ${user.dailyMessageLimit} chats. Try again later.`,
  53. });
  54. return;
  55. }
  56. await streamChatWithWorkspace(
  57. response,
  58. workspace,
  59. message,
  60. workspace?.chatMode,
  61. user,
  62. null,
  63. attachments
  64. );
  65. await Telemetry.sendTelemetry("sent_chat", {
  66. multiUserMode: multiUserMode(response),
  67. LLMSelection: process.env.LLM_PROVIDER || "openai",
  68. Embedder: process.env.EMBEDDING_ENGINE || "inherit",
  69. VectorDbSelection: process.env.VECTOR_DB || "lancedb",
  70. multiModal: Array.isArray(attachments) && attachments?.length !== 0,
  71. TTSSelection: process.env.TTS_PROVIDER || "native",
  72. });
  73. await EventLogs.logEvent(
  74. "sent_chat",
  75. {
  76. workspaceName: workspace?.name,
  77. chatModel: workspace?.chatModel || "System Default",
  78. },
  79. user?.id
  80. );
  81. response.end();
  82. } catch (e) {
  83. console.error(e);
  84. writeResponseChunk(response, {
  85. id: uuidv4(),
  86. type: "abort",
  87. textResponse: null,
  88. sources: [],
  89. close: true,
  90. error: e.message,
  91. });
  92. response.end();
  93. }
  94. }
  95. );
  96. app.post(
  97. "/workspace/:slug/thread/:threadSlug/stream-chat",
  98. [
  99. validatedRequest,
  100. flexUserRoleValid([ROLES.all]),
  101. validWorkspaceAndThreadSlug,
  102. ],
  103. async (request, response) => {
  104. try {
  105. const user = await userFromSession(request, response);
  106. const { message, attachments = [] } = reqBody(request);
  107. const workspace = response.locals.workspace;
  108. const thread = response.locals.thread;
  109. if (!message?.length) {
  110. response.status(400).json({
  111. id: uuidv4(),
  112. type: "abort",
  113. textResponse: null,
  114. sources: [],
  115. close: true,
  116. error: !message?.length ? "Message is empty." : null,
  117. });
  118. return;
  119. }
  120. response.setHeader("Cache-Control", "no-cache");
  121. response.setHeader("Content-Type", "text/event-stream");
  122. response.setHeader("Access-Control-Allow-Origin", "*");
  123. response.setHeader("Connection", "keep-alive");
  124. response.flushHeaders();
  125. if (multiUserMode(response) && !(await User.canSendChat(user))) {
  126. writeResponseChunk(response, {
  127. id: uuidv4(),
  128. type: "abort",
  129. textResponse: null,
  130. sources: [],
  131. close: true,
  132. error: `You have met your maximum 24 hour chat quota of ${user.dailyMessageLimit} chats. Try again later.`,
  133. });
  134. return;
  135. }
  136. await streamChatWithWorkspace(
  137. response,
  138. workspace,
  139. message,
  140. workspace?.chatMode,
  141. user,
  142. thread,
  143. attachments
  144. );
  145. // If thread was renamed emit event to frontend via special `action` response.
  146. await WorkspaceThread.autoRenameThread({
  147. thread,
  148. workspace,
  149. user,
  150. newName: truncate(message, 22),
  151. onRename: (thread) => {
  152. writeResponseChunk(response, {
  153. action: "rename_thread",
  154. thread: {
  155. slug: thread.slug,
  156. name: thread.name,
  157. },
  158. });
  159. },
  160. });
  161. await Telemetry.sendTelemetry("sent_chat", {
  162. multiUserMode: multiUserMode(response),
  163. LLMSelection: process.env.LLM_PROVIDER || "openai",
  164. Embedder: process.env.EMBEDDING_ENGINE || "inherit",
  165. VectorDbSelection: process.env.VECTOR_DB || "lancedb",
  166. multiModal: Array.isArray(attachments) && attachments?.length !== 0,
  167. TTSSelection: process.env.TTS_PROVIDER || "native",
  168. });
  169. await EventLogs.logEvent(
  170. "sent_chat",
  171. {
  172. workspaceName: workspace.name,
  173. thread: thread.name,
  174. chatModel: workspace?.chatModel || "System Default",
  175. },
  176. user?.id
  177. );
  178. response.end();
  179. } catch (e) {
  180. console.error(e);
  181. writeResponseChunk(response, {
  182. id: uuidv4(),
  183. type: "abort",
  184. textResponse: null,
  185. sources: [],
  186. close: true,
  187. error: e.message,
  188. });
  189. response.end();
  190. }
  191. }
  192. );
  193. }
// Consumed by the server's route bootstrapper to mount the chat endpoints.
module.exports = { chatEndpoints };