const OpenAI = require("openai");

/**
 * @type {import("openai").OpenAI}
 */
const client = new OpenAI({
  baseURL: "http://localhost:3001/api/v1/openai",
  apiKey: "ENTER_ANYTHINGLLM_API_KEY_HERE",
});

(async () => {
  // Models endpoint testing.
  console.log("Fetching /models");
  const modelList = await client.models.list();
  for await (const model of modelList) {
    console.log({ model });
  }

  // Test sync chat completion
  console.log("Running synchronous chat message");
  const syncCompletion = await client.chat.completions.create({
    messages: [
      {
        role: "system",
        content: "You are a helpful assistant who only speaks like a pirate.",
      },
      { role: "user", content: "What is AnythingLLM?" },
      // {
      //   role: "assistant",
      //   content: "Arrr, matey! AnythingLLM be a fine tool fer sailin' the treacherous sea o' information with a powerful language model at yer helm. It's a potent instrument to handle all manner o' tasks involvin' text, like answerin' questions, generating prose, or even havin' a chat with digital scallywags like meself. Be there any specific treasure ye seek in the realm o' AnythingLLM?"
      // },
      // { role: "user", content: "Why are you talking like a pirate?" },
    ],
    model: "anythingllm", // must be workspace-slug
  });
  console.log(syncCompletion.choices[0]);
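
  // For reference: a sketch of the same synchronous call made without the SDK, using
  // plain fetch against the OpenAI-compatible route the SDK derives from `baseURL`
  // (POST {baseURL}/chat/completions with a Bearer token). This is an illustration,
  // not part of the original test script:
  //
  //   const res = await fetch("http://localhost:3001/api/v1/openai/chat/completions", {
  //     method: "POST",
  //     headers: {
  //       Authorization: "Bearer ENTER_ANYTHINGLLM_API_KEY_HERE",
  //       "Content-Type": "application/json",
  //     },
  //     body: JSON.stringify({
  //       model: "anythingllm", // workspace slug
  //       messages: [{ role: "user", content: "What is AnythingLLM?" }],
  //     }),
  //   });
  //   console.log((await res.json()).choices[0]);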

  // Test streaming chat completion
  35. console.log("Running asynchronous chat message");
  36. const asyncCompletion = await client.chat.completions.create({
  37. messages: [
  38. {
  39. role: "system",
  40. content: "You are a helpful assistant who only speaks like a pirate.",
  41. },
  42. { role: "user", content: "What is AnythingLLM?" },
  43. ],
  44. model: "anythingllm", // must be workspace-slug
  45. stream: true,
  46. });
  47. let message = "";
  48. for await (const chunk of asyncCompletion) {
    // Guard against chunks with an empty delta (e.g. the final chunk of a stream).
    message += chunk.choices[0]?.delta?.content ?? "";
    console.log({ message });
  }

  // Vector DB functionality
  console.log("Fetching /vector_stores");
  const vectorDBList = await client.beta.vectorStores.list();
  for await (const db of vectorDBList) {
    console.log(db);
  }
})();
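
// Usage note (not part of the original script; the filename below is an assumption):
// with the `openai` npm package installed, an AnythingLLM instance reachable at
// http://localhost:3001, and a valid API key filled in above, running the file with
// Node should exercise the /models, /chat/completions, and /vector_stores routes:
//
//   npm install openai
//   node openai-compatibility-test.js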