diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000000000000000000000000000000000000..f0083ba105b43c6d3f07cbfbd1fa9b385509027d --- /dev/null +++ b/.dockerignore @@ -0,0 +1,7 @@ +node_modules +dist +build + +**/node_modules +**/build +**/dist diff --git a/.eslintrc.js b/.eslintrc.js new file mode 100644 index 0000000000000000000000000000000000000000..f842b51fd7fffad9ed5746316d64a86b6ea22273 --- /dev/null +++ b/.eslintrc.js @@ -0,0 +1,28 @@ +module.exports = { + extends: [ + 'eslint:recommended', + 'plugin:markdown/recommended', + 'plugin:react/recommended', + 'plugin:react/jsx-runtime', + 'plugin:react-hooks/recommended', + 'plugin:jsx-a11y/recommended', + 'plugin:prettier/recommended' + ], + settings: { + react: { + version: 'detect' + } + }, + parser: '@typescript-eslint/parser', + ignorePatterns: ['**/node_modules', '**/dist', '**/build', '**/package-lock.json'], + plugins: ['unused-imports'], + rules: { + '@typescript-eslint/explicit-module-boundary-types': 'off', + 'no-unused-vars': 'off', + 'unused-imports/no-unused-imports': 'warn', + 'unused-imports/no-unused-vars': ['warn', { vars: 'all', varsIgnorePattern: '^_', args: 'after-used', argsIgnorePattern: '^_' }], + 'no-undef': 'off', + 'no-console': [process.env.CI ? 
'error' : 'warn', { allow: ['warn', 'error', 'info'] }], + 'prettier/prettier': 'error' + } +} diff --git a/.gitattributes b/.gitattributes index c7d9f3332a950355d5a77d85000f05e6f45435ea..94d2acef4fd841c7a810890ca8ca73eac2055cbd 100644 --- a/.gitattributes +++ b/.gitattributes @@ -32,3 +32,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text *.zip filter=lfs diff=lfs merge=lfs -text *.zst filter=lfs diff=lfs merge=lfs -text *tfevents* filter=lfs diff=lfs merge=lfs -text +*.gif filter=lfs diff=lfs merge=lfs -text diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..9f5ef2e56b5eec823f2b355b8d167a9d34509ead --- /dev/null +++ b/.gitignore @@ -0,0 +1,45 @@ +# editor +.idea +.vscode + +# dependencies +**/node_modules +**/package-lock.json +**/yarn.lock + +## logs +**/*.log + +## build +**/dist +**/build + +## temp +**/tmp +**/temp + +## test +**/coverage + +# misc +.DS_Store + +## env +.env.local +.env.development.local +.env.test.local +.env.production.local +.env + +## turbo +.turbo + +## secrets +**/*.key +**/api.json + +## uploads +**/uploads + +## compressed +**/*.tgz \ No newline at end of file diff --git a/.husky/pre-commit b/.husky/pre-commit new file mode 100644 index 0000000000000000000000000000000000000000..7aba0476222485710208a70968cde6d8cde1a772 --- /dev/null +++ b/.husky/pre-commit @@ -0,0 +1,5 @@ +#!/bin/sh +. 
"$(dirname "$0")/_/husky.sh" + +yarn quick # prettify +yarn lint-staged # eslint lint(also include prettify but prettify support more file extensions than eslint, so run prettify first) \ No newline at end of file diff --git a/.prettierignore b/.prettierignore new file mode 100644 index 0000000000000000000000000000000000000000..ba08339e832e2b150abde4a931b3573ac19d4ab2 --- /dev/null +++ b/.prettierignore @@ -0,0 +1,3 @@ +**/node_modules +**/dist +**/build \ No newline at end of file diff --git a/.prettierrc.js b/.prettierrc.js new file mode 100644 index 0000000000000000000000000000000000000000..00085763e34e6e71ad47c490a481db2ac813ace2 --- /dev/null +++ b/.prettierrc.js @@ -0,0 +1,9 @@ +module.exports = { + printWidth: 140, + singleQuote: true, + jsxSingleQuote: true, + trailingComma: 'none', + tabWidth: 4, + semi: false, + endOfLine: 'auto' +} diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md new file mode 100644 index 0000000000000000000000000000000000000000..7865b84e0bfee9bb4e7cdbd1f59ffa88e6357692 --- /dev/null +++ b/CODE_OF_CONDUCT.md @@ -0,0 +1,74 @@ +# Contributor Covenant Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of experience, +nationality, personal appearance, race, religion, or sexual identity and +orientation. 
+ +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +- Using welcoming and inclusive language +- Being respectful of differing viewpoints and experiences +- Gracefully accepting constructive criticism +- Focusing on what is best for the community +- Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +- The use of sexualized language or imagery and unwelcome sexual attention or + advances +- Trolling, insulting/derogatory comments, and personal or political attacks +- Public or private harassment +- Publishing others' private information, such as a physical or electronic + address, without explicit permission +- Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or +reject comments, commits, code, wiki edits, issues, and other contributions +that are not aligned to this Code of Conduct, or to ban temporarily or +permanently any contributor for other behaviors that they deem inappropriate, +threatening, offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. 
+ +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be +reported by contacting the project team at hello@flowiseai.com. All +complaints will be reviewed and investigated and will result in a response that +is deemed necessary and appropriate to the circumstances. The project team is +obligated to maintain confidentiality with regard to the reporter of an incident. +Further details of specific enforcement policies may be posted separately. + +Project maintainers who do not follow or enforce the Code of Conduct in good +faith may face temporary or permanent repercussions as determined by other +members of the project's leadership. + +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, +available at [http://contributor-covenant.org/version/1/4][version] + +[homepage]: http://contributor-covenant.org +[version]: http://contributor-covenant.org/version/1/4/ diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 0000000000000000000000000000000000000000..a09051f321d98eb2ccbf3e5323dd92b61a5f9603 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,127 @@ + + +# Contributing to Flowise + +We appreciate any form of contributions. + +## ⭐ Star + +Star and share the [Github Repo](https://github.com/FlowiseAI/Flowise). + +## 🙋 Q&A + +Search up for any questions in [Q&A section](https://github.com/FlowiseAI/Flowise/discussions/categories/q-a), if you can't find one, don't hesitate to create one. It might helps others that have similar question. + +## 🙌 Share Chatflow + +Yes! Sharing how you use Flowise is a way of contribution. Export your chatflow as JSON, attach a screenshot and share it in [Show and Tell section](https://github.com/FlowiseAI/Flowise/discussions/categories/show-and-tell). + +## 💡 Ideas + +Ideas are welcome such as new feature, apps integration, and blockchain networks. 
Submit in [Ideas section](https://github.com/FlowiseAI/Flowise/discussions/categories/ideas). + +## 🐞 Report Bugs + +Found an issue? [Report it](https://github.com/FlowiseAI/Flowise/issues/new/choose). + +## 👨‍💻 Contribute to Code + +Not sure what to contribute? Some ideas: + +- Create new components from Langchain +- Update existing components such as extending functionality, fixing bugs +- Add new chatflow ideas + +### Developers + +Flowise has 3 different modules in a single mono repository. + +- `server`: Node backend to serve API logics +- `ui`: React frontend +- `components`: Langchain components + +#### Prerequisite + +- Install Yarn + ```bash + npm i -g yarn + ``` + +#### Step by step + +1. Fork the official [Flowise Github Repository](https://github.com/FlowiseAI/Flowise). + +2. Clone your forked repository. + +3. Create a new branch, see [guide](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/creating-and-deleting-branches-within-your-repository). Naming conventions: + + - For feature branch: `feature/` + - For bug fix branch: `bugfix/`. + +4. Switch to the newly created branch. + +5. Go into repository folder + + ```bash + cd Flowise + ``` + +6. Install all dependencies of all modules: + + ```bash + yarn install + ``` + +7. Build all the code: + + ```bash + yarn build + ``` + +8. Start the app on [http://localhost:3000](http://localhost:3000) + + ```bash + yarn start + ``` + +9. For development, run + + ```bash + yarn dev + ``` + + Any changes made in `packages/ui` or `packages/server` will be reflected on [http://localhost:8080](http://localhost:8080) + + For changes made in `packages/components`, run `yarn build` again to pickup the changes. + +10. After making all the changes, run + + ```bash + yarn build + ``` + + and + + ```bash + yarn start + ``` + + to make sure everything works fine in production. + +11. 
Commit code and submit Pull Request from forked branch pointing to [Flowise master](https://github.com/FlowiseAI/Flowise/tree/master). + +## 📖 Contribute to Docs + +In-Progress + +## 🏷️ Pull Request process + +A member of the FlowiseAI team will automatically be notified/assigned when you open a pull request. You can also reach out to us on [Discord](https://discord.gg/jbaHfsRVBW). + +## 📃 Contributor License Agreement + +Before we can merge your contribution you have to sign our [Contributor License Agreement (CLA)](https://cla-assistant.io/FlowiseAI/Flowise). The CLA contains the terms and conditions under which the contribution is submitted. You need to do this only once for your first pull request. Keep in mind that without a signed CLA we cannot merge your contribution. + +## 📜 Code of Conduct + +This project and everyone participating in it are governed by the Code of Conduct which can be found in the [file](CODE_OF_CONDUCT.md). By participating, you are expected to uphold this code. Please report unacceptable behavior to hello@flowiseai.com. diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000000000000000000000000000000000000..bee5164f40bdba395d346f6753cd9c680efc2e8a --- /dev/null +++ b/Dockerfile @@ -0,0 +1,33 @@ +# Build local monorepo image +# docker build --no-cache -t flowise . + +# Run image +# docker run -d -p 3000:3000 flowise + +FROM node:18-alpine +RUN apk add --update libc6-compat python3 make g++ + +WORKDIR /usr/src/packages + +# Copy root package.json and lockfile +COPY package.json yarn.loc[k] ./ + +# Copy components package.json +COPY packages/components/package.json ./packages/components/package.json + +# Copy ui package.json +COPY packages/ui/package.json ./packages/ui/package.json + +# Copy server package.json +COPY packages/server/package.json ./packages/server/package.json + +RUN yarn install + +# Copy app source +COPY . . 
+ +RUN yarn build + +EXPOSE 7860 + +CMD [ "yarn", "start" ] diff --git a/LICENSE.md b/LICENSE.md new file mode 100644 index 0000000000000000000000000000000000000000..56552bdf8cf79398569997ad28abc114131406df --- /dev/null +++ b/LICENSE.md @@ -0,0 +1,21 @@ +The MIT License + +Copyright (c) 2023 FlowiseAI + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/README.md b/README.md index 8251cb577238cfee2a0ff49eedddd3776f0da248..7d6de543044b261e76a8990627e34e253b0d58cd 100644 --- a/README.md +++ b/README.md @@ -5,6 +5,7 @@ colorFrom: gray colorTo: pink sdk: docker pinned: false +app_port: 7860 --- Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference diff --git a/assets/Demo.png b/assets/Demo.png new file mode 100644 index 0000000000000000000000000000000000000000..5b875c8c7a5091b06d440f4f516ea159ef1fc10e Binary files /dev/null and b/assets/Demo.png differ diff --git a/assets/FloWiseAI.png b/assets/FloWiseAI.png new file mode 100644 index 0000000000000000000000000000000000000000..cb3d56362b00ba5c5f909328782ac262439da071 Binary files /dev/null and b/assets/FloWiseAI.png differ diff --git a/assets/FloWiseAI_black.png b/assets/FloWiseAI_black.png new file mode 100644 index 0000000000000000000000000000000000000000..bc49f25fbf4e368d63259fcdad4a763be4c24542 Binary files /dev/null and b/assets/FloWiseAI_black.png differ diff --git a/assets/FloWiseAI_dark.png b/assets/FloWiseAI_dark.png new file mode 100644 index 0000000000000000000000000000000000000000..55ca9771b84dc463120f479fa9e5392e8f02a7f8 Binary files /dev/null and b/assets/FloWiseAI_dark.png differ diff --git a/assets/FloWiseAI_primary.png b/assets/FloWiseAI_primary.png new file mode 100644 index 0000000000000000000000000000000000000000..3a9f1f0c14547447b288dc8b4b7fdcb47a30e489 Binary files /dev/null and b/assets/FloWiseAI_primary.png differ diff --git a/babel.config.js b/babel.config.js new file mode 100644 index 0000000000000000000000000000000000000000..4a7f96e9ac2bdd5319eb0923fbf24a4ee15ac9fa --- /dev/null +++ b/babel.config.js @@ -0,0 +1,13 @@ +module.exports = { + presets: [ + '@babel/preset-typescript', + [ + '@babel/preset-env', + { + targets: { + node: 'current' + } + } + ] + ] +} diff --git a/docker/.env.example b/docker/.env.example new file mode 100644 index 
0000000000000000000000000000000000000000..f32111966ddfbbd10e161a786c2f8ca69aa94f10 --- /dev/null +++ b/docker/.env.example @@ -0,0 +1,3 @@ +PORT=3000 +# FLOWISE_USERNAME=user +# FLOWISE_PASSWORD=1234 diff --git a/docker/Dockerfile b/docker/Dockerfile new file mode 100644 index 0000000000000000000000000000000000000000..e4bf704a0e93719ec77c6249bdf1b6e89494f16f --- /dev/null +++ b/docker/Dockerfile @@ -0,0 +1,13 @@ +FROM node:18-alpine + +USER root + +RUN apk add --no-cache git +RUN apk add --no-cache python3 py3-pip make g++ + +# You can install a specific version like: flowise@1.0.0 +RUN npm install -g flowise + +WORKDIR /data + +CMD "flowise" \ No newline at end of file diff --git a/docker/README.md b/docker/README.md new file mode 100644 index 0000000000000000000000000000000000000000..7f991a042dd66c05eb53343e4852c58d5e112e65 --- /dev/null +++ b/docker/README.md @@ -0,0 +1,24 @@ +# Flowise Docker Hub Image + +Starts Flowise from [DockerHub Image](https://hub.docker.com/repository/docker/flowiseai/flowise/general) + +## Usage + +1. Create `.env` file and specify the `PORT` (refer to `.env.example`) +2. `docker-compose up -d` +3. Open [http://localhost:3000](http://localhost:3000) +4. You can bring the containers down by `docker-compose stop` + +## With Authrorization + +1. Create `.env` file and specify the `PORT`, `FLOWISE_USERNAME`, and `FLOWISE_PASSWORD` (refer to `.env.example`) +2. Pass `FLOWISE_USERNAME` and `FLOWISE_PASSWORD` to the `docker-compose.yml` file: + ``` + environment: + - PORT=${PORT} + - FLOWISE_USERNAME=${FLOWISE_USERNAME} + - FLOWISE_PASSWORD=${FLOWISE_PASSWORD} + ``` +3. `docker-compose up -d` +4. Open [http://localhost:3000](http://localhost:3000) +5. 
You can bring the containers down by `docker-compose stop` diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml new file mode 100644 index 0000000000000000000000000000000000000000..c776f96e2c5e59dd72fcf19006d052dd90f4670b --- /dev/null +++ b/docker/docker-compose.yml @@ -0,0 +1,15 @@ +version: '3.1' + +services: + flowise: + image: flowiseai/flowise + restart: always + environment: + - PORT=${PORT} + - FLOWISE_USERNAME=${FLOWISE_USERNAME} + - FLOWISE_PASSWORD=${FLOWISE_PASSWORD} + ports: + - '${PORT}:${PORT}' + volumes: + - ~/.flowise:/root/.flowise + command: /bin/sh -c "sleep 3; flowise start" diff --git a/images/flowise.gif b/images/flowise.gif new file mode 100644 index 0000000000000000000000000000000000000000..bca41f11bf189fa874974cd706fe2a7cf386d9eb --- /dev/null +++ b/images/flowise.gif @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9540a0d3514f0840d00ef06acd48f6468baf4f114c01809e7f5ec845a726ee8e +size 4920556 diff --git a/package.json b/package.json new file mode 100644 index 0000000000000000000000000000000000000000..b077c7ecdcb0a2825d2d6c327302c4f99ee9b2fb --- /dev/null +++ b/package.json @@ -0,0 +1,56 @@ +{ + "name": "flowise", + "version": "1.2.11", + "private": true, + "homepage": "https://flowiseai.com", + "workspaces": [ + "packages/*", + "flowise", + "ui", + "components" + ], + "scripts": { + "build": "turbo run build", + "build-force": "turbo run build --force", + "dev": "turbo run dev --parallel", + "start": "run-script-os", + "start:windows": "cd packages/server/bin && run start", + "start:default": "cd packages/server/bin && ./run start", + "clean": "npm exec -ws -- rimraf dist build", + "format": "prettier --write \"**/*.{ts,tsx,md}\"", + "test": "turbo run test", + "lint": "eslint \"**/*.{js,jsx,ts,tsx,json,md}\"", + "lint-fix": "yarn lint --fix", + "quick": "pretty-quick --staged", + "postinstall": "husky install" + }, + "lint-staged": { + "*.{js,jsx,ts,tsx,json,md}": "eslint --fix" + }, + 
"devDependencies": { + "turbo": "1.7.4", + "@babel/preset-env": "^7.19.4", + "@babel/preset-typescript": "7.18.6", + "@types/express": "^4.17.13", + "@typescript-eslint/typescript-estree": "^5.39.0", + "eslint": "^8.24.0", + "eslint-config-prettier": "^8.3.0", + "eslint-config-react-app": "^7.0.1", + "eslint-plugin-jsx-a11y": "^6.6.1", + "eslint-plugin-markdown": "^3.0.0", + "eslint-plugin-prettier": "^3.4.0", + "eslint-plugin-react": "^7.26.1", + "eslint-plugin-react-hooks": "^4.6.0", + "eslint-plugin-unused-imports": "^2.0.0", + "husky": "^8.0.1", + "lint-staged": "^13.0.3", + "prettier": "^2.7.1", + "pretty-quick": "^3.1.3", + "rimraf": "^3.0.2", + "run-script-os": "^1.1.6", + "typescript": "^4.8.4" + }, + "engines": { + "node": ">=18.15.0" + } +} diff --git a/packages/components/.env.example b/packages/components/.env.example new file mode 100644 index 0000000000000000000000000000000000000000..352bc6cb0a84f997533289f90b32c3467cf5a64e --- /dev/null +++ b/packages/components/.env.example @@ -0,0 +1 @@ +DEBUG=true \ No newline at end of file diff --git a/packages/components/README.md b/packages/components/README.md new file mode 100644 index 0000000000000000000000000000000000000000..8014661e14645b582d41dfee93ec72618f899686 --- /dev/null +++ b/packages/components/README.md @@ -0,0 +1,25 @@ + + +# Flowise Components + +Apps integration for Flowise. Contain Nodes and Credentials. + +![Flowise](https://github.com/FlowiseAI/Flowise/blob/main/images/flowise.gif?raw=true) + +Install: + +```bash +npm i flowise-components +``` + +## Debug + +To view all the logs, create an `.env` file and add: + +``` +DEBUG=true +``` + +## License + +Source code in this repository is made available under the [MIT License](https://github.com/FlowiseAI/Flowise/blob/master/LICENSE.md). 
diff --git a/packages/components/gulpfile.ts b/packages/components/gulpfile.ts new file mode 100644 index 0000000000000000000000000000000000000000..c4d2d35354e09b7a573bde679fed1c0ba7f4a06c --- /dev/null +++ b/packages/components/gulpfile.ts @@ -0,0 +1,9 @@ +import gulp from 'gulp' + +const { src, dest } = gulp + +function copyIcons() { + return src(['nodes/**/*.{jpg,png,svg}']).pipe(dest('dist/nodes')) +} + +exports.default = copyIcons diff --git a/packages/components/nodes/agents/AutoGPT/AutoGPT.ts b/packages/components/nodes/agents/AutoGPT/AutoGPT.ts new file mode 100644 index 0000000000000000000000000000000000000000..ca1185004156055ea15c2cdfdee37dae7f4f5137 --- /dev/null +++ b/packages/components/nodes/agents/AutoGPT/AutoGPT.ts @@ -0,0 +1,99 @@ +import { INode, INodeData, INodeParams } from '../../../src/Interface' +import { BaseChatModel } from 'langchain/chat_models/base' +import { AutoGPT } from 'langchain/experimental/autogpt' +import { Tool } from 'langchain/tools' +import { VectorStoreRetriever } from 'langchain/vectorstores/base' +import { flatten } from 'lodash' + +class AutoGPT_Agents implements INode { + label: string + name: string + description: string + type: string + icon: string + category: string + baseClasses: string[] + inputs: INodeParams[] + + constructor() { + this.label = 'AutoGPT' + this.name = 'autoGPT' + this.type = 'AutoGPT' + this.category = 'Agents' + this.icon = 'autogpt.png' + this.description = 'Autonomous agent with chain of thoughts for self-guided task completion' + this.baseClasses = ['AutoGPT'] + this.inputs = [ + { + label: 'Allowed Tools', + name: 'tools', + type: 'Tool', + list: true + }, + { + label: 'Chat Model', + name: 'model', + type: 'BaseChatModel' + }, + { + label: 'Vector Store Retriever', + name: 'vectorStoreRetriever', + type: 'BaseRetriever' + }, + { + label: 'AutoGPT Name', + name: 'aiName', + type: 'string', + placeholder: 'Tom', + optional: true + }, + { + label: 'AutoGPT Role', + name: 'aiRole', + type: 
'string', + placeholder: 'Assistant', + optional: true + }, + { + label: 'Maximum Loop', + name: 'maxLoop', + type: 'number', + default: 5, + optional: true + } + ] + } + + async init(nodeData: INodeData): Promise { + const model = nodeData.inputs?.model as BaseChatModel + const vectorStoreRetriever = nodeData.inputs?.vectorStoreRetriever as VectorStoreRetriever + let tools = nodeData.inputs?.tools as Tool[] + tools = flatten(tools) + const aiName = (nodeData.inputs?.aiName as string) || 'AutoGPT' + const aiRole = (nodeData.inputs?.aiRole as string) || 'Assistant' + const maxLoop = nodeData.inputs?.maxLoop as string + + const autogpt = AutoGPT.fromLLMAndTools(model, tools, { + memory: vectorStoreRetriever, + aiName, + aiRole + }) + + autogpt.maxIterations = parseInt(maxLoop, 10) + + return autogpt + } + + async run(nodeData: INodeData, input: string): Promise { + const executor = nodeData.instance as AutoGPT + try { + const res = await executor.run([input]) + return res || 'I have completed all my tasks.' 
+ } catch (e) { + console.error(e) + throw new Error(e) + } + } +} + +module.exports = { nodeClass: AutoGPT_Agents } diff --git a/packages/components/nodes/agents/AutoGPT/autogpt.png b/packages/components/nodes/agents/AutoGPT/autogpt.png new file mode 100644 index 0000000000000000000000000000000000000000..bdeff726c9ad905b2293d6a8bc5780fba06bf7f7 Binary files /dev/null and b/packages/components/nodes/agents/AutoGPT/autogpt.png differ diff --git a/packages/components/nodes/agents/BabyAGI/BabyAGI.ts b/packages/components/nodes/agents/BabyAGI/BabyAGI.ts new file mode 100644 index 0000000000000000000000000000000000000000..91af14699bc426b93d7bae6c5024e6357c3b8aa1 --- /dev/null +++ b/packages/components/nodes/agents/BabyAGI/BabyAGI.ts @@ -0,0 +1,63 @@ +import { INode, INodeData, INodeParams } from '../../../src/Interface' +import { BabyAGI } from './core' +import { BaseChatModel } from 'langchain/chat_models/base' +import { VectorStore } from 'langchain/vectorstores' + +class BabyAGI_Agents implements INode { + label: string + name: string + description: string + type: string + icon: string + category: string + baseClasses: string[] + inputs: INodeParams[] + + constructor() { + this.label = 'BabyAGI' + this.name = 'babyAGI' + this.type = 'BabyAGI' + this.category = 'Agents' + this.icon = 'babyagi.jpg' + this.description = 'Task Driven Autonomous Agent which creates new task and reprioritizes task list based on objective' + this.baseClasses = ['BabyAGI'] + this.inputs = [ + { + label: 'Chat Model', + name: 'model', + type: 'BaseChatModel' + }, + { + label: 'Vector Store', + name: 'vectorStore', + type: 'VectorStore' + }, + { + label: 'Task Loop', + name: 'taskLoop', + type: 'number', + default: 3 + } + ] + } + + async init(nodeData: INodeData): Promise { + const model = nodeData.inputs?.model as BaseChatModel + const vectorStore = nodeData.inputs?.vectorStore as VectorStore + const taskLoop = nodeData.inputs?.taskLoop as string + const k = (vectorStore as any)?.k ?? 
4 + + const babyAgi = BabyAGI.fromLLM(model, vectorStore, parseInt(taskLoop, 10), k) + return babyAgi + } + + async run(nodeData: INodeData, input: string): Promise { + const executor = nodeData.instance as BabyAGI + const objective = input + + const res = await executor.call({ objective }) + return res + } +} + +module.exports = { nodeClass: BabyAGI_Agents } diff --git a/packages/components/nodes/agents/BabyAGI/babyagi.jpg b/packages/components/nodes/agents/BabyAGI/babyagi.jpg new file mode 100644 index 0000000000000000000000000000000000000000..cd5851391776f214998bd85cc2a1a1eb0203c68a Binary files /dev/null and b/packages/components/nodes/agents/BabyAGI/babyagi.jpg differ diff --git a/packages/components/nodes/agents/BabyAGI/core.ts b/packages/components/nodes/agents/BabyAGI/core.ts new file mode 100644 index 0000000000000000000000000000000000000000..444aa3eb569e17f6dae074a4a62229564c86cc72 --- /dev/null +++ b/packages/components/nodes/agents/BabyAGI/core.ts @@ -0,0 +1,270 @@ +import { LLMChain } from 'langchain/chains' +import { BaseChatModel } from 'langchain/chat_models/base' +import { VectorStore } from 'langchain/dist/vectorstores/base' +import { Document } from 'langchain/document' +import { PromptTemplate } from 'langchain/prompts' + +class TaskCreationChain extends LLMChain { + constructor(prompt: PromptTemplate, llm: BaseChatModel) { + super({ prompt, llm }) + } + + static from_llm(llm: BaseChatModel): LLMChain { + const taskCreationTemplate: string = + 'You are a task creation AI that uses the result of an execution agent' + + ' to create new tasks with the following objective: {objective},' + + ' The last completed task has the result: {result}.' + + ' This result was based on this task description: {task_description}.' + + ' These are incomplete tasks list: {incomplete_tasks}.' + + ' Based on the result, create new tasks to be completed' + + ' by the AI system that do not overlap with incomplete tasks.' + + ' Return the tasks as an array.' 
+ + const prompt = new PromptTemplate({ + template: taskCreationTemplate, + inputVariables: ['result', 'task_description', 'incomplete_tasks', 'objective'] + }) + + return new TaskCreationChain(prompt, llm) + } +} + +class TaskPrioritizationChain extends LLMChain { + constructor(prompt: PromptTemplate, llm: BaseChatModel) { + super({ prompt, llm }) + } + + static from_llm(llm: BaseChatModel): TaskPrioritizationChain { + const taskPrioritizationTemplate: string = + 'You are a task prioritization AI tasked with cleaning the formatting of and reprioritizing' + + ' the following task list: {task_names}.' + + ' Consider the ultimate objective of your team: {objective}.' + + ' Do not remove any tasks. Return the result as a numbered list, like:' + + ' #. First task' + + ' #. Second task' + + ' Start the task list with number {next_task_id}.' + const prompt = new PromptTemplate({ + template: taskPrioritizationTemplate, + inputVariables: ['task_names', 'next_task_id', 'objective'] + }) + return new TaskPrioritizationChain(prompt, llm) + } +} + +class ExecutionChain extends LLMChain { + constructor(prompt: PromptTemplate, llm: BaseChatModel) { + super({ prompt, llm }) + } + + static from_llm(llm: BaseChatModel): LLMChain { + const executionTemplate: string = + 'You are an AI who performs one task based on the following objective: {objective}.' + + ' Take into account these previously completed tasks: {context}.' + + ' Your task: {task}.' 
+ + ' Response:' + + const prompt = new PromptTemplate({ + template: executionTemplate, + inputVariables: ['objective', 'context', 'task'] + }) + + return new ExecutionChain(prompt, llm) + } +} + +async function getNextTask( + taskCreationChain: LLMChain, + result: string, + taskDescription: string, + taskList: string[], + objective: string +): Promise { + const incompleteTasks: string = taskList.join(', ') + const response: string = await taskCreationChain.predict({ + result, + task_description: taskDescription, + incomplete_tasks: incompleteTasks, + objective + }) + + const newTasks: string[] = response.split('\n') + + return newTasks.filter((taskName) => taskName.trim()).map((taskName) => ({ task_name: taskName })) +} + +interface Task { + task_id: number + task_name: string +} + +async function prioritizeTasks( + taskPrioritizationChain: LLMChain, + thisTaskId: number, + taskList: Task[], + objective: string +): Promise { + const next_task_id = thisTaskId + 1 + const task_names = taskList.map((t) => t.task_name).join(', ') + const response = await taskPrioritizationChain.predict({ task_names, next_task_id, objective }) + const newTasks = response.split('\n') + const prioritizedTaskList: Task[] = [] + + for (const taskString of newTasks) { + if (!taskString.trim()) { + // eslint-disable-next-line no-continue + continue + } + const taskParts = taskString.trim().split('. 
', 2) + if (taskParts.length === 2) { + const task_id = parseInt(taskParts[0].trim(), 10) + const task_name = taskParts[1].trim() + prioritizedTaskList.push({ task_id, task_name }) + } + } + + return prioritizedTaskList +} + +export async function get_top_tasks(vectorStore: VectorStore, query: string, k: number): Promise { + const docs = await vectorStore.similaritySearch(query, k) + let returnDocs: string[] = [] + for (const doc of docs) { + returnDocs.push(doc.metadata.task) + } + return returnDocs +} + +async function executeTask(vectorStore: VectorStore, executionChain: LLMChain, objective: string, task: string, k = 5): Promise { + const context = await get_top_tasks(vectorStore, objective, k) + return executionChain.predict({ objective, context, task }) +} + +export class BabyAGI { + taskList: Array = [] + + taskCreationChain: TaskCreationChain + + taskPrioritizationChain: TaskPrioritizationChain + + executionChain: ExecutionChain + + taskIdCounter = 1 + + vectorStore: VectorStore + + maxIterations = 3 + + topK = 4 + + constructor( + taskCreationChain: TaskCreationChain, + taskPrioritizationChain: TaskPrioritizationChain, + executionChain: ExecutionChain, + vectorStore: VectorStore, + maxIterations: number, + topK: number + ) { + this.taskCreationChain = taskCreationChain + this.taskPrioritizationChain = taskPrioritizationChain + this.executionChain = executionChain + this.vectorStore = vectorStore + this.maxIterations = maxIterations + this.topK = topK + } + + addTask(task: Task) { + this.taskList.push(task) + } + + printTaskList() { + // eslint-disable-next-line no-console + console.log('\x1b[95m\x1b[1m\n*****TASK LIST*****\n\x1b[0m\x1b[0m') + // eslint-disable-next-line no-console + this.taskList.forEach((t) => console.log(`${t.task_id}: ${t.task_name}`)) + } + + printNextTask(task: Task) { + // eslint-disable-next-line no-console + console.log('\x1b[92m\x1b[1m\n*****NEXT TASK*****\n\x1b[0m\x1b[0m') + // eslint-disable-next-line no-console + 
console.log(`${task.task_id}: ${task.task_name}`) + } + + printTaskResult(result: string) { + // eslint-disable-next-line no-console + console.log('\x1b[93m\x1b[1m\n*****TASK RESULT*****\n\x1b[0m\x1b[0m') + // eslint-disable-next-line no-console + console.log(result) + } + + getInputKeys(): string[] { + return ['objective'] + } + + getOutputKeys(): string[] { + return [] + } + + async call(inputs: Record): Promise { + const { objective } = inputs + const firstTask = inputs.first_task || 'Make a todo list' + this.addTask({ task_id: 1, task_name: firstTask }) + let numIters = 0 + let loop = true + let finalResult = '' + + while (loop) { + if (this.taskList.length) { + this.printTaskList() + + // Step 1: Pull the first task + const task = this.taskList.shift() + if (!task) break + this.printNextTask(task) + + // Step 2: Execute the task + const result = await executeTask(this.vectorStore, this.executionChain, objective, task.task_name, this.topK) + const thisTaskId = task.task_id + finalResult = result + this.printTaskResult(result) + + // Step 3: Store the result in Pinecone + const docs = new Document({ pageContent: result, metadata: { task: task.task_name } }) + this.vectorStore.addDocuments([docs]) + + // Step 4: Create new tasks and reprioritize task list + const newTasks = await getNextTask( + this.taskCreationChain, + result, + task.task_name, + this.taskList.map((t) => t.task_name), + objective + ) + newTasks.forEach((newTask) => { + this.taskIdCounter += 1 + // eslint-disable-next-line no-param-reassign + newTask.task_id = this.taskIdCounter + this.addTask(newTask) + }) + this.taskList = await prioritizeTasks(this.taskPrioritizationChain, thisTaskId, this.taskList, objective) + } + + numIters += 1 + if (this.maxIterations !== null && numIters === this.maxIterations) { + // eslint-disable-next-line no-console + console.log('\x1b[91m\x1b[1m\n*****TASK ENDING*****\n\x1b[0m\x1b[0m') + loop = false + this.taskList = [] + } + } + + return finalResult + } + + static 
fromLLM(llm: BaseChatModel, vectorstore: VectorStore, maxIterations = 3, topK = 4): BabyAGI { + const taskCreationChain = TaskCreationChain.from_llm(llm) + const taskPrioritizationChain = TaskPrioritizationChain.from_llm(llm) + const executionChain = ExecutionChain.from_llm(llm) + return new BabyAGI(taskCreationChain, taskPrioritizationChain, executionChain, vectorstore, maxIterations, topK) + } +} diff --git a/packages/components/nodes/agents/ConversationalAgent/ConversationalAgent.ts b/packages/components/nodes/agents/ConversationalAgent/ConversationalAgent.ts new file mode 100644 index 0000000000000000000000000000000000000000..363b390767bb608915cc222ff9bc72d847465b6a --- /dev/null +++ b/packages/components/nodes/agents/ConversationalAgent/ConversationalAgent.ts @@ -0,0 +1,116 @@ +import { ICommonObject, IMessage, INode, INodeData, INodeParams } from '../../../src/Interface' +import { initializeAgentExecutorWithOptions, AgentExecutor, InitializeAgentExecutorOptions } from 'langchain/agents' +import { Tool } from 'langchain/tools' +import { BaseChatMemory, ChatMessageHistory } from 'langchain/memory' +import { getBaseClasses } from '../../../src/utils' +import { AIChatMessage, HumanChatMessage } from 'langchain/schema' +import { BaseLanguageModel } from 'langchain/base_language' +import { flatten } from 'lodash' + +class ConversationalAgent_Agents implements INode { + label: string + name: string + description: string + type: string + icon: string + category: string + baseClasses: string[] + inputs: INodeParams[] + + constructor() { + this.label = 'Conversational Agent' + this.name = 'conversationalAgent' + this.type = 'AgentExecutor' + this.category = 'Agents' + this.icon = 'agent.svg' + this.description = 'Conversational agent for a chat model. 
It will utilize chat specific prompts' + this.baseClasses = [this.type, ...getBaseClasses(AgentExecutor)] + this.inputs = [ + { + label: 'Allowed Tools', + name: 'tools', + type: 'Tool', + list: true + }, + { + label: 'Language Model', + name: 'model', + type: 'BaseLanguageModel' + }, + { + label: 'Memory', + name: 'memory', + type: 'BaseChatMemory' + }, + { + label: 'System Message', + name: 'systemMessage', + type: 'string', + rows: 4, + optional: true, + additionalParams: true + }, + { + label: 'Human Message', + name: 'humanMessage', + type: 'string', + rows: 4, + optional: true, + additionalParams: true + } + ] + } + + async init(nodeData: INodeData): Promise { + const model = nodeData.inputs?.model as BaseLanguageModel + let tools = nodeData.inputs?.tools as Tool[] + tools = flatten(tools) + const memory = nodeData.inputs?.memory as BaseChatMemory + const humanMessage = nodeData.inputs?.humanMessage as string + const systemMessage = nodeData.inputs?.systemMessage as string + + const obj: InitializeAgentExecutorOptions = { + agentType: 'chat-conversational-react-description', + verbose: process.env.DEBUG === 'true' ? 
true : false + } + + const agentArgs: any = {} + if (humanMessage) { + agentArgs.humanMessage = humanMessage + } + if (systemMessage) { + agentArgs.systemMessage = systemMessage + } + + if (Object.keys(agentArgs).length) obj.agentArgs = agentArgs + + const executor = await initializeAgentExecutorWithOptions(tools, model, obj) + executor.memory = memory + return executor + } + + async run(nodeData: INodeData, input: string, options: ICommonObject): Promise { + const executor = nodeData.instance as AgentExecutor + const memory = nodeData.inputs?.memory as BaseChatMemory + + if (options && options.chatHistory) { + const chatHistory = [] + const histories: IMessage[] = options.chatHistory + + for (const message of histories) { + if (message.type === 'apiMessage') { + chatHistory.push(new AIChatMessage(message.message)) + } else if (message.type === 'userMessage') { + chatHistory.push(new HumanChatMessage(message.message)) + } + } + memory.chatHistory = new ChatMessageHistory(chatHistory) + executor.memory = memory + } + const result = await executor.call({ input }) + + return result?.output + } +} + +module.exports = { nodeClass: ConversationalAgent_Agents } diff --git a/packages/components/nodes/agents/ConversationalAgent/agent.svg b/packages/components/nodes/agents/ConversationalAgent/agent.svg new file mode 100644 index 0000000000000000000000000000000000000000..c87861e5c58e1d278491328113e5209c010a25f4 --- /dev/null +++ b/packages/components/nodes/agents/ConversationalAgent/agent.svg @@ -0,0 +1,9 @@ + + + + + + + + + \ No newline at end of file diff --git a/packages/components/nodes/agents/MRKLAgentChat/MRKLAgentChat.ts b/packages/components/nodes/agents/MRKLAgentChat/MRKLAgentChat.ts new file mode 100644 index 0000000000000000000000000000000000000000..d2a52d6cf16f60623288be15a644773ee0f6f675 --- /dev/null +++ b/packages/components/nodes/agents/MRKLAgentChat/MRKLAgentChat.ts @@ -0,0 +1,60 @@ +import { INode, INodeData, INodeParams } from '../../../src/Interface' 
+import { initializeAgentExecutorWithOptions, AgentExecutor } from 'langchain/agents' +import { getBaseClasses } from '../../../src/utils' +import { Tool } from 'langchain/tools' +import { BaseLanguageModel } from 'langchain/base_language' +import { flatten } from 'lodash' + +class MRKLAgentChat_Agents implements INode { + label: string + name: string + description: string + type: string + icon: string + category: string + baseClasses: string[] + inputs: INodeParams[] + + constructor() { + this.label = 'MRKL Agent for Chat Models' + this.name = 'mrklAgentChat' + this.type = 'AgentExecutor' + this.category = 'Agents' + this.icon = 'agent.svg' + this.description = 'Agent that uses the ReAct Framework to decide what action to take, optimized to be used with Chat Models' + this.baseClasses = [this.type, ...getBaseClasses(AgentExecutor)] + this.inputs = [ + { + label: 'Allowed Tools', + name: 'tools', + type: 'Tool', + list: true + }, + { + label: 'Language Model', + name: 'model', + type: 'BaseLanguageModel' + } + ] + } + + async init(nodeData: INodeData): Promise { + const model = nodeData.inputs?.model as BaseLanguageModel + let tools = nodeData.inputs?.tools as Tool[] + tools = flatten(tools) + const executor = await initializeAgentExecutorWithOptions(tools, model, { + agentType: 'chat-zero-shot-react-description', + verbose: process.env.DEBUG === 'true' ? 
true : false + }) + return executor + } + + async run(nodeData: INodeData, input: string): Promise { + const executor = nodeData.instance as AgentExecutor + const result = await executor.call({ input }) + + return result?.output + } +} + +module.exports = { nodeClass: MRKLAgentChat_Agents } diff --git a/packages/components/nodes/agents/MRKLAgentChat/agent.svg b/packages/components/nodes/agents/MRKLAgentChat/agent.svg new file mode 100644 index 0000000000000000000000000000000000000000..c87861e5c58e1d278491328113e5209c010a25f4 --- /dev/null +++ b/packages/components/nodes/agents/MRKLAgentChat/agent.svg @@ -0,0 +1,9 @@ + + + + + + + + + \ No newline at end of file diff --git a/packages/components/nodes/agents/MRKLAgentLLM/MRKLAgentLLM.ts b/packages/components/nodes/agents/MRKLAgentLLM/MRKLAgentLLM.ts new file mode 100644 index 0000000000000000000000000000000000000000..eb685531c2fef2cfb801053daed3dffbbe5d04d8 --- /dev/null +++ b/packages/components/nodes/agents/MRKLAgentLLM/MRKLAgentLLM.ts @@ -0,0 +1,61 @@ +import { INode, INodeData, INodeParams } from '../../../src/Interface' +import { initializeAgentExecutorWithOptions, AgentExecutor } from 'langchain/agents' +import { Tool } from 'langchain/tools' +import { getBaseClasses } from '../../../src/utils' +import { BaseLanguageModel } from 'langchain/base_language' +import { flatten } from 'lodash' + +class MRKLAgentLLM_Agents implements INode { + label: string + name: string + description: string + type: string + icon: string + category: string + baseClasses: string[] + inputs: INodeParams[] + + constructor() { + this.label = 'MRKL Agent for LLMs' + this.name = 'mrklAgentLLM' + this.type = 'AgentExecutor' + this.category = 'Agents' + this.icon = 'agent.svg' + this.description = 'Agent that uses the ReAct Framework to decide what action to take, optimized to be used with LLMs' + this.baseClasses = [this.type, ...getBaseClasses(AgentExecutor)] + this.inputs = [ + { + label: 'Allowed Tools', + name: 'tools', + type: 'Tool', 
+ list: true + }, + { + label: 'Language Model', + name: 'model', + type: 'BaseLanguageModel' + } + ] + } + + async init(nodeData: INodeData): Promise { + const model = nodeData.inputs?.model as BaseLanguageModel + let tools = nodeData.inputs?.tools as Tool[] + tools = flatten(tools) + + const executor = await initializeAgentExecutorWithOptions(tools, model, { + agentType: 'zero-shot-react-description', + verbose: process.env.DEBUG === 'true' ? true : false + }) + return executor + } + + async run(nodeData: INodeData, input: string): Promise { + const executor = nodeData.instance as AgentExecutor + const result = await executor.call({ input }) + + return result?.output + } +} + +module.exports = { nodeClass: MRKLAgentLLM_Agents } diff --git a/packages/components/nodes/agents/MRKLAgentLLM/agent.svg b/packages/components/nodes/agents/MRKLAgentLLM/agent.svg new file mode 100644 index 0000000000000000000000000000000000000000..c87861e5c58e1d278491328113e5209c010a25f4 --- /dev/null +++ b/packages/components/nodes/agents/MRKLAgentLLM/agent.svg @@ -0,0 +1,9 @@ + + + + + + + + + \ No newline at end of file diff --git a/packages/components/nodes/chains/ApiChain/GETApiChain.ts b/packages/components/nodes/chains/ApiChain/GETApiChain.ts new file mode 100644 index 0000000000000000000000000000000000000000..8e6577494bb3213f76f1fa0efeb369c51ebee3f7 --- /dev/null +++ b/packages/components/nodes/chains/ApiChain/GETApiChain.ts @@ -0,0 +1,129 @@ +import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface' +import { APIChain } from 'langchain/chains' +import { CustomChainHandler, getBaseClasses } from '../../../src/utils' +import { BaseLanguageModel } from 'langchain/base_language' +import { PromptTemplate } from 'langchain/prompts' + +export const API_URL_RAW_PROMPT_TEMPLATE = `You are given the below API Documentation: +{api_docs} +Using this documentation, generate the full API url to call for answering the user question. 
+You should build the API url in order to get a response that is as short as possible, while still getting the necessary information to answer the question. Pay attention to deliberately exclude any unnecessary pieces of data in the API call. + +Question:{question} +API url:` + +export const API_RESPONSE_RAW_PROMPT_TEMPLATE = + 'Given this {api_response} response for {api_url}. use the given response to answer this {question}' + +class GETApiChain_Chains implements INode { + label: string + name: string + type: string + icon: string + category: string + baseClasses: string[] + description: string + inputs: INodeParams[] + + constructor() { + this.label = 'GET API Chain' + this.name = 'getApiChain' + this.type = 'GETApiChain' + this.icon = 'apichain.svg' + this.category = 'Chains' + this.description = 'Chain to run queries against GET API' + this.baseClasses = [this.type, ...getBaseClasses(APIChain)] + this.inputs = [ + { + label: 'Language Model', + name: 'model', + type: 'BaseLanguageModel' + }, + { + label: 'API Documentation', + name: 'apiDocs', + type: 'string', + description: + 'Description of how API works. Please refer to more examples', + rows: 4 + }, + { + label: 'Headers', + name: 'headers', + type: 'json', + additionalParams: true, + optional: true + }, + { + label: 'URL Prompt', + name: 'urlPrompt', + type: 'string', + description: 'Prompt used to tell LLMs how to construct the URL. Must contains {api_docs} and {question}', + default: API_URL_RAW_PROMPT_TEMPLATE, + rows: 4, + additionalParams: true + }, + { + label: 'Answer Prompt', + name: 'ansPrompt', + type: 'string', + description: + 'Prompt used to tell LLMs how to return the API response. 
Must contains {api_response}, {api_url}, and {question}', + default: API_RESPONSE_RAW_PROMPT_TEMPLATE, + rows: 4, + additionalParams: true + } + ] + } + + async init(nodeData: INodeData): Promise { + const model = nodeData.inputs?.model as BaseLanguageModel + const apiDocs = nodeData.inputs?.apiDocs as string + const headers = nodeData.inputs?.headers as string + const urlPrompt = nodeData.inputs?.urlPrompt as string + const ansPrompt = nodeData.inputs?.ansPrompt as string + + const chain = await getAPIChain(apiDocs, model, headers, urlPrompt, ansPrompt) + return chain + } + + async run(nodeData: INodeData, input: string, options: ICommonObject): Promise { + const model = nodeData.inputs?.model as BaseLanguageModel + const apiDocs = nodeData.inputs?.apiDocs as string + const headers = nodeData.inputs?.headers as string + const urlPrompt = nodeData.inputs?.urlPrompt as string + const ansPrompt = nodeData.inputs?.ansPrompt as string + + const chain = await getAPIChain(apiDocs, model, headers, urlPrompt, ansPrompt) + if (options.socketIO && options.socketIOClientId) { + const handler = new CustomChainHandler(options.socketIO, options.socketIOClientId, 2) + const res = await chain.run(input, [handler]) + return res + } else { + const res = await chain.run(input) + return res + } + } +} + +const getAPIChain = async (documents: string, llm: BaseLanguageModel, headers: string, urlPrompt: string, ansPrompt: string) => { + const apiUrlPrompt = new PromptTemplate({ + inputVariables: ['api_docs', 'question'], + template: urlPrompt ? urlPrompt : API_URL_RAW_PROMPT_TEMPLATE + }) + + const apiResponsePrompt = new PromptTemplate({ + inputVariables: ['api_docs', 'question', 'api_url', 'api_response'], + template: ansPrompt ? ansPrompt : API_RESPONSE_RAW_PROMPT_TEMPLATE + }) + + const chain = APIChain.fromLLMAndAPIDocs(llm, documents, { + apiUrlPrompt, + apiResponsePrompt, + verbose: process.env.DEBUG === 'true' ? true : false, + headers: typeof headers === 'object' ? 
headers : headers ? JSON.parse(headers) : {} + }) + return chain +} + +module.exports = { nodeClass: GETApiChain_Chains } diff --git a/packages/components/nodes/chains/ApiChain/POSTApiChain.ts b/packages/components/nodes/chains/ApiChain/POSTApiChain.ts new file mode 100644 index 0000000000000000000000000000000000000000..3c6ea677297e70cd3f0972f6968ab86f5b1afdb5 --- /dev/null +++ b/packages/components/nodes/chains/ApiChain/POSTApiChain.ts @@ -0,0 +1,118 @@ +import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface' +import { CustomChainHandler, getBaseClasses } from '../../../src/utils' +import { BaseLanguageModel } from 'langchain/base_language' +import { PromptTemplate } from 'langchain/prompts' +import { API_RESPONSE_RAW_PROMPT_TEMPLATE, API_URL_RAW_PROMPT_TEMPLATE, APIChain } from './postCore' + +class POSTApiChain_Chains implements INode { + label: string + name: string + type: string + icon: string + category: string + baseClasses: string[] + description: string + inputs: INodeParams[] + + constructor() { + this.label = 'POST API Chain' + this.name = 'postApiChain' + this.type = 'POSTApiChain' + this.icon = 'apichain.svg' + this.category = 'Chains' + this.description = 'Chain to run queries against POST API' + this.baseClasses = [this.type, ...getBaseClasses(APIChain)] + this.inputs = [ + { + label: 'Language Model', + name: 'model', + type: 'BaseLanguageModel' + }, + { + label: 'API Documentation', + name: 'apiDocs', + type: 'string', + description: + 'Description of how API works. Please refer to more examples', + rows: 4 + }, + { + label: 'Headers', + name: 'headers', + type: 'json', + additionalParams: true, + optional: true + }, + { + label: 'URL Prompt', + name: 'urlPrompt', + type: 'string', + description: 'Prompt used to tell LLMs how to construct the URL. 
Must contains {api_docs} and {question}', + default: API_URL_RAW_PROMPT_TEMPLATE, + rows: 4, + additionalParams: true + }, + { + label: 'Answer Prompt', + name: 'ansPrompt', + type: 'string', + description: + 'Prompt used to tell LLMs how to return the API response. Must contains {api_response}, {api_url}, and {question}', + default: API_RESPONSE_RAW_PROMPT_TEMPLATE, + rows: 4, + additionalParams: true + } + ] + } + + async init(nodeData: INodeData): Promise { + const model = nodeData.inputs?.model as BaseLanguageModel + const apiDocs = nodeData.inputs?.apiDocs as string + const headers = nodeData.inputs?.headers as string + const urlPrompt = nodeData.inputs?.urlPrompt as string + const ansPrompt = nodeData.inputs?.ansPrompt as string + + const chain = await getAPIChain(apiDocs, model, headers, urlPrompt, ansPrompt) + return chain + } + + async run(nodeData: INodeData, input: string, options: ICommonObject): Promise { + const model = nodeData.inputs?.model as BaseLanguageModel + const apiDocs = nodeData.inputs?.apiDocs as string + const headers = nodeData.inputs?.headers as string + const urlPrompt = nodeData.inputs?.urlPrompt as string + const ansPrompt = nodeData.inputs?.ansPrompt as string + + const chain = await getAPIChain(apiDocs, model, headers, urlPrompt, ansPrompt) + if (options.socketIO && options.socketIOClientId) { + const handler = new CustomChainHandler(options.socketIO, options.socketIOClientId, 2) + const res = await chain.run(input, [handler]) + return res + } else { + const res = await chain.run(input) + return res + } + } +} + +const getAPIChain = async (documents: string, llm: BaseLanguageModel, headers: string, urlPrompt: string, ansPrompt: string) => { + const apiUrlPrompt = new PromptTemplate({ + inputVariables: ['api_docs', 'question'], + template: urlPrompt ? 
urlPrompt : API_URL_RAW_PROMPT_TEMPLATE + }) + + const apiResponsePrompt = new PromptTemplate({ + inputVariables: ['api_docs', 'question', 'api_url_body', 'api_response'], + template: ansPrompt ? ansPrompt : API_RESPONSE_RAW_PROMPT_TEMPLATE + }) + + const chain = APIChain.fromLLMAndAPIDocs(llm, documents, { + apiUrlPrompt, + apiResponsePrompt, + verbose: process.env.DEBUG === 'true' ? true : false, + headers: typeof headers === 'object' ? headers : headers ? JSON.parse(headers) : {} + }) + return chain +} + +module.exports = { nodeClass: POSTApiChain_Chains } diff --git a/packages/components/nodes/chains/ApiChain/apichain.svg b/packages/components/nodes/chains/ApiChain/apichain.svg new file mode 100644 index 0000000000000000000000000000000000000000..3b86b90513e2414124b9fc2607b36d4aec0adf26 --- /dev/null +++ b/packages/components/nodes/chains/ApiChain/apichain.svg @@ -0,0 +1,3 @@ + \ No newline at end of file diff --git a/packages/components/nodes/chains/ApiChain/postCore.ts b/packages/components/nodes/chains/ApiChain/postCore.ts new file mode 100644 index 0000000000000000000000000000000000000000..de7215d92485648796231574b34356561c28bfaa --- /dev/null +++ b/packages/components/nodes/chains/ApiChain/postCore.ts @@ -0,0 +1,162 @@ +import { BaseLanguageModel } from 'langchain/base_language' +import { CallbackManagerForChainRun } from 'langchain/callbacks' +import { BaseChain, ChainInputs, LLMChain, SerializedAPIChain } from 'langchain/chains' +import { BasePromptTemplate, PromptTemplate } from 'langchain/prompts' +import { ChainValues } from 'langchain/schema' +import fetch from 'node-fetch' + +export const API_URL_RAW_PROMPT_TEMPLATE = `You are given the below API Documentation: +{api_docs} +Using this documentation, generate a json string with two keys: "url" and "data". +The value of "url" should be a string, which is the API url to call for answering the user question. 
+The value of "data" should be a dictionary of key-value pairs you want to POST to the url as a JSON body. +Be careful to always use double quotes for strings in the json string. +You should build the json string in order to get a response that is as short as possible, while still getting the necessary information to answer the question. Pay attention to deliberately exclude any unnecessary pieces of data in the API call. + +Question:{question} +json string:` + +export const API_RESPONSE_RAW_PROMPT_TEMPLATE = `${API_URL_RAW_PROMPT_TEMPLATE} {api_url_body} + +Here is the response from the API: + +{api_response} + +Summarize this response to answer the original question. + +Summary:` + +const defaultApiUrlPrompt = new PromptTemplate({ + inputVariables: ['api_docs', 'question'], + template: API_URL_RAW_PROMPT_TEMPLATE +}) + +const defaultApiResponsePrompt = new PromptTemplate({ + inputVariables: ['api_docs', 'question', 'api_url_body', 'api_response'], + template: API_RESPONSE_RAW_PROMPT_TEMPLATE +}) + +export interface APIChainInput extends Omit { + apiAnswerChain: LLMChain + apiRequestChain: LLMChain + apiDocs: string + inputKey?: string + headers?: Record + /** Key to use for output, defaults to `output` */ + outputKey?: string +} + +export type APIChainOptions = { + headers?: Record + apiUrlPrompt?: BasePromptTemplate + apiResponsePrompt?: BasePromptTemplate +} + +export class APIChain extends BaseChain implements APIChainInput { + apiAnswerChain: LLMChain + + apiRequestChain: LLMChain + + apiDocs: string + + headers = {} + + inputKey = 'question' + + outputKey = 'output' + + get inputKeys() { + return [this.inputKey] + } + + get outputKeys() { + return [this.outputKey] + } + + constructor(fields: APIChainInput) { + super(fields) + this.apiRequestChain = fields.apiRequestChain + this.apiAnswerChain = fields.apiAnswerChain + this.apiDocs = fields.apiDocs + this.inputKey = fields.inputKey ?? this.inputKey + this.outputKey = fields.outputKey ?? 
this.outputKey + this.headers = fields.headers ?? this.headers + } + + /** @ignore */ + async _call(values: ChainValues, runManager?: CallbackManagerForChainRun): Promise { + try { + const question: string = values[this.inputKey] + + const api_url_body = await this.apiRequestChain.predict({ question, api_docs: this.apiDocs }, runManager?.getChild()) + + const { url, data } = JSON.parse(api_url_body) + + const res = await fetch(url, { + method: 'POST', + headers: this.headers, + body: JSON.stringify(data) + }) + + const api_response = await res.text() + + const answer = await this.apiAnswerChain.predict( + { question, api_docs: this.apiDocs, api_url_body, api_response }, + runManager?.getChild() + ) + + return { [this.outputKey]: answer } + } catch (error) { + return { [this.outputKey]: error } + } + } + + _chainType() { + return 'api_chain' as const + } + + static async deserialize(data: SerializedAPIChain) { + const { api_request_chain, api_answer_chain, api_docs } = data + + if (!api_request_chain) { + throw new Error('LLMChain must have api_request_chain') + } + if (!api_answer_chain) { + throw new Error('LLMChain must have api_answer_chain') + } + if (!api_docs) { + throw new Error('LLMChain must have api_docs') + } + + return new APIChain({ + apiAnswerChain: await LLMChain.deserialize(api_answer_chain), + apiRequestChain: await LLMChain.deserialize(api_request_chain), + apiDocs: api_docs + }) + } + + serialize(): SerializedAPIChain { + return { + _type: this._chainType(), + api_answer_chain: this.apiAnswerChain.serialize(), + api_request_chain: this.apiRequestChain.serialize(), + api_docs: this.apiDocs + } + } + + static fromLLMAndAPIDocs( + llm: BaseLanguageModel, + apiDocs: string, + options: APIChainOptions & Omit = {} + ): APIChain { + const { apiUrlPrompt = defaultApiUrlPrompt, apiResponsePrompt = defaultApiResponsePrompt } = options + const apiRequestChain = new LLMChain({ prompt: apiUrlPrompt, llm }) + const apiAnswerChain = new LLMChain({ prompt: 
apiResponsePrompt, llm }) + return new this({ + apiAnswerChain, + apiRequestChain, + apiDocs, + ...options + }) + } +} diff --git a/packages/components/nodes/chains/ConversationChain/ConversationChain.ts b/packages/components/nodes/chains/ConversationChain/ConversationChain.ts new file mode 100644 index 0000000000000000000000000000000000000000..843e05fc6e3f7781fa1c01db49cd11b43ace04a1 --- /dev/null +++ b/packages/components/nodes/chains/ConversationChain/ConversationChain.ts @@ -0,0 +1,104 @@ +import { ICommonObject, IMessage, INode, INodeData, INodeParams } from '../../../src/Interface' +import { ConversationChain } from 'langchain/chains' +import { CustomChainHandler, getBaseClasses } from '../../../src/utils' +import { ChatPromptTemplate, HumanMessagePromptTemplate, MessagesPlaceholder, SystemMessagePromptTemplate } from 'langchain/prompts' +import { BufferMemory, ChatMessageHistory } from 'langchain/memory' +import { BaseChatModel } from 'langchain/chat_models/base' +import { AIChatMessage, HumanChatMessage } from 'langchain/schema' + +const systemMessage = `The following is a friendly conversation between a human and an AI. The AI is talkative and provides lots of specific details from its context. 
If the AI does not know the answer to a question, it truthfully says it does not know.` + +class ConversationChain_Chains implements INode { + label: string + name: string + type: string + icon: string + category: string + baseClasses: string[] + description: string + inputs: INodeParams[] + + constructor() { + this.label = 'Conversation Chain' + this.name = 'conversationChain' + this.type = 'ConversationChain' + this.icon = 'chain.svg' + this.category = 'Chains' + this.description = 'Chat models specific conversational chain with memory' + this.baseClasses = [this.type, ...getBaseClasses(ConversationChain)] + this.inputs = [ + { + label: 'Language Model', + name: 'model', + type: 'BaseChatModel' + }, + { + label: 'Memory', + name: 'memory', + type: 'BaseMemory' + }, + { + label: 'System Message', + name: 'systemMessagePrompt', + type: 'string', + rows: 4, + additionalParams: true, + optional: true, + placeholder: 'You are a helpful assistant that write codes' + } + ] + } + + async init(nodeData: INodeData): Promise { + const model = nodeData.inputs?.model as BaseChatModel + const memory = nodeData.inputs?.memory as BufferMemory + const prompt = nodeData.inputs?.systemMessagePrompt as string + + const obj: any = { + llm: model, + memory, + verbose: process.env.DEBUG === 'true' ? true : false + } + + const chatPrompt = ChatPromptTemplate.fromPromptMessages([ + SystemMessagePromptTemplate.fromTemplate(prompt ? `${prompt}\n${systemMessage}` : systemMessage), + new MessagesPlaceholder(memory.memoryKey ?? 
'chat_history'), + HumanMessagePromptTemplate.fromTemplate('{input}') + ]) + obj.prompt = chatPrompt + + const chain = new ConversationChain(obj) + return chain + } + + async run(nodeData: INodeData, input: string, options: ICommonObject): Promise { + const chain = nodeData.instance as ConversationChain + const memory = nodeData.inputs?.memory as BufferMemory + + if (options && options.chatHistory) { + const chatHistory = [] + const histories: IMessage[] = options.chatHistory + + for (const message of histories) { + if (message.type === 'apiMessage') { + chatHistory.push(new AIChatMessage(message.message)) + } else if (message.type === 'userMessage') { + chatHistory.push(new HumanChatMessage(message.message)) + } + } + memory.chatHistory = new ChatMessageHistory(chatHistory) + chain.memory = memory + } + + if (options.socketIO && options.socketIOClientId) { + const handler = new CustomChainHandler(options.socketIO, options.socketIOClientId) + const res = await chain.call({ input }, [handler]) + return res?.response + } else { + const res = await chain.call({ input }) + return res?.response + } + } +} + +module.exports = { nodeClass: ConversationChain_Chains } diff --git a/packages/components/nodes/chains/ConversationChain/chain.svg b/packages/components/nodes/chains/ConversationChain/chain.svg new file mode 100644 index 0000000000000000000000000000000000000000..a5b32f90aa33a5a1f436ffaec7a814f6b350e6a5 --- /dev/null +++ b/packages/components/nodes/chains/ConversationChain/chain.svg @@ -0,0 +1,6 @@ + + + + + + \ No newline at end of file diff --git a/packages/components/nodes/chains/ConversationalRetrievalQAChain/ConversationalRetrievalQAChain.ts b/packages/components/nodes/chains/ConversationalRetrievalQAChain/ConversationalRetrievalQAChain.ts new file mode 100644 index 0000000000000000000000000000000000000000..3b7e1413f015ee7c6115a0210c3cccc08f7eb0c2 --- /dev/null +++ 
b/packages/components/nodes/chains/ConversationalRetrievalQAChain/ConversationalRetrievalQAChain.ts @@ -0,0 +1,163 @@ +import { BaseLanguageModel } from 'langchain/base_language' +import { ICommonObject, IMessage, INode, INodeData, INodeParams } from '../../../src/Interface' +import { CustomChainHandler, getBaseClasses } from '../../../src/utils' +import { ConversationalRetrievalQAChain } from 'langchain/chains' +import { AIChatMessage, BaseRetriever, HumanChatMessage } from 'langchain/schema' +import { BaseChatMemory, BufferMemory, ChatMessageHistory } from 'langchain/memory' +import { PromptTemplate } from 'langchain/prompts' + +const default_qa_template = `Use the following pieces of context to answer the question at the end. If you don't know the answer, just say that you don't know, don't try to make up an answer. + +{context} + +Question: {question} +Helpful Answer:` + +const qa_template = `Use the following pieces of context to answer the question at the end. + +{context} + +Question: {question} +Helpful Answer:` + +class ConversationalRetrievalQAChain_Chains implements INode { + label: string + name: string + type: string + icon: string + category: string + baseClasses: string[] + description: string + inputs: INodeParams[] + + constructor() { + this.label = 'Conversational Retrieval QA Chain' + this.name = 'conversationalRetrievalQAChain' + this.type = 'ConversationalRetrievalQAChain' + this.icon = 'chain.svg' + this.category = 'Chains' + this.description = 'Document QA - built on RetrievalQAChain to provide a chat history component' + this.baseClasses = [this.type, ...getBaseClasses(ConversationalRetrievalQAChain)] + this.inputs = [ + { + label: 'Language Model', + name: 'model', + type: 'BaseLanguageModel' + }, + { + label: 'Vector Store Retriever', + name: 'vectorStoreRetriever', + type: 'BaseRetriever' + }, + { + label: 'Return Source Documents', + name: 'returnSourceDocuments', + type: 'boolean', + optional: true + }, + { + label: 'System Message', + 
name: 'systemMessagePrompt', + type: 'string', + rows: 4, + additionalParams: true, + optional: true, + placeholder: + 'I want you to act as a document that I am having a conversation with. Your name is "AI Assistant". You will provide me with answers from the given info. If the answer is not included, say exactly "Hmm, I am not sure." and stop after that. Refuse to answer any question not about the info. Never break character.' + }, + { + label: 'Chain Option', + name: 'chainOption', + type: 'options', + options: [ + { + label: 'MapReduceDocumentsChain', + name: 'map_reduce', + description: + 'Suitable for QA tasks over larger documents and can run the preprocessing step in parallel, reducing the running time' + }, + { + label: 'RefineDocumentsChain', + name: 'refine', + description: 'Suitable for QA tasks over a large number of documents.' + }, + { + label: 'StuffDocumentsChain', + name: 'stuff', + description: 'Suitable for QA tasks over a small number of documents.' + } + ], + additionalParams: true, + optional: true + } + ] + } + + async init(nodeData: INodeData): Promise { + const model = nodeData.inputs?.model as BaseLanguageModel + const vectorStoreRetriever = nodeData.inputs?.vectorStoreRetriever as BaseRetriever + const systemMessagePrompt = nodeData.inputs?.systemMessagePrompt as string + const returnSourceDocuments = nodeData.inputs?.returnSourceDocuments as boolean + const chainOption = nodeData.inputs?.chainOption as string + + const obj: any = { + verbose: process.env.DEBUG === 'true' ? true : false, + qaChainOptions: { + type: 'stuff', + prompt: PromptTemplate.fromTemplate(systemMessagePrompt ? 
`${systemMessagePrompt}\n${qa_template}` : default_qa_template) + }, + memory: new BufferMemory({ + memoryKey: 'chat_history', + inputKey: 'question', + outputKey: 'text', + returnMessages: true + }) + } + if (returnSourceDocuments) obj.returnSourceDocuments = returnSourceDocuments + if (chainOption) obj.qaChainOptions = { ...obj.qaChainOptions, type: chainOption } + + const chain = ConversationalRetrievalQAChain.fromLLM(model, vectorStoreRetriever, obj) + return chain + } + + async run(nodeData: INodeData, input: string, options: ICommonObject): Promise { + const chain = nodeData.instance as ConversationalRetrievalQAChain + const returnSourceDocuments = nodeData.inputs?.returnSourceDocuments as boolean + let model = nodeData.inputs?.model + + // Temporary fix: https://github.com/hwchase17/langchainjs/issues/754 + model.streaming = false + chain.questionGeneratorChain.llm = model + + const obj = { question: input } + + if (chain.memory && options && options.chatHistory) { + const chatHistory = [] + const histories: IMessage[] = options.chatHistory + const memory = chain.memory as BaseChatMemory + + for (const message of histories) { + if (message.type === 'apiMessage') { + chatHistory.push(new AIChatMessage(message.message)) + } else if (message.type === 'userMessage') { + chatHistory.push(new HumanChatMessage(message.message)) + } + } + memory.chatHistory = new ChatMessageHistory(chatHistory) + chain.memory = memory + } + + if (options.socketIO && options.socketIOClientId) { + const handler = new CustomChainHandler(options.socketIO, options.socketIOClientId, undefined, returnSourceDocuments) + const res = await chain.call(obj, [handler]) + if (res.text && res.sourceDocuments) return res + return res?.text + } else { + const res = await chain.call(obj) + if (res.text && res.sourceDocuments) return res + return res?.text + } + } +} + +module.exports = { nodeClass: ConversationalRetrievalQAChain_Chains } diff --git 
a/packages/components/nodes/chains/ConversationalRetrievalQAChain/chain.svg b/packages/components/nodes/chains/ConversationalRetrievalQAChain/chain.svg new file mode 100644 index 0000000000000000000000000000000000000000..a5b32f90aa33a5a1f436ffaec7a814f6b350e6a5 --- /dev/null +++ b/packages/components/nodes/chains/ConversationalRetrievalQAChain/chain.svg @@ -0,0 +1,6 @@ + + + + + + \ No newline at end of file diff --git a/packages/components/nodes/chains/LLMChain/LLMChain.ts b/packages/components/nodes/chains/LLMChain/LLMChain.ts new file mode 100644 index 0000000000000000000000000000000000000000..9cd08d353035c8ead872fa1d4ca1223735852d86 --- /dev/null +++ b/packages/components/nodes/chains/LLMChain/LLMChain.ts @@ -0,0 +1,167 @@ +import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface' +import { CustomChainHandler, getBaseClasses } from '../../../src/utils' +import { LLMChain } from 'langchain/chains' +import { BaseLanguageModel } from 'langchain/base_language' + +class LLMChain_Chains implements INode { + label: string + name: string + type: string + icon: string + category: string + baseClasses: string[] + description: string + inputs: INodeParams[] + outputs: INodeOutputsValue[] + + constructor() { + this.label = 'LLM Chain' + this.name = 'llmChain' + this.type = 'LLMChain' + this.icon = 'chain.svg' + this.category = 'Chains' + this.description = 'Chain to run queries against LLMs' + this.baseClasses = [this.type, ...getBaseClasses(LLMChain)] + this.inputs = [ + { + label: 'Language Model', + name: 'model', + type: 'BaseLanguageModel' + }, + { + label: 'Prompt', + name: 'prompt', + type: 'BasePromptTemplate' + }, + { + label: 'Chain Name', + name: 'chainName', + type: 'string', + placeholder: 'Name Your Chain', + optional: true + } + ] + this.outputs = [ + { + label: 'LLM Chain', + name: 'llmChain', + baseClasses: [this.type, ...getBaseClasses(LLMChain)] + }, + { + label: 'Output Prediction', + name: 
'outputPrediction', + baseClasses: ['string'] + } + ] + } + + async init(nodeData: INodeData, input: string): Promise { + const model = nodeData.inputs?.model as BaseLanguageModel + const prompt = nodeData.inputs?.prompt + const output = nodeData.outputs?.output as string + const promptValues = prompt.promptValues as ICommonObject + + if (output === this.name) { + const chain = new LLMChain({ llm: model, prompt, verbose: process.env.DEBUG === 'true' ? true : false }) + return chain + } else if (output === 'outputPrediction') { + const chain = new LLMChain({ llm: model, prompt, verbose: process.env.DEBUG === 'true' ? true : false }) + const inputVariables = chain.prompt.inputVariables as string[] // ["product"] + const res = await runPrediction(inputVariables, chain, input, promptValues) + // eslint-disable-next-line no-console + console.log('\x1b[92m\x1b[1m\n*****OUTPUT PREDICTION*****\n\x1b[0m\x1b[0m') + // eslint-disable-next-line no-console + console.log(res) + return res + } + } + + async run(nodeData: INodeData, input: string, options: ICommonObject): Promise { + const inputVariables = nodeData.instance.prompt.inputVariables as string[] // ["product"] + const chain = nodeData.instance as LLMChain + const promptValues = nodeData.inputs?.prompt.promptValues as ICommonObject + + const res = options.socketIO + ? 
await runPrediction(inputVariables, chain, input, promptValues, true, options.socketIO, options.socketIOClientId) + : await runPrediction(inputVariables, chain, input, promptValues) + // eslint-disable-next-line no-console + console.log('\x1b[93m\x1b[1m\n*****FINAL RESULT*****\n\x1b[0m\x1b[0m') + // eslint-disable-next-line no-console + console.log(res) + return res + } +} + +const runPrediction = async ( + inputVariables: string[], + chain: LLMChain, + input: string, + promptValues: ICommonObject, + isStreaming?: boolean, + socketIO?: any, + socketIOClientId = '' +) => { + if (inputVariables.length === 1) { + if (isStreaming) { + const handler = new CustomChainHandler(socketIO, socketIOClientId) + const res = await chain.run(input, [handler]) + return res + } else { + const res = await chain.run(input) + return res + } + } else if (inputVariables.length > 1) { + let seen: string[] = [] + + for (const variable of inputVariables) { + seen.push(variable) + if (promptValues[variable]) { + seen.pop() + } + } + + if (seen.length === 0) { + // All inputVariables have fixed values specified + const options = { + ...promptValues + } + if (isStreaming) { + const handler = new CustomChainHandler(socketIO, socketIOClientId) + const res = await chain.call(options, [handler]) + return res?.text + } else { + const res = await chain.call(options) + return res?.text + } + } else if (seen.length === 1) { + // If one inputVariable is not specify, use input (user's question) as value + const lastValue = seen.pop() + if (!lastValue) throw new Error('Please provide Prompt Values') + const options = { + ...promptValues, + [lastValue]: input + } + if (isStreaming) { + const handler = new CustomChainHandler(socketIO, socketIOClientId) + const res = await chain.call(options, [handler]) + return res?.text + } else { + const res = await chain.call(options) + return res?.text + } + } else { + throw new Error(`Please provide Prompt Values for: ${seen.join(', ')}`) + } + } else { + if 
(isStreaming) { + const handler = new CustomChainHandler(socketIO, socketIOClientId) + const res = await chain.run(input, [handler]) + return res + } else { + const res = await chain.run(input) + return res + } + } +} + +module.exports = { nodeClass: LLMChain_Chains } diff --git a/packages/components/nodes/chains/LLMChain/chain.svg b/packages/components/nodes/chains/LLMChain/chain.svg new file mode 100644 index 0000000000000000000000000000000000000000..a5b32f90aa33a5a1f436ffaec7a814f6b350e6a5 --- /dev/null +++ b/packages/components/nodes/chains/LLMChain/chain.svg @@ -0,0 +1,6 @@ + + + + + + \ No newline at end of file diff --git a/packages/components/nodes/chains/MultiPromptChain/MultiPromptChain.ts b/packages/components/nodes/chains/MultiPromptChain/MultiPromptChain.ts new file mode 100644 index 0000000000000000000000000000000000000000..c74e3257f3de599d00ce3c3e527afe150c877656 --- /dev/null +++ b/packages/components/nodes/chains/MultiPromptChain/MultiPromptChain.ts @@ -0,0 +1,74 @@ +import { BaseLanguageModel } from 'langchain/base_language' +import { ICommonObject, INode, INodeData, INodeParams, PromptRetriever } from '../../../src/Interface' +import { CustomChainHandler, getBaseClasses } from '../../../src/utils' +import { MultiPromptChain } from 'langchain/chains' + +class MultiPromptChain_Chains implements INode { + label: string + name: string + type: string + icon: string + category: string + baseClasses: string[] + description: string + inputs: INodeParams[] + + constructor() { + this.label = 'Multi Prompt Chain' + this.name = 'multiPromptChain' + this.type = 'MultiPromptChain' + this.icon = 'chain.svg' + this.category = 'Chains' + this.description = 'Chain automatically picks an appropriate prompt from multiple prompt templates' + this.baseClasses = [this.type, ...getBaseClasses(MultiPromptChain)] + this.inputs = [ + { + label: 'Language Model', + name: 'model', + type: 'BaseLanguageModel' + }, + { + label: 'Prompt Retriever', + name: 'promptRetriever', + 
type: 'PromptRetriever', + list: true + } + ] + } + + async init(nodeData: INodeData): Promise { + const model = nodeData.inputs?.model as BaseLanguageModel + const promptRetriever = nodeData.inputs?.promptRetriever as PromptRetriever[] + const promptNames = [] + const promptDescriptions = [] + const promptTemplates = [] + + for (const prompt of promptRetriever) { + promptNames.push(prompt.name) + promptDescriptions.push(prompt.description) + promptTemplates.push(prompt.systemMessage) + } + + const chain = MultiPromptChain.fromPrompts(model, promptNames, promptDescriptions, promptTemplates, undefined, { + verbose: process.env.DEBUG === 'true' ? true : false + } as any) + + return chain + } + + async run(nodeData: INodeData, input: string, options: ICommonObject): Promise { + const chain = nodeData.instance as MultiPromptChain + const obj = { input } + + if (options.socketIO && options.socketIOClientId) { + const handler = new CustomChainHandler(options.socketIO, options.socketIOClientId) + const res = await chain.call(obj, [handler]) + return res?.text + } else { + const res = await chain.call(obj) + return res?.text + } + } +} + +module.exports = { nodeClass: MultiPromptChain_Chains } diff --git a/packages/components/nodes/chains/MultiPromptChain/chain.svg b/packages/components/nodes/chains/MultiPromptChain/chain.svg new file mode 100644 index 0000000000000000000000000000000000000000..a5b32f90aa33a5a1f436ffaec7a814f6b350e6a5 --- /dev/null +++ b/packages/components/nodes/chains/MultiPromptChain/chain.svg @@ -0,0 +1,6 @@ + + + + + + \ No newline at end of file diff --git a/packages/components/nodes/chains/MultiRetrievalQAChain/MultiRetrievalQAChain.ts b/packages/components/nodes/chains/MultiRetrievalQAChain/MultiRetrievalQAChain.ts new file mode 100644 index 0000000000000000000000000000000000000000..b17125c2101eddb6705a0dfa7c5501cf8780277b --- /dev/null +++ b/packages/components/nodes/chains/MultiRetrievalQAChain/MultiRetrievalQAChain.ts @@ -0,0 +1,74 @@ +import { 
BaseLanguageModel } from 'langchain/base_language' +import { ICommonObject, INode, INodeData, INodeParams, VectorStoreRetriever } from '../../../src/Interface' +import { CustomChainHandler, getBaseClasses } from '../../../src/utils' +import { MultiRetrievalQAChain } from 'langchain/chains' + +class MultiRetrievalQAChain_Chains implements INode { + label: string + name: string + type: string + icon: string + category: string + baseClasses: string[] + description: string + inputs: INodeParams[] + + constructor() { + this.label = 'Multi Retrieval QA Chain' + this.name = 'multiRetrievalQAChain' + this.type = 'MultiRetrievalQAChain' + this.icon = 'chain.svg' + this.category = 'Chains' + this.description = 'QA Chain that automatically picks an appropriate vector store from multiple retrievers' + this.baseClasses = [this.type, ...getBaseClasses(MultiRetrievalQAChain)] + this.inputs = [ + { + label: 'Language Model', + name: 'model', + type: 'BaseLanguageModel' + }, + { + label: 'Vector Store Retriever', + name: 'vectorStoreRetriever', + type: 'VectorStoreRetriever', + list: true + } + ] + } + + async init(nodeData: INodeData): Promise { + const model = nodeData.inputs?.model as BaseLanguageModel + const vectorStoreRetriever = nodeData.inputs?.vectorStoreRetriever as VectorStoreRetriever[] + const retrieverNames = [] + const retrieverDescriptions = [] + const retrievers = [] + + for (const vs of vectorStoreRetriever) { + retrieverNames.push(vs.name) + retrieverDescriptions.push(vs.description) + retrievers.push(vs.vectorStore.asRetriever((vs.vectorStore as any).k ?? 4)) + } + + const chain = MultiRetrievalQAChain.fromRetrievers(model, retrieverNames, retrieverDescriptions, retrievers, undefined, { + verbose: process.env.DEBUG === 'true' ? 
true : false + } as any) + + return chain + } + + async run(nodeData: INodeData, input: string, options: ICommonObject): Promise { + const chain = nodeData.instance as MultiRetrievalQAChain + const obj = { input } + + if (options.socketIO && options.socketIOClientId) { + const handler = new CustomChainHandler(options.socketIO, options.socketIOClientId) + const res = await chain.call(obj, [handler]) + return res?.text + } else { + const res = await chain.call(obj) + return res?.text + } + } +} + +module.exports = { nodeClass: MultiRetrievalQAChain_Chains } diff --git a/packages/components/nodes/chains/MultiRetrievalQAChain/chain.svg b/packages/components/nodes/chains/MultiRetrievalQAChain/chain.svg new file mode 100644 index 0000000000000000000000000000000000000000..a5b32f90aa33a5a1f436ffaec7a814f6b350e6a5 --- /dev/null +++ b/packages/components/nodes/chains/MultiRetrievalQAChain/chain.svg @@ -0,0 +1,6 @@ + + + + + + \ No newline at end of file diff --git a/packages/components/nodes/chains/RetrievalQAChain/RetrievalQAChain.ts b/packages/components/nodes/chains/RetrievalQAChain/RetrievalQAChain.ts new file mode 100644 index 0000000000000000000000000000000000000000..97fa51a1cc5b3252709d2f2c59f614048878352a --- /dev/null +++ b/packages/components/nodes/chains/RetrievalQAChain/RetrievalQAChain.ts @@ -0,0 +1,64 @@ +import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface' +import { RetrievalQAChain } from 'langchain/chains' +import { BaseRetriever } from 'langchain/schema' +import { CustomChainHandler, getBaseClasses } from '../../../src/utils' +import { BaseLanguageModel } from 'langchain/base_language' + +class RetrievalQAChain_Chains implements INode { + label: string + name: string + type: string + icon: string + category: string + baseClasses: string[] + description: string + inputs: INodeParams[] + + constructor() { + this.label = 'Retrieval QA Chain' + this.name = 'retrievalQAChain' + this.type = 'RetrievalQAChain' + this.icon = 
'chain.svg' + this.category = 'Chains' + this.description = 'QA chain to answer a question based on the retrieved documents' + this.baseClasses = [this.type, ...getBaseClasses(RetrievalQAChain)] + this.inputs = [ + { + label: 'Language Model', + name: 'model', + type: 'BaseLanguageModel' + }, + { + label: 'Vector Store Retriever', + name: 'vectorStoreRetriever', + type: 'BaseRetriever' + } + ] + } + + async init(nodeData: INodeData): Promise { + const model = nodeData.inputs?.model as BaseLanguageModel + const vectorStoreRetriever = nodeData.inputs?.vectorStoreRetriever as BaseRetriever + + const chain = RetrievalQAChain.fromLLM(model, vectorStoreRetriever, { verbose: process.env.DEBUG === 'true' ? true : false }) + return chain + } + + async run(nodeData: INodeData, input: string, options: ICommonObject): Promise { + const chain = nodeData.instance as RetrievalQAChain + const obj = { + query: input + } + + if (options.socketIO && options.socketIOClientId) { + const handler = new CustomChainHandler(options.socketIO, options.socketIOClientId) + const res = await chain.call(obj, [handler]) + return res?.text + } else { + const res = await chain.call(obj) + return res?.text + } + } +} + +module.exports = { nodeClass: RetrievalQAChain_Chains } diff --git a/packages/components/nodes/chains/RetrievalQAChain/chain.svg b/packages/components/nodes/chains/RetrievalQAChain/chain.svg new file mode 100644 index 0000000000000000000000000000000000000000..a5b32f90aa33a5a1f436ffaec7a814f6b350e6a5 --- /dev/null +++ b/packages/components/nodes/chains/RetrievalQAChain/chain.svg @@ -0,0 +1,6 @@ + + + + + + \ No newline at end of file diff --git a/packages/components/nodes/chains/SqlDatabaseChain/SqlDatabaseChain.ts b/packages/components/nodes/chains/SqlDatabaseChain/SqlDatabaseChain.ts new file mode 100644 index 0000000000000000000000000000000000000000..27a245e1aeeaba3aeb6318ef1a0a8000bc819566 --- /dev/null +++ b/packages/components/nodes/chains/SqlDatabaseChain/SqlDatabaseChain.ts @@ 
-0,0 +1,99 @@ +import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface' +import { SqlDatabaseChain, SqlDatabaseChainInput } from 'langchain/chains' +import { CustomChainHandler, getBaseClasses } from '../../../src/utils' +import { DataSource } from 'typeorm' +import { SqlDatabase } from 'langchain/sql_db' +import { BaseLanguageModel } from 'langchain/base_language' + +class SqlDatabaseChain_Chains implements INode { + label: string + name: string + type: string + icon: string + category: string + baseClasses: string[] + description: string + inputs: INodeParams[] + + constructor() { + this.label = 'Sql Database Chain' + this.name = 'sqlDatabaseChain' + this.type = 'SqlDatabaseChain' + this.icon = 'sqlchain.svg' + this.category = 'Chains' + this.description = 'Answer questions over a SQL database' + this.baseClasses = [this.type, ...getBaseClasses(SqlDatabaseChain)] + this.inputs = [ + { + label: 'Language Model', + name: 'model', + type: 'BaseLanguageModel' + }, + { + label: 'Database', + name: 'database', + type: 'options', + options: [ + { + label: 'SQlite', + name: 'sqlite' + } + ], + default: 'sqlite' + }, + { + label: 'Database File Path', + name: 'dbFilePath', + type: 'string', + placeholder: 'C:/Users/chinook.db' + } + ] + } + + async init(nodeData: INodeData): Promise { + const databaseType = nodeData.inputs?.database as 'sqlite' + const model = nodeData.inputs?.model as BaseLanguageModel + const dbFilePath = nodeData.inputs?.dbFilePath + + const chain = await getSQLDBChain(databaseType, dbFilePath, model) + return chain + } + + async run(nodeData: INodeData, input: string, options: ICommonObject): Promise { + const databaseType = nodeData.inputs?.database as 'sqlite' + const model = nodeData.inputs?.model as BaseLanguageModel + const dbFilePath = nodeData.inputs?.dbFilePath + + const chain = await getSQLDBChain(databaseType, dbFilePath, model) + if (options.socketIO && options.socketIOClientId) { + const handler = new 
CustomChainHandler(options.socketIO, options.socketIOClientId) + const res = await chain.run(input, [handler]) + return res + } else { + const res = await chain.run(input) + return res + } + } +} + +const getSQLDBChain = async (databaseType: 'sqlite', dbFilePath: string, llm: BaseLanguageModel) => { + const datasource = new DataSource({ + type: databaseType, + database: dbFilePath + }) + + const db = await SqlDatabase.fromDataSourceParams({ + appDataSource: datasource + }) + + const obj: SqlDatabaseChainInput = { + llm, + database: db, + verbose: process.env.DEBUG === 'true' ? true : false + } + + const chain = new SqlDatabaseChain(obj) + return chain +} + +module.exports = { nodeClass: SqlDatabaseChain_Chains } diff --git a/packages/components/nodes/chains/SqlDatabaseChain/sqlchain.svg b/packages/components/nodes/chains/SqlDatabaseChain/sqlchain.svg new file mode 100644 index 0000000000000000000000000000000000000000..dcf937b350ef108c4c66c7700d1e34e91bdc65c6 --- /dev/null +++ b/packages/components/nodes/chains/SqlDatabaseChain/sqlchain.svg @@ -0,0 +1,7 @@ + + + + + + + \ No newline at end of file diff --git a/packages/components/nodes/chains/VectorDBQAChain/VectorDBQAChain.ts b/packages/components/nodes/chains/VectorDBQAChain/VectorDBQAChain.ts new file mode 100644 index 0000000000000000000000000000000000000000..5850ed7bc6491e60c80e632eeb6ebd685ec2b3ce --- /dev/null +++ b/packages/components/nodes/chains/VectorDBQAChain/VectorDBQAChain.ts @@ -0,0 +1,67 @@ +import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface' +import { CustomChainHandler, getBaseClasses } from '../../../src/utils' +import { VectorDBQAChain } from 'langchain/chains' +import { BaseLanguageModel } from 'langchain/base_language' +import { VectorStore } from 'langchain/vectorstores' + +class VectorDBQAChain_Chains implements INode { + label: string + name: string + type: string + icon: string + category: string + baseClasses: string[] + description: string + inputs: 
INodeParams[] + + constructor() { + this.label = 'VectorDB QA Chain' + this.name = 'vectorDBQAChain' + this.type = 'VectorDBQAChain' + this.icon = 'chain.svg' + this.category = 'Chains' + this.description = 'QA chain for vector databases' + this.baseClasses = [this.type, ...getBaseClasses(VectorDBQAChain)] + this.inputs = [ + { + label: 'Language Model', + name: 'model', + type: 'BaseLanguageModel' + }, + { + label: 'Vector Store', + name: 'vectorStore', + type: 'VectorStore' + } + ] + } + + async init(nodeData: INodeData): Promise { + const model = nodeData.inputs?.model as BaseLanguageModel + const vectorStore = nodeData.inputs?.vectorStore as VectorStore + + const chain = VectorDBQAChain.fromLLM(model, vectorStore, { + k: (vectorStore as any)?.k ?? 4, + verbose: process.env.DEBUG === 'true' ? true : false + }) + return chain + } + + async run(nodeData: INodeData, input: string, options: ICommonObject): Promise { + const chain = nodeData.instance as VectorDBQAChain + const obj = { + query: input + } + + if (options.socketIO && options.socketIOClientId) { + const handler = new CustomChainHandler(options.socketIO, options.socketIOClientId) + const res = await chain.call(obj, [handler]) + return res?.text + } else { + const res = await chain.call(obj) + return res?.text + } + } +} + +module.exports = { nodeClass: VectorDBQAChain_Chains } diff --git a/packages/components/nodes/chains/VectorDBQAChain/chain.svg b/packages/components/nodes/chains/VectorDBQAChain/chain.svg new file mode 100644 index 0000000000000000000000000000000000000000..a5b32f90aa33a5a1f436ffaec7a814f6b350e6a5 --- /dev/null +++ b/packages/components/nodes/chains/VectorDBQAChain/chain.svg @@ -0,0 +1,6 @@ + + + + + + \ No newline at end of file diff --git a/packages/components/nodes/chatmodels/AzureChatOpenAI/Azure.svg b/packages/components/nodes/chatmodels/AzureChatOpenAI/Azure.svg new file mode 100644 index 0000000000000000000000000000000000000000..51eb62535a52e016a28807f036c37acd9380b19f --- 
/dev/null +++ b/packages/components/nodes/chatmodels/AzureChatOpenAI/Azure.svg @@ -0,0 +1,5 @@ + + + + + \ No newline at end of file diff --git a/packages/components/nodes/chatmodels/AzureChatOpenAI/AzureChatOpenAI.ts b/packages/components/nodes/chatmodels/AzureChatOpenAI/AzureChatOpenAI.ts new file mode 100644 index 0000000000000000000000000000000000000000..7857bfdf4965b99b078573741dfc270247507e44 --- /dev/null +++ b/packages/components/nodes/chatmodels/AzureChatOpenAI/AzureChatOpenAI.ts @@ -0,0 +1,146 @@ +import { OpenAIBaseInput } from 'langchain/dist/types/openai-types' +import { INode, INodeData, INodeParams } from '../../../src/Interface' +import { getBaseClasses } from '../../../src/utils' +import { AzureOpenAIInput, ChatOpenAI } from 'langchain/chat_models/openai' + +class AzureChatOpenAI_ChatModels implements INode { + label: string + name: string + type: string + icon: string + category: string + description: string + baseClasses: string[] + inputs: INodeParams[] + + constructor() { + this.label = 'Azure ChatOpenAI' + this.name = 'azureChatOpenAI' + this.type = 'AzureChatOpenAI' + this.icon = 'Azure.svg' + this.category = 'Chat Models' + this.description = 'Wrapper around Azure OpenAI large language models that use the Chat endpoint' + this.baseClasses = [this.type, ...getBaseClasses(ChatOpenAI)] + this.inputs = [ + { + label: 'Azure OpenAI Api Key', + name: 'azureOpenAIApiKey', + type: 'password' + }, + { + label: 'Model Name', + name: 'modelName', + type: 'options', + options: [ + { + label: 'gpt-4', + name: 'gpt-4' + }, + { + label: 'gpt-4-32k', + name: 'gpt-4-32k' + }, + { + label: 'gpt-35-turbo', + name: 'gpt-35-turbo' + } + ], + default: 'gpt-35-turbo', + optional: true + }, + { + label: 'Temperature', + name: 'temperature', + type: 'number', + default: 0.9, + optional: true + }, + { + label: 'Azure OpenAI Api Instance Name', + name: 'azureOpenAIApiInstanceName', + type: 'string', + placeholder: 'YOUR-INSTANCE-NAME' + }, + { + label: 'Azure OpenAI 
Api Deployment Name', + name: 'azureOpenAIApiDeploymentName', + type: 'string', + placeholder: 'YOUR-DEPLOYMENT-NAME' + }, + { + label: 'Azure OpenAI Api Version', + name: 'azureOpenAIApiVersion', + type: 'options', + options: [ + { + label: '2023-03-15-preview', + name: '2023-03-15-preview' + } + ], + default: '2023-03-15-preview' + }, + { + label: 'Max Tokens', + name: 'maxTokens', + type: 'number', + optional: true, + additionalParams: true + }, + { + label: 'Frequency Penalty', + name: 'frequencyPenalty', + type: 'number', + optional: true, + additionalParams: true + }, + { + label: 'Presence Penalty', + name: 'presencePenalty', + type: 'number', + optional: true, + additionalParams: true + }, + { + label: 'Timeout', + name: 'timeout', + type: 'number', + optional: true, + additionalParams: true + } + ] + } + + async init(nodeData: INodeData): Promise { + const azureOpenAIApiKey = nodeData.inputs?.azureOpenAIApiKey as string + const modelName = nodeData.inputs?.modelName as string + const temperature = nodeData.inputs?.temperature as string + const azureOpenAIApiInstanceName = nodeData.inputs?.azureOpenAIApiInstanceName as string + const azureOpenAIApiDeploymentName = nodeData.inputs?.azureOpenAIApiDeploymentName as string + const azureOpenAIApiVersion = nodeData.inputs?.azureOpenAIApiVersion as string + const maxTokens = nodeData.inputs?.maxTokens as string + const frequencyPenalty = nodeData.inputs?.frequencyPenalty as string + const presencePenalty = nodeData.inputs?.presencePenalty as string + const timeout = nodeData.inputs?.timeout as string + const streaming = nodeData.inputs?.streaming as boolean + + const obj: Partial & Partial = { + temperature: parseInt(temperature, 10), + modelName, + azureOpenAIApiKey, + azureOpenAIApiInstanceName, + azureOpenAIApiDeploymentName, + azureOpenAIApiVersion, + streaming: streaming ?? 
true
        }

        if (maxTokens) obj.maxTokens = parseInt(maxTokens, 10)
        // Frequency/presence penalties are fractional values in [-2.0, 2.0];
        // parseInt would truncate e.g. "0.5" to 0, silently discarding the
        // setting — parse them as floats instead.
        if (frequencyPenalty) obj.frequencyPenalty = parseFloat(frequencyPenalty)
        if (presencePenalty) obj.presencePenalty = parseFloat(presencePenalty)
        if (timeout) obj.timeout = parseInt(timeout, 10)

        const model = new ChatOpenAI(obj)
        return model
    }
}

module.exports = { nodeClass: AzureChatOpenAI_ChatModels }
diff --git a/packages/components/nodes/chatmodels/ChatAnthropic/ChatAnthropic.ts b/packages/components/nodes/chatmodels/ChatAnthropic/ChatAnthropic.ts
new file mode 100644
index 0000000000000000000000000000000000000000..708849e53aebeda5193954f9027d176f6479446d
--- /dev/null
+++ b/packages/components/nodes/chatmodels/ChatAnthropic/ChatAnthropic.ts
@@ -0,0 +1,138 @@
import { INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses } from '../../../src/utils'
import { AnthropicInput, ChatAnthropic } from 'langchain/chat_models/anthropic'

/**
 * Flowise node wrapping LangChain's ChatAnthropic (Claude) chat endpoint.
 */
class ChatAnthropic_ChatModels implements INode {
    label: string
    name: string
    type: string
    icon: string
    category: string
    description: string
    baseClasses: string[]
    inputs: INodeParams[]

    constructor() {
        this.label = 'ChatAnthropic'
        this.name = 'chatAnthropic'
        this.type = 'ChatAnthropic'
        this.icon = 'chatAnthropic.png'
        this.category = 'Chat Models'
        this.description = 'Wrapper around ChatAnthropic large language models that use the Chat endpoint'
        this.baseClasses = [this.type, ...getBaseClasses(ChatAnthropic)]
        this.inputs = [
            { label: 'ChatAnthropic Api Key', name: 'anthropicApiKey', type: 'password' },
            {
                label: 'Model Name',
                name: 'modelName',
                type: 'options',
                options: [
                    { label: 'claude-v1', name: 'claude-v1' },
                    { label: 'claude-v1-100k', name: 'claude-v1-100k' },
                    { label: 'claude-v1.0', name: 'claude-v1.0' },
                    { label: 'claude-v1.2', name: 'claude-v1.2' },
                    { label: 'claude-v1.3', name: 'claude-v1.3' },
                    { label: 'claude-v1.3-100k', name: 'claude-v1.3-100k' },
                    { label: 'claude-instant-v1', name: 'claude-instant-v1' },
                    { label: 'claude-instant-v1-100k', name: 'claude-instant-v1-100k' },
                    { label: 'claude-instant-v1.0', name: 'claude-instant-v1.0' },
                    { label: 'claude-instant-v1.1', name: 'claude-instant-v1.1' },
                    { label: 'claude-instant-v1.1-100k', name: 'claude-instant-v1.1-100k' }
                ],
                default: 'claude-v1',
                optional: true
            },
            { label: 'Temperature', name: 'temperature', type: 'number', default: 0.9, optional: true },
            { label: 'Max Tokens', name: 'maxTokensToSample', type: 'number', optional: true, additionalParams: true },
            { label: 'Top P', name: 'topP', type: 'number', optional: true, additionalParams: true },
            { label: 'Top K', name: 'topK', type: 'number', optional: true, additionalParams: true }
        ]
    }

    /**
     * Builds a ChatAnthropic instance from the node's inputs.
     * @param nodeData - UI-provided input values (numeric fields arrive as strings)
     * @returns configured ChatAnthropic model
     */
    async init(nodeData: INodeData): Promise<any> {
        const temperature = nodeData.inputs?.temperature as string
        const modelName = nodeData.inputs?.modelName as string
        const anthropicApiKey = nodeData.inputs?.anthropicApiKey as string
        const maxTokensToSample = nodeData.inputs?.maxTokensToSample as string
        const topP = nodeData.inputs?.topP as string
        const topK = nodeData.inputs?.topK as string
        const streaming = nodeData.inputs?.streaming as boolean

        const obj: Partial<AnthropicInput> & { anthropicApiKey?: string } = {
            // temperature/topP are fractional (0..1): parseFloat, not parseInt —
            // parseInt('0.9', 10) yields 0 and would clobber the default
            temperature: parseFloat(temperature),
            modelName,
            anthropicApiKey,
            streaming: streaming ?? true
        }

        if (maxTokensToSample) obj.maxTokensToSample = parseInt(maxTokensToSample, 10)
        if (topP) obj.topP = parseFloat(topP)
        // topK is an integer cutoff — parseInt is correct here
        if (topK) obj.topK = parseInt(topK, 10)

        const model = new ChatAnthropic(obj)
        return model
    }
}

module.exports = { nodeClass: ChatAnthropic_ChatModels }
diff --git a/packages/components/nodes/chatmodels/ChatAnthropic/chatAnthropic.png b/packages/components/nodes/chatmodels/ChatAnthropic/chatAnthropic.png
new file mode 100644
index 0000000000000000000000000000000000000000..42324cb7ca52c23f950f13bd54cdd38af8b02336
Binary files /dev/null and b/packages/components/nodes/chatmodels/ChatAnthropic/chatAnthropic.png differ
diff --git a/packages/components/nodes/chatmodels/ChatLocalAI/ChatLocalAI.ts b/packages/components/nodes/chatmodels/ChatLocalAI/ChatLocalAI.ts
new file mode 100644
index 0000000000000000000000000000000000000000..bd25a9fa65484507ae970fe30b4e819ed7352588
--- /dev/null
+++ b/packages/components/nodes/chatmodels/ChatLocalAI/ChatLocalAI.ts
@@ -0,0 +1,92 @@
import { INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses } from '../../../src/utils'
import { OpenAIChat } from 'langchain/llms/openai'
import { OpenAIChatInput } from 'langchain/chat_models/openai'

class ChatLocalAI_ChatModels implements INode {
    label: string
    name: string
    type: string
    icon: string
    category: string
    description: string
    baseClasses: string[]
    inputs: INodeParams[]

    constructor() {
        this.label = 'ChatLocalAI'
        this.name = 'chatLocalAI'
        this.type = 'ChatLocalAI'
        this.icon = 'localai.png'
        this.category = 'Chat Models'
        this.description = 'Use local LLMs like llama.cpp, gpt4all using LocalAI'
        this.baseClasses = [this.type, 'BaseChatModel', ...getBaseClasses(OpenAIChat)]
        this.inputs = [
            { label: 'Base Path', name: 'basePath', type: 'string', placeholder: 'http://localhost:8080/v1' },
            {
                label: 'Model Name',
                name: 'modelName',
                type: 'string',
                placeholder:
'gpt4all-lora-quantized.bin' + }, + { + label: 'Temperature', + name: 'temperature', + type: 'number', + default: 0.9, + optional: true + }, + { + label: 'Max Tokens', + name: 'maxTokens', + type: 'number', + optional: true, + additionalParams: true + }, + { + label: 'Top Probability', + name: 'topP', + type: 'number', + optional: true, + additionalParams: true + }, + { + label: 'Timeout', + name: 'timeout', + type: 'number', + optional: true, + additionalParams: true + } + ] + } + + async init(nodeData: INodeData): Promise { + const temperature = nodeData.inputs?.temperature as string + const modelName = nodeData.inputs?.modelName as string + const maxTokens = nodeData.inputs?.maxTokens as string + const topP = nodeData.inputs?.topP as string + const timeout = nodeData.inputs?.timeout as string + const basePath = nodeData.inputs?.basePath as string + + const obj: Partial & { openAIApiKey?: string } = { + temperature: parseInt(temperature, 10), + modelName, + openAIApiKey: 'sk-' + } + + if (maxTokens) obj.maxTokens = parseInt(maxTokens, 10) + if (topP) obj.topP = parseInt(topP, 10) + if (timeout) obj.timeout = parseInt(timeout, 10) + + const model = new OpenAIChat(obj, { basePath }) + + return model + } +} + +module.exports = { nodeClass: ChatLocalAI_ChatModels } diff --git a/packages/components/nodes/chatmodels/ChatLocalAI/localai.png b/packages/components/nodes/chatmodels/ChatLocalAI/localai.png new file mode 100644 index 0000000000000000000000000000000000000000..321403973dae17c99fbf0435440e36abd81e2f6b Binary files /dev/null and b/packages/components/nodes/chatmodels/ChatLocalAI/localai.png differ diff --git a/packages/components/nodes/chatmodels/ChatOpenAI/ChatOpenAI.ts b/packages/components/nodes/chatmodels/ChatOpenAI/ChatOpenAI.ts new file mode 100644 index 0000000000000000000000000000000000000000..f07fce60d5a13857485f71d3f2b0e2271d2a7acf --- /dev/null +++ b/packages/components/nodes/chatmodels/ChatOpenAI/ChatOpenAI.ts @@ -0,0 +1,142 @@ +import { INode, 
INodeData, INodeParams } from '../../../src/Interface' +import { getBaseClasses } from '../../../src/utils' +import { ChatOpenAI, OpenAIChatInput } from 'langchain/chat_models/openai' + +class ChatOpenAI_ChatModels implements INode { + label: string + name: string + type: string + icon: string + category: string + description: string + baseClasses: string[] + inputs: INodeParams[] + + constructor() { + this.label = 'ChatOpenAI' + this.name = 'chatOpenAI' + this.type = 'ChatOpenAI' + this.icon = 'openai.png' + this.category = 'Chat Models' + this.description = 'Wrapper around OpenAI large language models that use the Chat endpoint' + this.baseClasses = [this.type, ...getBaseClasses(ChatOpenAI)] + this.inputs = [ + { + label: 'OpenAI Api Key', + name: 'openAIApiKey', + type: 'password' + }, + { + label: 'Model Name', + name: 'modelName', + type: 'options', + options: [ + { + label: 'gpt-4', + name: 'gpt-4' + }, + { + label: 'gpt-4-0314', + name: 'gpt-4-0314' + }, + { + label: 'gpt-4-32k-0314', + name: 'gpt-4-32k-0314' + }, + { + label: 'gpt-3.5-turbo', + name: 'gpt-3.5-turbo' + }, + { + label: 'gpt-3.5-turbo-0301', + name: 'gpt-3.5-turbo-0301' + } + ], + default: 'gpt-3.5-turbo', + optional: true + }, + { + label: 'Temperature', + name: 'temperature', + type: 'number', + default: 0.9, + optional: true + }, + { + label: 'Max Tokens', + name: 'maxTokens', + type: 'number', + optional: true, + additionalParams: true + }, + { + label: 'Top Probability', + name: 'topP', + type: 'number', + optional: true, + additionalParams: true + }, + { + label: 'Frequency Penalty', + name: 'frequencyPenalty', + type: 'number', + optional: true, + additionalParams: true + }, + { + label: 'Presence Penalty', + name: 'presencePenalty', + type: 'number', + optional: true, + additionalParams: true + }, + { + label: 'Timeout', + name: 'timeout', + type: 'number', + optional: true, + additionalParams: true + }, + { + label: 'BasePath', + name: 'basepath', + type: 'string', + optional: true, + 
additionalParams: true + } + ] + } + + async init(nodeData: INodeData): Promise { + const temperature = nodeData.inputs?.temperature as string + const modelName = nodeData.inputs?.modelName as string + const openAIApiKey = nodeData.inputs?.openAIApiKey as string + const maxTokens = nodeData.inputs?.maxTokens as string + const topP = nodeData.inputs?.topP as string + const frequencyPenalty = nodeData.inputs?.frequencyPenalty as string + const presencePenalty = nodeData.inputs?.presencePenalty as string + const timeout = nodeData.inputs?.timeout as string + const streaming = nodeData.inputs?.streaming as boolean + const basePath = nodeData.inputs?.basepath as string + + const obj: Partial & { openAIApiKey?: string } = { + temperature: parseInt(temperature, 10), + modelName, + openAIApiKey, + streaming: streaming ?? true + } + + if (maxTokens) obj.maxTokens = parseInt(maxTokens, 10) + if (topP) obj.topP = parseInt(topP, 10) + if (frequencyPenalty) obj.frequencyPenalty = parseInt(frequencyPenalty, 10) + if (presencePenalty) obj.presencePenalty = parseInt(presencePenalty, 10) + if (timeout) obj.timeout = parseInt(timeout, 10) + + const model = new ChatOpenAI(obj, { + basePath + }) + return model + } +} + +module.exports = { nodeClass: ChatOpenAI_ChatModels } diff --git a/packages/components/nodes/chatmodels/ChatOpenAI/openai.png b/packages/components/nodes/chatmodels/ChatOpenAI/openai.png new file mode 100644 index 0000000000000000000000000000000000000000..de08a05b28979826c4cc669c4899789763a938a1 Binary files /dev/null and b/packages/components/nodes/chatmodels/ChatOpenAI/openai.png differ diff --git a/packages/components/nodes/documentloaders/Cheerio/Cheerio.ts b/packages/components/nodes/documentloaders/Cheerio/Cheerio.ts new file mode 100644 index 0000000000000000000000000000000000000000..9e1135059abe35ce640782935b1b643c770a08fa --- /dev/null +++ b/packages/components/nodes/documentloaders/Cheerio/Cheerio.ts @@ -0,0 +1,117 @@ +import { INode, INodeData, INodeParams } 
from '../../../src/Interface' +import { TextSplitter } from 'langchain/text_splitter' +import { CheerioWebBaseLoader } from 'langchain/document_loaders/web/cheerio' +import { test } from 'linkifyjs' +import { getAvailableURLs } from '../../../src' + +class Cheerio_DocumentLoaders implements INode { + label: string + name: string + description: string + type: string + icon: string + category: string + baseClasses: string[] + inputs: INodeParams[] + + constructor() { + this.label = 'Cheerio Web Scraper' + this.name = 'cheerioWebScraper' + this.type = 'Document' + this.icon = 'cheerio.svg' + this.category = 'Document Loaders' + this.description = `Load data from webpages` + this.baseClasses = [this.type] + this.inputs = [ + { + label: 'URL', + name: 'url', + type: 'string' + }, + { + label: 'Text Splitter', + name: 'textSplitter', + type: 'TextSplitter', + optional: true + }, + { + label: 'Web Scrap for Relative Links', + name: 'webScrap', + type: 'boolean', + optional: true, + additionalParams: true + }, + { + label: 'Web Scrap Links Limit', + name: 'limit', + type: 'number', + default: 10, + optional: true, + additionalParams: true + }, + { + label: 'Metadata', + name: 'metadata', + type: 'json', + optional: true, + additionalParams: true + } + ] + } + + async init(nodeData: INodeData): Promise { + const textSplitter = nodeData.inputs?.textSplitter as TextSplitter + const metadata = nodeData.inputs?.metadata + const webScrap = nodeData.inputs?.webScrap as boolean + let limit = nodeData.inputs?.limit as string + + let url = nodeData.inputs?.url as string + url = url.trim() + if (!test(url)) { + throw new Error('Invalid URL') + } + + const cheerioLoader = async (url: string): Promise => { + let docs = [] + const loader = new CheerioWebBaseLoader(url) + if (textSplitter) { + docs = await loader.loadAndSplit(textSplitter) + } else { + docs = await loader.load() + } + return docs + } + + let availableUrls: string[] + let docs = [] + if (webScrap) { + if (!limit) limit = 
'10' + availableUrls = await getAvailableURLs(url, parseInt(limit)) + for (let i = 0; i < availableUrls.length; i++) { + docs.push(...(await cheerioLoader(availableUrls[i]))) + } + } else { + docs = await cheerioLoader(url) + } + + if (metadata) { + const parsedMetadata = typeof metadata === 'object' ? metadata : JSON.parse(metadata) + let finaldocs = [] + for (const doc of docs) { + const newdoc = { + ...doc, + metadata: { + ...doc.metadata, + ...parsedMetadata + } + } + finaldocs.push(newdoc) + } + return finaldocs + } + + return docs + } +} + +module.exports = { nodeClass: Cheerio_DocumentLoaders } diff --git a/packages/components/nodes/documentloaders/Cheerio/cheerio.svg b/packages/components/nodes/documentloaders/Cheerio/cheerio.svg new file mode 100644 index 0000000000000000000000000000000000000000..8e3334b9fd7e987f15939c75e73c1f4084413626 --- /dev/null +++ b/packages/components/nodes/documentloaders/Cheerio/cheerio.svg @@ -0,0 +1,5 @@ + + + + + \ No newline at end of file diff --git a/packages/components/nodes/documentloaders/Confluence/Confluence.ts b/packages/components/nodes/documentloaders/Confluence/Confluence.ts new file mode 100644 index 0000000000000000000000000000000000000000..9a69be14efdfd16fc2bb740838a5e0f9589d6be3 --- /dev/null +++ b/packages/components/nodes/documentloaders/Confluence/Confluence.ts @@ -0,0 +1,118 @@ +import { INode, INodeData, INodeParams } from '../../../src/Interface' +import { TextSplitter } from 'langchain/text_splitter' +import { ConfluencePagesLoader, ConfluencePagesLoaderParams } from 'langchain/document_loaders/web/confluence' + +class Confluence_DocumentLoaders implements INode { + label: string + name: string + description: string + type: string + icon: string + category: string + baseClasses: string[] + inputs: INodeParams[] + + constructor() { + this.label = 'Confluence' + this.name = 'confluence' + this.type = 'Document' + this.icon = 'confluence.png' + this.category = 'Document Loaders' + this.description = `Load 
data from a Confluence Document`
        this.baseClasses = [this.type]
        this.inputs = [
            { label: 'Text Splitter', name: 'textSplitter', type: 'TextSplitter', optional: true },
            { label: 'Username', name: 'username', type: 'string', placeholder: '' },
            { label: 'Access Token', name: 'accessToken', type: 'password', placeholder: '' },
            { label: 'Base URL', name: 'baseUrl', type: 'string', placeholder: 'https://example.atlassian.net/wiki' },
            { label: 'Space Key', name: 'spaceKey', type: 'string', placeholder: '~EXAMPLE362906de5d343d49dcdbae5dEXAMPLE' },
            { label: 'Limit', name: 'limit', type: 'number', default: 0, optional: true },
            { label: 'Metadata', name: 'metadata', type: 'json', optional: true, additionalParams: true }
        ]
    }

    /**
     * Loads all pages of a Confluence space into documents.
     */
    async init(nodeData: INodeData): Promise<any> {
        const username = nodeData.inputs?.username as string
        const accessToken = nodeData.inputs?.accessToken as string
        const spaceKey = nodeData.inputs?.spaceKey as string
        const baseUrl = nodeData.inputs?.baseUrl as string
        const limit = nodeData.inputs?.limit as number
        const textSplitter = nodeData.inputs?.textSplitter as TextSplitter
        const metadata = nodeData.inputs?.metadata

        const options: ConfluencePagesLoaderParams = {
            username,
            accessToken,
            baseUrl,
            spaceKey,
            limit
        }

        const loader = new ConfluencePagesLoader(options)

        let docs = []

        if (textSplitter) {
            docs = await loader.loadAndSplit(textSplitter)
        } else {
            docs = await loader.load()
        }

        if (metadata) {
            const parsedMetadata = typeof metadata === 'object' ? metadata : JSON.parse(metadata)
            let finaldocs = []
            for (const doc of docs) {
                const newdoc = {
                    ...doc,
                    metadata: {
                        ...doc.metadata,
                        ...parsedMetadata
                    }
                }
                finaldocs.push(newdoc)
            }
            return finaldocs
        }

        return docs
    }
}

module.exports = { nodeClass: Confluence_DocumentLoaders }
diff --git a/packages/components/nodes/documentloaders/Confluence/confluence.png b/packages/components/nodes/documentloaders/Confluence/confluence.png
new file mode 100644
index 0000000000000000000000000000000000000000..3cbb7b3dc2492867afc54c6ddca6ae2219b91aec
Binary files /dev/null and b/packages/components/nodes/documentloaders/Confluence/confluence.png differ
diff --git a/packages/components/nodes/documentloaders/Csv/Csv.png b/packages/components/nodes/documentloaders/Csv/Csv.png
new file mode 100644
index 0000000000000000000000000000000000000000..41b84e16a04dd8b6b6eb1606f4fa9f5317bffc96
Binary files /dev/null and b/packages/components/nodes/documentloaders/Csv/Csv.png differ
diff --git a/packages/components/nodes/documentloaders/Csv/Csv.ts b/packages/components/nodes/documentloaders/Csv/Csv.ts
new file mode 100644
index 0000000000000000000000000000000000000000..f4b36ad03ca06ea258ee338e2cbc93b674eab540
--- /dev/null
+++ b/packages/components/nodes/documentloaders/Csv/Csv.ts
@@ -0,0 +1,105 @@
import { INode, INodeData, INodeParams } from '../../../src/Interface'
import { TextSplitter } from 'langchain/text_splitter'
import { CSVLoader } from 'langchain/document_loaders/fs/csv'

/**
 * Flowise node loading one or more uploaded CSV files into documents.
 */
class Csv_DocumentLoaders implements INode {
    label: string
    name: string
    description: string
    type: string
    icon: string
    category: string
    baseClasses: string[]
    inputs: INodeParams[]

    constructor() {
        this.label = 'Csv File'
        this.name = 'csvFile'
        this.type = 'Document'
        this.icon = 'Csv.png'
        this.category = 'Document Loaders'
        this.description = `Load data from CSV files`
        this.baseClasses = [this.type]
        this.inputs = [
            { label: 'Csv File', name: 'csvFile', type: 'file', fileType: '.csv' },
            { label: 'Text Splitter', name: 'textSplitter', type: 'TextSplitter', optional: true },
            {
                label: 'Single Column Extraction',
                name: 'columnName',
                type: 'string',
                description: 'Extracting a single column',
                placeholder: 'Enter column name',
                optional: true
            },
            { label: 'Metadata', name: 'metadata', type: 'json', optional: true, additionalParams: true }
        ]
    }

    async init(nodeData: INodeData): Promise<any> {
        const textSplitter = nodeData.inputs?.textSplitter as TextSplitter
        const csvFileBase64 = nodeData.inputs?.csvFile as string
        const columnName = nodeData.inputs?.columnName as string
        const metadata = nodeData.inputs?.metadata

        let alldocs = []
        let files: string[] = []

        // Multiple uploads arrive as a JSON-encoded array of data URIs
        if (csvFileBase64.startsWith('[') && csvFileBase64.endsWith(']')) {
            files = JSON.parse(csvFileBase64)
        } else {
            files = [csvFileBase64]
        }

        // columnName is optional: guard before calling trim() — the previous
        // code threw a TypeError when the field was left empty
        const column = columnName && columnName.trim().length > 0 ? columnName.trim() : undefined

        for (const file of files) {
            const splitDataURI = file.split(',')
            splitDataURI.pop()
            const bf = Buffer.from(splitDataURI.pop() || '', 'base64')
            const blob = new Blob([bf])
            const loader = new CSVLoader(blob, column)

            if (textSplitter) {
                const docs = await loader.loadAndSplit(textSplitter)
                alldocs.push(...docs)
            } else {
                const docs = await loader.load()
                alldocs.push(...docs)
            }
        }

        if (metadata) {
            const parsedMetadata = typeof metadata === 'object' ? metadata : JSON.parse(metadata)
            let finaldocs = []
            for (const doc of alldocs) {
                const newdoc = {
                    ...doc,
                    metadata: {
                        ...doc.metadata,
                        ...parsedMetadata
                    }
                }
                finaldocs.push(newdoc)
            }
            return finaldocs
        }

        return alldocs
    }
}

module.exports = { nodeClass: Csv_DocumentLoaders }
diff --git a/packages/components/nodes/documentloaders/Docx/Docx.png b/packages/components/nodes/documentloaders/Docx/Docx.png
new file mode 100644
index 0000000000000000000000000000000000000000..6d527bd2dd4289623b925b0ab76ded6352b0641f
Binary files /dev/null and b/packages/components/nodes/documentloaders/Docx/Docx.png differ
diff --git a/packages/components/nodes/documentloaders/Docx/Docx.ts b/packages/components/nodes/documentloaders/Docx/Docx.ts
new file mode 100644
index 0000000000000000000000000000000000000000..e27991a514748eb89885971af2acd0d72f94fb35
--- /dev/null
+++ b/packages/components/nodes/documentloaders/Docx/Docx.ts
@@ -0,0 +1,96 @@
import { INode, INodeData, INodeParams } from '../../../src/Interface'
import { TextSplitter } from 'langchain/text_splitter'
import { DocxLoader } from 'langchain/document_loaders/fs/docx'

class Docx_DocumentLoaders implements INode {
    label: string
    name: string
    description: string
    type: string
    icon: string
    category: string
    baseClasses: string[]
    inputs: INodeParams[]

    constructor() {
        this.label = 'Docx File'
        this.name = 'docxFile'
        this.type = 'Document'
        this.icon = 'Docx.png'
        this.category = 'Document Loaders'
        this.description = `Load data from DOCX files`
        this.baseClasses = [this.type]
        this.inputs = [
            { label: 'Docx File', name: 'docxFile', type: 'file', fileType: '.docx' },
            { label: 'Text Splitter', name: 'textSplitter', type: 'TextSplitter', optional: true },
            { label: 'Metadata', name: 'metadata', type: 'json', optional: true, additionalParams: true }
        ]
    }

    async init(nodeData: INodeData): Promise<any> {
        const textSplitter = 
nodeData.inputs?.textSplitter as TextSplitter + const docxFileBase64 = nodeData.inputs?.docxFile as string + const metadata = nodeData.inputs?.metadata + + let alldocs = [] + let files: string[] = [] + + if (docxFileBase64.startsWith('[') && docxFileBase64.endsWith(']')) { + files = JSON.parse(docxFileBase64) + } else { + files = [docxFileBase64] + } + + for (const file of files) { + const splitDataURI = file.split(',') + splitDataURI.pop() + const bf = Buffer.from(splitDataURI.pop() || '', 'base64') + const blob = new Blob([bf]) + const loader = new DocxLoader(blob) + + if (textSplitter) { + const docs = await loader.loadAndSplit(textSplitter) + alldocs.push(...docs) + } else { + const docs = await loader.load() + alldocs.push(...docs) + } + } + + if (metadata) { + const parsedMetadata = typeof metadata === 'object' ? metadata : JSON.parse(metadata) + let finaldocs = [] + for (const doc of alldocs) { + const newdoc = { + ...doc, + metadata: { + ...doc.metadata, + ...parsedMetadata + } + } + finaldocs.push(newdoc) + } + return finaldocs + } + + return alldocs + } +} + +module.exports = { nodeClass: Docx_DocumentLoaders } diff --git a/packages/components/nodes/documentloaders/Folder/Folder.ts b/packages/components/nodes/documentloaders/Folder/Folder.ts new file mode 100644 index 0000000000000000000000000000000000000000..2290133e4192b96795d112a8a02a69056e0ff343 --- /dev/null +++ b/packages/components/nodes/documentloaders/Folder/Folder.ts @@ -0,0 +1,92 @@ +import { INode, INodeData, INodeParams } from '../../../src/Interface' +import { TextSplitter } from 'langchain/text_splitter' +import { TextLoader } from 'langchain/document_loaders/fs/text' +import { DirectoryLoader } from 'langchain/document_loaders/fs/directory' +import { JSONLoader } from 'langchain/document_loaders/fs/json' +import { CSVLoader } from 'langchain/document_loaders/fs/csv' +import { PDFLoader } from 'langchain/document_loaders/fs/pdf' +import { DocxLoader } from 
'langchain/document_loaders/fs/docx' + +class Folder_DocumentLoaders implements INode { + label: string + name: string + description: string + type: string + icon: string + category: string + baseClasses: string[] + inputs: INodeParams[] + + constructor() { + this.label = 'Folder with Files' + this.name = 'folderFiles' + this.type = 'Document' + this.icon = 'folder.svg' + this.category = 'Document Loaders' + this.description = `Load data from folder with multiple files` + this.baseClasses = [this.type] + this.inputs = [ + { + label: 'Folder Path', + name: 'folderPath', + type: 'string', + placeholder: '' + }, + { + label: 'Text Splitter', + name: 'textSplitter', + type: 'TextSplitter', + optional: true + }, + { + label: 'Metadata', + name: 'metadata', + type: 'json', + optional: true, + additionalParams: true + } + ] + } + + async init(nodeData: INodeData): Promise { + const textSplitter = nodeData.inputs?.textSplitter as TextSplitter + const folderPath = nodeData.inputs?.folderPath as string + const metadata = nodeData.inputs?.metadata + + const loader = new DirectoryLoader(folderPath, { + '.json': (path) => new JSONLoader(path), + '.txt': (path) => new TextLoader(path), + '.csv': (path) => new CSVLoader(path), + '.docx': (path) => new DocxLoader(path), + // @ts-ignore + '.pdf': (path) => new PDFLoader(path, { pdfjs: () => import('pdf-parse/lib/pdf.js/v1.10.100/build/pdf.js') }) + }) + let docs = [] + + if (textSplitter) { + docs = await loader.loadAndSplit(textSplitter) + } else { + docs = await loader.load() + } + + if (metadata) { + const parsedMetadata = typeof metadata === 'object' ? 
metadata : JSON.parse(metadata) + let finaldocs = [] + for (const doc of docs) { + const newdoc = { + ...doc, + metadata: { + ...doc.metadata, + ...parsedMetadata + } + } + finaldocs.push(newdoc) + } + return finaldocs + } + + return docs + } +} + +module.exports = { nodeClass: Folder_DocumentLoaders } diff --git a/packages/components/nodes/documentloaders/Folder/folder.svg b/packages/components/nodes/documentloaders/Folder/folder.svg new file mode 100644 index 0000000000000000000000000000000000000000..eb2b9de9e8465bbcda6d1b4f1d38cd1137f540b0 --- /dev/null +++ b/packages/components/nodes/documentloaders/Folder/folder.svg @@ -0,0 +1,4 @@ + + + + \ No newline at end of file diff --git a/packages/components/nodes/documentloaders/Github/Github.ts b/packages/components/nodes/documentloaders/Github/Github.ts new file mode 100644 index 0000000000000000000000000000000000000000..bbaad3cb016dd7a5ef91be63501e291e04936f00 --- /dev/null +++ b/packages/components/nodes/documentloaders/Github/Github.ts @@ -0,0 +1,101 @@ +import { INode, INodeData, INodeParams } from '../../../src/Interface' +import { TextSplitter } from 'langchain/text_splitter' +import { GithubRepoLoader, GithubRepoLoaderParams } from 'langchain/document_loaders/web/github' + +class Github_DocumentLoaders implements INode { + label: string + name: string + description: string + type: string + icon: string + category: string + baseClasses: string[] + inputs: INodeParams[] + + constructor() { + this.label = 'Github' + this.name = 'github' + this.type = 'Document' + this.icon = 'github.png' + this.category = 'Document Loaders' + this.description = `Load data from a GitHub repository` + this.baseClasses = [this.type] + this.inputs = [ + { + label: 'Repo Link', + name: 'repoLink', + type: 'string', + placeholder: 'https://github.com/FlowiseAI/Flowise' + }, + { + label: 'Branch', + name: 'branch', + type: 'string', + default: 'main' + }, + { + label: 'Access Token', + name: 'accessToken', + type: 'password', + 
placeholder: '', + optional: true + }, + { + label: 'Recursive', + name: 'recursive', + type: 'boolean', + optional: true + }, + { + label: 'Text Splitter', + name: 'textSplitter', + type: 'TextSplitter', + optional: true + }, + { + label: 'Metadata', + name: 'metadata', + type: 'json', + optional: true, + additionalParams: true + } + ] + } + + async init(nodeData: INodeData): Promise { + const repoLink = nodeData.inputs?.repoLink as string + const branch = nodeData.inputs?.branch as string + const recursive = nodeData.inputs?.recursive as boolean + const accessToken = nodeData.inputs?.accessToken as string + const textSplitter = nodeData.inputs?.textSplitter as TextSplitter + const metadata = nodeData.inputs?.metadata + + const options: GithubRepoLoaderParams = { + branch, + recursive, + unknown: 'warn' + } + + if (accessToken) options.accessToken = accessToken + + const loader = new GithubRepoLoader(repoLink, options) + const docs = textSplitter ? await loader.loadAndSplit(textSplitter) : await loader.load() + + if (metadata) { + const parsedMetadata = typeof metadata === 'object' ? 
metadata : JSON.parse(metadata) + return docs.map((doc) => { + return { + ...doc, + metadata: { + ...doc.metadata, + ...parsedMetadata + } + } + }) + } + + return docs + } +} + +module.exports = { nodeClass: Github_DocumentLoaders } diff --git a/packages/components/nodes/documentloaders/Github/github.png b/packages/components/nodes/documentloaders/Github/github.png new file mode 100644 index 0000000000000000000000000000000000000000..e440081876df135f6d27f5cf764233f85ac6194b Binary files /dev/null and b/packages/components/nodes/documentloaders/Github/github.png differ diff --git a/packages/components/nodes/documentloaders/Json/Json.ts b/packages/components/nodes/documentloaders/Json/Json.ts new file mode 100644 index 0000000000000000000000000000000000000000..9177df5cb896de14d1904746f000f0b80be3a8bc --- /dev/null +++ b/packages/components/nodes/documentloaders/Json/Json.ts @@ -0,0 +1,111 @@ +import { INode, INodeData, INodeParams } from '../../../src/Interface' +import { TextSplitter } from 'langchain/text_splitter' +import { JSONLoader } from 'langchain/document_loaders/fs/json' + +class Json_DocumentLoaders implements INode { + label: string + name: string + description: string + type: string + icon: string + category: string + baseClasses: string[] + inputs: INodeParams[] + + constructor() { + this.label = 'Json File' + this.name = 'jsonFile' + this.type = 'Document' + this.icon = 'json.svg' + this.category = 'Document Loaders' + this.description = `Load data from JSON files` + this.baseClasses = [this.type] + this.inputs = [ + { + label: 'Json File', + name: 'jsonFile', + type: 'file', + fileType: '.json' + }, + { + label: 'Text Splitter', + name: 'textSplitter', + type: 'TextSplitter', + optional: true + }, + { + label: 'Pointers Extraction (separated by commas)', + name: 'pointersName', + type: 'string', + description: 'Extracting multiple pointers', + placeholder: 'Enter pointers name', + optional: true + }, + { + label: 'Metadata', + name: 'metadata', + type: 
'json', + optional: true, + additionalParams: true + } + ] + } + + async init(nodeData: INodeData): Promise { + const textSplitter = nodeData.inputs?.textSplitter as TextSplitter + const jsonFileBase64 = nodeData.inputs?.jsonFile as string + const pointersName = nodeData.inputs?.pointersName as string + const metadata = nodeData.inputs?.metadata + + let pointers: string[] = [] + if (pointersName) { + const outputString = pointersName.replace(/[^a-zA-Z0-9,]+/g, ',') + pointers = outputString.split(',').map((pointer) => '/' + pointer.trim()) + } + + let alldocs = [] + let files: string[] = [] + + if (jsonFileBase64.startsWith('[') && jsonFileBase64.endsWith(']')) { + files = JSON.parse(jsonFileBase64) + } else { + files = [jsonFileBase64] + } + + for (const file of files) { + const splitDataURI = file.split(',') + splitDataURI.pop() + const bf = Buffer.from(splitDataURI.pop() || '', 'base64') + const blob = new Blob([bf]) + const loader = new JSONLoader(blob, pointers.length != 0 ? pointers : undefined) + + if (textSplitter) { + const docs = await loader.loadAndSplit(textSplitter) + alldocs.push(...docs) + } else { + const docs = await loader.load() + alldocs.push(...docs) + } + } + + if (metadata) { + const parsedMetadata = typeof metadata === 'object' ? 
metadata : JSON.parse(metadata) + let finaldocs = [] + for (const doc of alldocs) { + const newdoc = { + ...doc, + metadata: { + ...doc.metadata, + ...parsedMetadata + } + } + finaldocs.push(newdoc) + } + return finaldocs + } + + return alldocs + } +} + +module.exports = { nodeClass: Json_DocumentLoaders } diff --git a/packages/components/nodes/documentloaders/Json/json.svg b/packages/components/nodes/documentloaders/Json/json.svg new file mode 100644 index 0000000000000000000000000000000000000000..c27646e2dad38a15aa1ede80e52b8a9de92364cd --- /dev/null +++ b/packages/components/nodes/documentloaders/Json/json.svg @@ -0,0 +1,7 @@ + + + + + + + \ No newline at end of file diff --git a/packages/components/nodes/documentloaders/NotionDB/NotionDB.ts b/packages/components/nodes/documentloaders/NotionDB/NotionDB.ts new file mode 100644 index 0000000000000000000000000000000000000000..71e5e507a6a61d5ab59a7c6d39b1331266f31c88 --- /dev/null +++ b/packages/components/nodes/documentloaders/NotionDB/NotionDB.ts @@ -0,0 +1,101 @@ +import { INode, INodeData, INodeParams } from '../../../src/Interface' +import { TextSplitter } from 'langchain/text_splitter' +import { NotionDBLoader, NotionDBLoaderParams } from 'langchain/document_loaders/web/notiondb' + +class NotionDB_DocumentLoaders implements INode { + label: string + name: string + description: string + type: string + icon: string + category: string + baseClasses: string[] + inputs: INodeParams[] + + constructor() { + this.label = 'Notion Database' + this.name = 'notionDB' + this.type = 'Document' + this.icon = 'notion.png' + this.category = 'Document Loaders' + this.description = 'Load data from Notion Database ID' + this.baseClasses = [this.type] + this.inputs = [ + { + label: 'Text Splitter', + name: 'textSplitter', + type: 'TextSplitter', + optional: true + }, + { + label: 'Notion Database Id', + name: 'databaseId', + type: 'string', + description: + 'If your URL looks like - https://www.notion.so/?v=, then is the database 
ID' + }, + { + label: 'Notion Integration Token', + name: 'notionIntegrationToken', + type: 'password', + description: + 'You can find integration token here' + }, + { + label: 'Page Size Limit', + name: 'pageSizeLimit', + type: 'number', + default: 10 + }, + { + label: 'Metadata', + name: 'metadata', + type: 'json', + optional: true, + additionalParams: true + } + ] + } + + async init(nodeData: INodeData): Promise { + const textSplitter = nodeData.inputs?.textSplitter as TextSplitter + const databaseId = nodeData.inputs?.databaseId as string + const notionIntegrationToken = nodeData.inputs?.notionIntegrationToken as string + const pageSizeLimit = nodeData.inputs?.pageSizeLimit as string + const metadata = nodeData.inputs?.metadata + + const obj: NotionDBLoaderParams = { + pageSizeLimit: pageSizeLimit ? parseInt(pageSizeLimit, 10) : 10, + databaseId, + notionIntegrationToken + } + const loader = new NotionDBLoader(obj) + + let docs = [] + if (textSplitter) { + docs = await loader.loadAndSplit(textSplitter) + } else { + docs = await loader.load() + } + + if (metadata) { + const parsedMetadata = typeof metadata === 'object' ? 
metadata : JSON.parse(metadata) + let finaldocs = [] + for (const doc of docs) { + const newdoc = { + ...doc, + metadata: { + ...doc.metadata, + ...parsedMetadata + } + } + finaldocs.push(newdoc) + } + return finaldocs + } + + return docs + } +} + +module.exports = { nodeClass: NotionDB_DocumentLoaders } diff --git a/packages/components/nodes/documentloaders/NotionDB/notion.png b/packages/components/nodes/documentloaders/NotionDB/notion.png new file mode 100644 index 0000000000000000000000000000000000000000..391051679c8cc33e7e52891593147283bf93dcb0 Binary files /dev/null and b/packages/components/nodes/documentloaders/NotionDB/notion.png differ diff --git a/packages/components/nodes/documentloaders/NotionFolder/NotionFolder.ts b/packages/components/nodes/documentloaders/NotionFolder/NotionFolder.ts new file mode 100644 index 0000000000000000000000000000000000000000..11b8165b093ed1afacd217cfef9c9c47b83a91fa --- /dev/null +++ b/packages/components/nodes/documentloaders/NotionFolder/NotionFolder.ts @@ -0,0 +1,81 @@ +import { INode, INodeData, INodeParams } from '../../../src/Interface' +import { TextSplitter } from 'langchain/text_splitter' +import { NotionLoader } from 'langchain/document_loaders/fs/notion' + +class NotionFolder_DocumentLoaders implements INode { + label: string + name: string + description: string + type: string + icon: string + category: string + baseClasses: string[] + inputs: INodeParams[] + + constructor() { + this.label = 'Notion Folder' + this.name = 'notionFolder' + this.type = 'Document' + this.icon = 'notion.png' + this.category = 'Document Loaders' + this.description = 'Load data from the exported and unzipped Notion folder' + this.baseClasses = [this.type] + this.inputs = [ + { + label: 'Notion Folder', + name: 'notionFolder', + type: 'string', + description: 'Get folder path', + placeholder: 'Paste folder path' + }, + { + label: 'Text Splitter', + name: 'textSplitter', + type: 'TextSplitter', + optional: true + }, + { + label: 
'Metadata', + name: 'metadata', + type: 'json', + optional: true, + additionalParams: true + } + ] + } + + async init(nodeData: INodeData): Promise { + const textSplitter = nodeData.inputs?.textSplitter as TextSplitter + const notionFolder = nodeData.inputs?.notionFolder as string + const metadata = nodeData.inputs?.metadata + + const loader = new NotionLoader(notionFolder) + let docs = [] + + if (textSplitter) { + docs = await loader.loadAndSplit(textSplitter) + } else { + docs = await loader.load() + } + + if (metadata) { + const parsedMetadata = typeof metadata === 'object' ? metadata : JSON.parse(metadata) + let finaldocs = [] + for (const doc of docs) { + const newdoc = { + ...doc, + metadata: { + ...doc.metadata, + ...parsedMetadata + } + } + finaldocs.push(newdoc) + } + return finaldocs + } + + return docs + } +} + +module.exports = { nodeClass: NotionFolder_DocumentLoaders } diff --git a/packages/components/nodes/documentloaders/NotionFolder/notion.png b/packages/components/nodes/documentloaders/NotionFolder/notion.png new file mode 100644 index 0000000000000000000000000000000000000000..391051679c8cc33e7e52891593147283bf93dcb0 Binary files /dev/null and b/packages/components/nodes/documentloaders/NotionFolder/notion.png differ diff --git a/packages/components/nodes/documentloaders/Pdf/Pdf.ts b/packages/components/nodes/documentloaders/Pdf/Pdf.ts new file mode 100644 index 0000000000000000000000000000000000000000..bc36f8cb5cdae470bcfd229501aaf44e2d3f5123 --- /dev/null +++ b/packages/components/nodes/documentloaders/Pdf/Pdf.ts @@ -0,0 +1,127 @@ +import { INode, INodeData, INodeParams } from '../../../src/Interface' +import { TextSplitter } from 'langchain/text_splitter' +import { PDFLoader } from 'langchain/document_loaders/fs/pdf' + +class Pdf_DocumentLoaders implements INode { + label: string + name: string + description: string + type: string + icon: string + category: string + baseClasses: string[] + inputs: INodeParams[] + + constructor() { + this.label 
= 'Pdf File' + this.name = 'pdfFile' + this.type = 'Document' + this.icon = 'pdf.svg' + this.category = 'Document Loaders' + this.description = `Load data from PDF files` + this.baseClasses = [this.type] + this.inputs = [ + { + label: 'Pdf File', + name: 'pdfFile', + type: 'file', + fileType: '.pdf' + }, + { + label: 'Text Splitter', + name: 'textSplitter', + type: 'TextSplitter', + optional: true + }, + { + label: 'Usage', + name: 'usage', + type: 'options', + options: [ + { + label: 'One document per page', + name: 'perPage' + }, + { + label: 'One document per file', + name: 'perFile' + } + ], + default: 'perPage' + }, + { + label: 'Metadata', + name: 'metadata', + type: 'json', + optional: true, + additionalParams: true + } + ] + } + + async init(nodeData: INodeData): Promise { + const textSplitter = nodeData.inputs?.textSplitter as TextSplitter + const pdfFileBase64 = nodeData.inputs?.pdfFile as string + const usage = nodeData.inputs?.usage as string + const metadata = nodeData.inputs?.metadata + + let alldocs = [] + let files: string[] = [] + + if (pdfFileBase64.startsWith('[') && pdfFileBase64.endsWith(']')) { + files = JSON.parse(pdfFileBase64) + } else { + files = [pdfFileBase64] + } + + for (const file of files) { + const splitDataURI = file.split(',') + splitDataURI.pop() + const bf = Buffer.from(splitDataURI.pop() || '', 'base64') + if (usage === 'perFile') { + const loader = new PDFLoader(new Blob([bf]), { + splitPages: false, + // @ts-ignore + pdfjs: () => import('pdf-parse/lib/pdf.js/v1.10.100/build/pdf.js') + }) + if (textSplitter) { + const docs = await loader.loadAndSplit(textSplitter) + alldocs.push(...docs) + } else { + const docs = await loader.load() + alldocs.push(...docs) + } + } else { + // @ts-ignore + const loader = new PDFLoader(new Blob([bf]), { pdfjs: () => import('pdf-parse/lib/pdf.js/v1.10.100/build/pdf.js') }) + if (textSplitter) { + const docs = await loader.loadAndSplit(textSplitter) + alldocs.push(...docs) + } else { + const docs 
= await loader.load() + alldocs.push(...docs) + } + } + } + + if (metadata) { + const parsedMetadata = typeof metadata === 'object' ? metadata : JSON.parse(metadata) + let finaldocs = [] + for (const doc of alldocs) { + const newdoc = { + ...doc, + metadata: { + ...doc.metadata, + ...parsedMetadata + } + } + finaldocs.push(newdoc) + } + return finaldocs + } + + return alldocs + } +} + +module.exports = { nodeClass: Pdf_DocumentLoaders } diff --git a/packages/components/nodes/documentloaders/Pdf/pdf.svg b/packages/components/nodes/documentloaders/Pdf/pdf.svg new file mode 100644 index 0000000000000000000000000000000000000000..20af94f8b9c43dc664fd82746bc5ff7e827aa9f4 --- /dev/null +++ b/packages/components/nodes/documentloaders/Pdf/pdf.svg @@ -0,0 +1,7 @@ + + + + + + + \ No newline at end of file diff --git a/packages/components/nodes/documentloaders/Text/Text.ts b/packages/components/nodes/documentloaders/Text/Text.ts new file mode 100644 index 0000000000000000000000000000000000000000..63e7e0e26f2f14bf498db6904b8d311e9148e3b0 --- /dev/null +++ b/packages/components/nodes/documentloaders/Text/Text.ts @@ -0,0 +1,95 @@ +import { INode, INodeData, INodeParams } from '../../../src/Interface' +import { TextSplitter } from 'langchain/text_splitter' +import { TextLoader } from 'langchain/document_loaders/fs/text' + +class Text_DocumentLoaders implements INode { + label: string + name: string + description: string + type: string + icon: string + category: string + baseClasses: string[] + inputs: INodeParams[] + + constructor() { + this.label = 'Text File' + this.name = 'textFile' + this.type = 'Document' + this.icon = 'textFile.svg' + this.category = 'Document Loaders' + this.description = `Load data from text files` + this.baseClasses = [this.type] + this.inputs = [ + { + label: 'Txt File', + name: 'txtFile', + type: 'file', + fileType: '.txt' + }, + { + label: 'Text Splitter', + name: 'textSplitter', + type: 'TextSplitter', + optional: true + }, + { + label: 'Metadata', + 
name: 'metadata', + type: 'json', + optional: true, + additionalParams: true + } + ] + } + + async init(nodeData: INodeData): Promise { + const textSplitter = nodeData.inputs?.textSplitter as TextSplitter + const txtFileBase64 = nodeData.inputs?.txtFile as string + const metadata = nodeData.inputs?.metadata + + let alldocs = [] + let files: string[] = [] + + if (txtFileBase64.startsWith('[') && txtFileBase64.endsWith(']')) { + files = JSON.parse(txtFileBase64) + } else { + files = [txtFileBase64] + } + + for (const file of files) { + const splitDataURI = file.split(',') + splitDataURI.pop() + const bf = Buffer.from(splitDataURI.pop() || '', 'base64') + const blob = new Blob([bf]) + const loader = new TextLoader(blob) + + if (textSplitter) { + const docs = await loader.loadAndSplit(textSplitter) + alldocs.push(...docs) + } else { + const docs = await loader.load() + alldocs.push(...docs) + } + } + + if (metadata) { + const parsedMetadata = typeof metadata === 'object' ? metadata : JSON.parse(metadata) + let finaldocs = [] + for (const doc of alldocs) { + const newdoc = { + ...doc, + metadata: { + ...doc.metadata, + ...parsedMetadata + } + } + finaldocs.push(newdoc) + } + return finaldocs + } + return alldocs + } +} + +module.exports = { nodeClass: Text_DocumentLoaders } diff --git a/packages/components/nodes/documentloaders/Text/textFile.svg b/packages/components/nodes/documentloaders/Text/textFile.svg new file mode 100644 index 0000000000000000000000000000000000000000..200be563fc260f3d144da6e7d0307aff4b773cc0 --- /dev/null +++ b/packages/components/nodes/documentloaders/Text/textFile.svg @@ -0,0 +1,7 @@ + + + + + + + \ No newline at end of file diff --git a/packages/components/nodes/embeddings/AzureOpenAIEmbedding/Azure.svg b/packages/components/nodes/embeddings/AzureOpenAIEmbedding/Azure.svg new file mode 100644 index 0000000000000000000000000000000000000000..51eb62535a52e016a28807f036c37acd9380b19f --- /dev/null +++ 
b/packages/components/nodes/embeddings/AzureOpenAIEmbedding/Azure.svg @@ -0,0 +1,5 @@ + + + + + \ No newline at end of file diff --git a/packages/components/nodes/embeddings/AzureOpenAIEmbedding/AzureOpenAIEmbedding.ts b/packages/components/nodes/embeddings/AzureOpenAIEmbedding/AzureOpenAIEmbedding.ts new file mode 100644 index 0000000000000000000000000000000000000000..7fe61e622aa53767918b638016c0d643d146a3d6 --- /dev/null +++ b/packages/components/nodes/embeddings/AzureOpenAIEmbedding/AzureOpenAIEmbedding.ts @@ -0,0 +1,99 @@ +import { AzureOpenAIInput } from 'langchain/chat_models/openai' +import { INode, INodeData, INodeParams } from '../../../src/Interface' +import { getBaseClasses } from '../../../src/utils' +import { OpenAIEmbeddings, OpenAIEmbeddingsParams } from 'langchain/embeddings/openai' + +class AzureOpenAIEmbedding_Embeddings implements INode { + label: string + name: string + type: string + icon: string + category: string + description: string + baseClasses: string[] + inputs: INodeParams[] + + constructor() { + this.label = 'Azure OpenAI Embeddings' + this.name = 'azureOpenAIEmbeddings' + this.type = 'AzureOpenAIEmbeddings' + this.icon = 'Azure.svg' + this.category = 'Embeddings' + this.description = 'Azure OpenAI API to generate embeddings for a given text' + this.baseClasses = [this.type, ...getBaseClasses(OpenAIEmbeddings)] + this.inputs = [ + { + label: 'Azure OpenAI Api Key', + name: 'azureOpenAIApiKey', + type: 'password' + }, + { + label: 'Azure OpenAI Api Instance Name', + name: 'azureOpenAIApiInstanceName', + type: 'string', + placeholder: 'YOUR-INSTANCE-NAME' + }, + { + label: 'Azure OpenAI Api Deployment Name', + name: 'azureOpenAIApiDeploymentName', + type: 'string', + placeholder: 'YOUR-DEPLOYMENT-NAME' + }, + { + label: 'Azure OpenAI Api Version', + name: 'azureOpenAIApiVersion', + type: 'options', + options: [ + { + label: '2023-03-15-preview', + name: '2023-03-15-preview' + }, + { + label: '2022-12-01', + name: '2022-12-01' + } + ], + 
default: '2023-03-15-preview' + }, + { + label: 'Batch Size', + name: 'batchSize', + type: 'number', + default: '1', + optional: true, + additionalParams: true + }, + { + label: 'Timeout', + name: 'timeout', + type: 'number', + optional: true, + additionalParams: true + } + ] + } + + async init(nodeData: INodeData): Promise { + const azureOpenAIApiKey = nodeData.inputs?.azureOpenAIApiKey as string + const azureOpenAIApiInstanceName = nodeData.inputs?.azureOpenAIApiInstanceName as string + const azureOpenAIApiDeploymentName = nodeData.inputs?.azureOpenAIApiDeploymentName as string + const azureOpenAIApiVersion = nodeData.inputs?.azureOpenAIApiVersion as string + const batchSize = nodeData.inputs?.batchSize as string + const timeout = nodeData.inputs?.timeout as string + + const obj: Partial & Partial = { + azureOpenAIApiKey, + azureOpenAIApiInstanceName, + azureOpenAIApiDeploymentName, + azureOpenAIApiVersion + } + + if (batchSize) obj.batchSize = parseInt(batchSize, 10) + if (timeout) obj.timeout = parseInt(timeout, 10) + + const model = new OpenAIEmbeddings(obj) + return model + } +} + +module.exports = { nodeClass: AzureOpenAIEmbedding_Embeddings } diff --git a/packages/components/nodes/embeddings/CohereEmbedding/CohereEmbedding.ts b/packages/components/nodes/embeddings/CohereEmbedding/CohereEmbedding.ts new file mode 100644 index 0000000000000000000000000000000000000000..344713a4851a17d2bbd95c1dec75e6c0cf7ee871 --- /dev/null +++ b/packages/components/nodes/embeddings/CohereEmbedding/CohereEmbedding.ts @@ -0,0 +1,68 @@ +import { INode, INodeData, INodeParams } from '../../../src/Interface' +import { getBaseClasses } from '../../../src/utils' +import { CohereEmbeddings, CohereEmbeddingsParams } from 'langchain/embeddings/cohere' + +class CohereEmbedding_Embeddings implements INode { + label: string + name: string + type: string + icon: string + category: string + description: string + baseClasses: string[] + inputs: INodeParams[] + + constructor() { + this.label = 
'Cohere Embeddings' + this.name = 'cohereEmbeddings' + this.type = 'CohereEmbeddings' + this.icon = 'cohere.png' + this.category = 'Embeddings' + this.description = 'Cohere API to generate embeddings for a given text' + this.baseClasses = [this.type, ...getBaseClasses(CohereEmbeddings)] + this.inputs = [ + { + label: 'Cohere API Key', + name: 'cohereApiKey', + type: 'password' + }, + { + label: 'Model Name', + name: 'modelName', + type: 'options', + options: [ + { + label: 'embed-english-v2.0', + name: 'embed-english-v2.0' + }, + { + label: 'embed-english-light-v2.0', + name: 'embed-english-light-v2.0' + }, + { + label: 'embed-multilingual-v2.0', + name: 'embed-multilingual-v2.0' + } + ], + default: 'embed-english-v2.0', + optional: true + } + ] + } + + async init(nodeData: INodeData): Promise { + const apiKey = nodeData.inputs?.cohereApiKey as string + const modelName = nodeData.inputs?.modelName as string + + const obj: Partial & { apiKey?: string } = { + apiKey + } + + if (modelName) obj.modelName = modelName + + const model = new CohereEmbeddings(obj) + return model + } +} + +module.exports = { nodeClass: CohereEmbedding_Embeddings } diff --git a/packages/components/nodes/embeddings/CohereEmbedding/cohere.png b/packages/components/nodes/embeddings/CohereEmbedding/cohere.png new file mode 100644 index 0000000000000000000000000000000000000000..266adeac2214b8627504eaf299c990b28f0448e0 Binary files /dev/null and b/packages/components/nodes/embeddings/CohereEmbedding/cohere.png differ diff --git a/packages/components/nodes/embeddings/HuggingFaceInferenceEmbedding/HuggingFaceInferenceEmbedding.ts b/packages/components/nodes/embeddings/HuggingFaceInferenceEmbedding/HuggingFaceInferenceEmbedding.ts new file mode 100644 index 0000000000000000000000000000000000000000..6f14325a6e514a96e61b7bf90ac7431640c775a6 --- /dev/null +++ b/packages/components/nodes/embeddings/HuggingFaceInferenceEmbedding/HuggingFaceInferenceEmbedding.ts @@ -0,0 +1,53 @@ +import { INode, INodeData, 
INodeParams } from '../../../src/Interface' +import { getBaseClasses } from '../../../src/utils' +import { HuggingFaceInferenceEmbeddings, HuggingFaceInferenceEmbeddingsParams } from 'langchain/embeddings/hf' + +class HuggingFaceInferenceEmbedding_Embeddings implements INode { + label: string + name: string + type: string + icon: string + category: string + description: string + baseClasses: string[] + inputs: INodeParams[] + + constructor() { + this.label = 'HuggingFace Inference Embeddings' + this.name = 'huggingFaceInferenceEmbeddings' + this.type = 'HuggingFaceInferenceEmbeddings' + this.icon = 'huggingface.png' + this.category = 'Embeddings' + this.description = 'HuggingFace Inference API to generate embeddings for a given text' + this.baseClasses = [this.type, ...getBaseClasses(HuggingFaceInferenceEmbeddings)] + this.inputs = [ + { + label: 'HuggingFace Api Key', + name: 'apiKey', + type: 'password' + }, + { + label: 'Model', + name: 'modelName', + type: 'string', + optional: true + } + ] + } + + async init(nodeData: INodeData): Promise { + const apiKey = nodeData.inputs?.apiKey as string + const modelName = nodeData.inputs?.modelName as string + + const obj: Partial = { + apiKey + } + + if (modelName) obj.model = modelName + + const model = new HuggingFaceInferenceEmbeddings(obj) + return model + } +} + +module.exports = { nodeClass: HuggingFaceInferenceEmbedding_Embeddings } diff --git a/packages/components/nodes/embeddings/HuggingFaceInferenceEmbedding/huggingface.png b/packages/components/nodes/embeddings/HuggingFaceInferenceEmbedding/huggingface.png new file mode 100644 index 0000000000000000000000000000000000000000..f8f202a46300c21e41bd782da959e43df5c73fd1 Binary files /dev/null and b/packages/components/nodes/embeddings/HuggingFaceInferenceEmbedding/huggingface.png differ diff --git a/packages/components/nodes/embeddings/LocalAIEmbedding/LocalAIEmbedding.ts b/packages/components/nodes/embeddings/LocalAIEmbedding/LocalAIEmbedding.ts new file mode 100644 
index 0000000000000000000000000000000000000000..7fb2a79847af7f167d67a2a6b94f6407b508b563 --- /dev/null +++ b/packages/components/nodes/embeddings/LocalAIEmbedding/LocalAIEmbedding.ts @@ -0,0 +1,53 @@ +import { INode, INodeData, INodeParams } from '../../../src/Interface' +import { OpenAIEmbeddings, OpenAIEmbeddingsParams } from 'langchain/embeddings/openai' + +class LocalAIEmbedding_Embeddings implements INode { + label: string + name: string + type: string + icon: string + category: string + description: string + baseClasses: string[] + inputs: INodeParams[] + + constructor() { + this.label = 'LocalAI Embeddings' + this.name = 'localAIEmbeddings' + this.type = 'LocalAI Embeddings' + this.icon = 'localai.png' + this.category = 'Embeddings' + this.description = 'Use local embeddings models like llama.cpp' + this.baseClasses = [this.type, 'Embeddings'] + this.inputs = [ + { + label: 'Base Path', + name: 'basePath', + type: 'string', + placeholder: 'http://localhost:8080/v1' + }, + { + label: 'Model Name', + name: 'modelName', + type: 'string', + placeholder: 'text-embedding-ada-002' + } + ] + } + + async init(nodeData: INodeData): Promise { + const modelName = nodeData.inputs?.modelName as string + const basePath = nodeData.inputs?.basePath as string + + const obj: Partial & { openAIApiKey?: string } = { + modelName, + openAIApiKey: 'sk-' + } + + const model = new OpenAIEmbeddings(obj, { basePath }) + + return model + } +} + +module.exports = { nodeClass: LocalAIEmbedding_Embeddings } diff --git a/packages/components/nodes/embeddings/LocalAIEmbedding/localai.png b/packages/components/nodes/embeddings/LocalAIEmbedding/localai.png new file mode 100644 index 0000000000000000000000000000000000000000..321403973dae17c99fbf0435440e36abd81e2f6b Binary files /dev/null and b/packages/components/nodes/embeddings/LocalAIEmbedding/localai.png differ diff --git a/packages/components/nodes/embeddings/OpenAIEmbedding/OpenAIEmbedding.ts 
b/packages/components/nodes/embeddings/OpenAIEmbedding/OpenAIEmbedding.ts new file mode 100644 index 0000000000000000000000000000000000000000..0fd089733a1116541562535a8d6d0b30685c1adf --- /dev/null +++ b/packages/components/nodes/embeddings/OpenAIEmbedding/OpenAIEmbedding.ts @@ -0,0 +1,80 @@ +import { INode, INodeData, INodeParams } from '../../../src/Interface' +import { getBaseClasses } from '../../../src/utils' +import { OpenAIEmbeddings, OpenAIEmbeddingsParams } from 'langchain/embeddings/openai' + +class OpenAIEmbedding_Embeddings implements INode { + label: string + name: string + type: string + icon: string + category: string + description: string + baseClasses: string[] + inputs: INodeParams[] + + constructor() { + this.label = 'OpenAI Embeddings' + this.name = 'openAIEmbeddings' + this.type = 'OpenAIEmbeddings' + this.icon = 'openai.png' + this.category = 'Embeddings' + this.description = 'OpenAI API to generate embeddings for a given text' + this.baseClasses = [this.type, ...getBaseClasses(OpenAIEmbeddings)] + this.inputs = [ + { + label: 'OpenAI Api Key', + name: 'openAIApiKey', + type: 'password' + }, + { + label: 'Strip New Lines', + name: 'stripNewLines', + type: 'boolean', + optional: true, + additionalParams: true + }, + { + label: 'Batch Size', + name: 'batchSize', + type: 'number', + optional: true, + additionalParams: true + }, + { + label: 'Timeout', + name: 'timeout', + type: 'number', + optional: true, + additionalParams: true + }, + { + label: 'BasePath', + name: 'basepath', + type: 'string', + optional: true, + additionalParams: true + } + ] + } + + async init(nodeData: INodeData): Promise { + const openAIApiKey = nodeData.inputs?.openAIApiKey as string + const stripNewLines = nodeData.inputs?.stripNewLines as boolean + const batchSize = nodeData.inputs?.batchSize as string + const timeout = nodeData.inputs?.timeout as string + const basePath = nodeData.inputs?.basepath as string + + const obj: Partial & { openAIApiKey?: string } = { + 
openAIApiKey + } + + if (stripNewLines) obj.stripNewLines = stripNewLines + if (batchSize) obj.batchSize = parseInt(batchSize, 10) + if (timeout) obj.timeout = parseInt(timeout, 10) + + const model = new OpenAIEmbeddings(obj, { basePath }) + return model + } +} + +module.exports = { nodeClass: OpenAIEmbedding_Embeddings } diff --git a/packages/components/nodes/embeddings/OpenAIEmbedding/openai.png b/packages/components/nodes/embeddings/OpenAIEmbedding/openai.png new file mode 100644 index 0000000000000000000000000000000000000000..de08a05b28979826c4cc669c4899789763a938a1 Binary files /dev/null and b/packages/components/nodes/embeddings/OpenAIEmbedding/openai.png differ diff --git a/packages/components/nodes/llms/Azure OpenAI/Azure.svg b/packages/components/nodes/llms/Azure OpenAI/Azure.svg new file mode 100644 index 0000000000000000000000000000000000000000..51eb62535a52e016a28807f036c37acd9380b19f --- /dev/null +++ b/packages/components/nodes/llms/Azure OpenAI/Azure.svg @@ -0,0 +1,5 @@ + + + + + \ No newline at end of file diff --git a/packages/components/nodes/llms/Azure OpenAI/AzureOpenAI.ts b/packages/components/nodes/llms/Azure OpenAI/AzureOpenAI.ts new file mode 100644 index 0000000000000000000000000000000000000000..c19aa83aa28e6f59e21e8a8bf118b3c294755068 --- /dev/null +++ b/packages/components/nodes/llms/Azure OpenAI/AzureOpenAI.ts @@ -0,0 +1,203 @@ +import { INode, INodeData, INodeParams } from '../../../src/Interface' +import { getBaseClasses } from '../../../src/utils' +import { AzureOpenAIInput, OpenAI, OpenAIInput } from 'langchain/llms/openai' + +class AzureOpenAI_LLMs implements INode { + label: string + name: string + type: string + icon: string + category: string + description: string + baseClasses: string[] + inputs: INodeParams[] + + constructor() { + this.label = 'Azure OpenAI' + this.name = 'azureOpenAI' + this.type = 'AzureOpenAI' + this.icon = 'Azure.svg' + this.category = 'LLMs' + this.description = 'Wrapper around Azure OpenAI large language 
models' + this.baseClasses = [this.type, ...getBaseClasses(OpenAI)] + this.inputs = [ + { + label: 'Azure OpenAI Api Key', + name: 'azureOpenAIApiKey', + type: 'password' + }, + { + label: 'Model Name', + name: 'modelName', + type: 'options', + options: [ + { + label: 'text-davinci-003', + name: 'text-davinci-003' + }, + { + label: 'ada', + name: 'ada' + }, + { + label: 'text-ada-001', + name: 'text-ada-001' + }, + { + label: 'babbage', + name: 'babbage' + }, + { + label: 'text-babbage-001', + name: 'text-babbage-001' + }, + { + label: 'curie', + name: 'curie' + }, + { + label: 'text-curie-001', + name: 'text-curie-001' + }, + { + label: 'davinci', + name: 'davinci' + }, + { + label: 'text-davinci-001', + name: 'text-davinci-001' + }, + { + label: 'text-davinci-002', + name: 'text-davinci-002' + }, + { + label: 'text-davinci-fine-tune-002', + name: 'text-davinci-fine-tune-002' + }, + { + label: 'gpt-35-turbo', + name: 'gpt-35-turbo' + } + ], + default: 'text-davinci-003', + optional: true + }, + { + label: 'Temperature', + name: 'temperature', + type: 'number', + default: 0.9, + optional: true + }, + { + label: 'Azure OpenAI Api Instance Name', + name: 'azureOpenAIApiInstanceName', + type: 'string', + placeholder: 'YOUR-INSTANCE-NAME' + }, + { + label: 'Azure OpenAI Api Deployment Name', + name: 'azureOpenAIApiDeploymentName', + type: 'string', + placeholder: 'YOUR-DEPLOYMENT-NAME' + }, + { + label: 'Azure OpenAI Api Version', + name: 'azureOpenAIApiVersion', + type: 'options', + options: [ + { + label: '2023-03-15-preview', + name: '2023-03-15-preview' + }, + { + label: '2022-12-01', + name: '2022-12-01' + } + ], + default: '2023-03-15-preview' + }, + { + label: 'Max Tokens', + name: 'maxTokens', + type: 'number', + optional: true, + additionalParams: true + }, + { + label: 'Top Probability', + name: 'topP', + type: 'number', + optional: true, + additionalParams: true + }, + { + label: 'Best Of', + name: 'bestOf', + type: 'number', + optional: true, + 
additionalParams: true + }, + { + label: 'Frequency Penalty', + name: 'frequencyPenalty', + type: 'number', + optional: true, + additionalParams: true + }, + { + label: 'Presence Penalty', + name: 'presencePenalty', + type: 'number', + optional: true, + additionalParams: true + }, + { + label: 'Timeout', + name: 'timeout', + type: 'number', + optional: true, + additionalParams: true + } + ] + } + + async init(nodeData: INodeData): Promise { + const azureOpenAIApiKey = nodeData.inputs?.azureOpenAIApiKey as string + const temperature = nodeData.inputs?.temperature as string + const modelName = nodeData.inputs?.modelName as string + const azureOpenAIApiInstanceName = nodeData.inputs?.azureOpenAIApiInstanceName as string + const azureOpenAIApiDeploymentName = nodeData.inputs?.azureOpenAIApiDeploymentName as string + const azureOpenAIApiVersion = nodeData.inputs?.azureOpenAIApiVersion as string + const maxTokens = nodeData.inputs?.maxTokens as string + const topP = nodeData.inputs?.topP as string + const frequencyPenalty = nodeData.inputs?.frequencyPenalty as string + const presencePenalty = nodeData.inputs?.presencePenalty as string + const timeout = nodeData.inputs?.timeout as string + const bestOf = nodeData.inputs?.bestOf as string + const streaming = nodeData.inputs?.streaming as boolean + + const obj: Partial & Partial = { + temperature: parseInt(temperature, 10), + modelName, + azureOpenAIApiKey, + azureOpenAIApiInstanceName, + azureOpenAIApiDeploymentName, + azureOpenAIApiVersion, + streaming: streaming ?? 
true + } + + if (maxTokens) obj.maxTokens = parseInt(maxTokens, 10) + if (topP) obj.topP = parseInt(topP, 10) + if (frequencyPenalty) obj.frequencyPenalty = parseInt(frequencyPenalty, 10) + if (presencePenalty) obj.presencePenalty = parseInt(presencePenalty, 10) + if (timeout) obj.timeout = parseInt(timeout, 10) + if (bestOf) obj.bestOf = parseInt(bestOf, 10) + + const model = new OpenAI(obj) + return model + } +} + +module.exports = { nodeClass: AzureOpenAI_LLMs } diff --git a/packages/components/nodes/llms/Cohere/Cohere.ts b/packages/components/nodes/llms/Cohere/Cohere.ts new file mode 100644 index 0000000000000000000000000000000000000000..a7e9c696ce2c9f6292b9352dec7c511b9dbcbfc1 --- /dev/null +++ b/packages/components/nodes/llms/Cohere/Cohere.ts @@ -0,0 +1,97 @@ +import { INode, INodeData, INodeParams } from '../../../src/Interface' +import { getBaseClasses } from '../../../src/utils' +import { Cohere, CohereInput } from './core' + +class Cohere_LLMs implements INode { + label: string + name: string + type: string + icon: string + category: string + description: string + baseClasses: string[] + inputs: INodeParams[] + + constructor() { + this.label = 'Cohere' + this.name = 'cohere' + this.type = 'Cohere' + this.icon = 'cohere.png' + this.category = 'LLMs' + this.description = 'Wrapper around Cohere large language models' + this.baseClasses = [this.type, ...getBaseClasses(Cohere)] + this.inputs = [ + { + label: 'Cohere Api Key', + name: 'cohereApiKey', + type: 'password' + }, + { + label: 'Model Name', + name: 'modelName', + type: 'options', + options: [ + { + label: 'command', + name: 'command' + }, + { + label: 'command-light', + name: 'command-light' + }, + { + label: 'command-nightly', + name: 'command-nightly' + }, + { + label: 'command-light-nightly', + name: 'command-light-nightly' + }, + { + label: 'base', + name: 'base' + }, + { + label: 'base-light', + name: 'base-light' + } + ], + default: 'command', + optional: true + }, + { + label: 'Temperature', + 
name: 'temperature', + type: 'number', + default: 0.7, + optional: true + }, + { + label: 'Max Tokens', + name: 'maxTokens', + type: 'number', + optional: true + } + ] + } + + async init(nodeData: INodeData): Promise { + const temperature = nodeData.inputs?.temperature as string + const modelName = nodeData.inputs?.modelName as string + const apiKey = nodeData.inputs?.cohereApiKey as string + const maxTokens = nodeData.inputs?.maxTokens as string + + const obj: CohereInput = { + apiKey + } + + if (maxTokens) obj.maxTokens = parseInt(maxTokens, 10) + if (modelName) obj.model = modelName + if (temperature) obj.temperature = parseInt(temperature, 10) + + const model = new Cohere(obj) + return model + } +} + +module.exports = { nodeClass: Cohere_LLMs } diff --git a/packages/components/nodes/llms/Cohere/cohere.png b/packages/components/nodes/llms/Cohere/cohere.png new file mode 100644 index 0000000000000000000000000000000000000000..266adeac2214b8627504eaf299c990b28f0448e0 Binary files /dev/null and b/packages/components/nodes/llms/Cohere/cohere.png differ diff --git a/packages/components/nodes/llms/Cohere/core.ts b/packages/components/nodes/llms/Cohere/core.ts new file mode 100644 index 0000000000000000000000000000000000000000..97c81571028a7d27377be585d7aa7be76dea3c6d --- /dev/null +++ b/packages/components/nodes/llms/Cohere/core.ts @@ -0,0 +1,78 @@ +import { LLM, BaseLLMParams } from 'langchain/llms/base' + +export interface CohereInput extends BaseLLMParams { + /** Sampling temperature to use */ + temperature?: number + + /** + * Maximum number of tokens to generate in the completion. + */ + maxTokens?: number + + /** Model to use */ + model?: string + + apiKey?: string +} + +export class Cohere extends LLM implements CohereInput { + temperature = 0 + + maxTokens = 250 + + model: string + + apiKey: string + + constructor(fields?: CohereInput) { + super(fields ?? {}) + + const apiKey = fields?.apiKey ?? 
undefined + + if (!apiKey) { + throw new Error('Please set the COHERE_API_KEY environment variable or pass it to the constructor as the apiKey field.') + } + + this.apiKey = apiKey + this.maxTokens = fields?.maxTokens ?? this.maxTokens + this.temperature = fields?.temperature ?? this.temperature + this.model = fields?.model ?? this.model + } + + _llmType() { + return 'cohere' + } + + /** @ignore */ + async _call(prompt: string, options: this['ParsedCallOptions']): Promise { + const { cohere } = await Cohere.imports() + + cohere.init(this.apiKey) + + // Hit the `generate` endpoint on the `large` model + const generateResponse = await this.caller.callWithOptions({ signal: options.signal }, cohere.generate.bind(cohere), { + prompt, + model: this.model, + max_tokens: this.maxTokens, + temperature: this.temperature, + end_sequences: options.stop + }) + try { + return generateResponse.body.generations[0].text + } catch { + throw new Error('Could not parse response.') + } + } + + /** @ignore */ + static async imports(): Promise<{ + cohere: typeof import('cohere-ai') + }> { + try { + const { default: cohere } = await import('cohere-ai') + return { cohere } + } catch (e) { + throw new Error('Please install cohere-ai as a dependency with, e.g. 
`yarn add cohere-ai`') + } + } +} diff --git a/packages/components/nodes/llms/HuggingFaceInference/HuggingFaceInference.ts b/packages/components/nodes/llms/HuggingFaceInference/HuggingFaceInference.ts new file mode 100644 index 0000000000000000000000000000000000000000..6aa3f4f4ff5199a086f84b4bfef2d7705fdfd95f --- /dev/null +++ b/packages/components/nodes/llms/HuggingFaceInference/HuggingFaceInference.ts @@ -0,0 +1,50 @@ +import { INode, INodeData, INodeParams } from '../../../src/Interface' +import { getBaseClasses } from '../../../src/utils' +import { HuggingFaceInference } from 'langchain/llms/hf' + +class HuggingFaceInference_LLMs implements INode { + label: string + name: string + type: string + icon: string + category: string + description: string + baseClasses: string[] + inputs: INodeParams[] + + constructor() { + this.label = 'HuggingFace Inference' + this.name = 'huggingFaceInference_LLMs' + this.type = 'HuggingFaceInference' + this.icon = 'huggingface.png' + this.category = 'LLMs' + this.description = 'Wrapper around HuggingFace large language models' + this.baseClasses = [this.type, ...getBaseClasses(HuggingFaceInference)] + this.inputs = [ + { + label: 'Model', + name: 'model', + type: 'string', + placeholder: 'gpt2' + }, + { + label: 'HuggingFace Api Key', + name: 'apiKey', + type: 'password' + } + ] + } + + async init(nodeData: INodeData): Promise { + const model = nodeData.inputs?.model as string + const apiKey = nodeData.inputs?.apiKey as string + + const huggingFace = new HuggingFaceInference({ + model, + apiKey + }) + return huggingFace + } +} + +module.exports = { nodeClass: HuggingFaceInference_LLMs } diff --git a/packages/components/nodes/llms/HuggingFaceInference/huggingface.png b/packages/components/nodes/llms/HuggingFaceInference/huggingface.png new file mode 100644 index 0000000000000000000000000000000000000000..f8f202a46300c21e41bd782da959e43df5c73fd1 Binary files /dev/null and 
b/packages/components/nodes/llms/HuggingFaceInference/huggingface.png differ diff --git a/packages/components/nodes/llms/OpenAI/OpenAI.ts b/packages/components/nodes/llms/OpenAI/OpenAI.ts new file mode 100644 index 0000000000000000000000000000000000000000..fb7e5b6b5c5e1a3c4a149025b92a01e3e01e416a --- /dev/null +++ b/packages/components/nodes/llms/OpenAI/OpenAI.ts @@ -0,0 +1,156 @@ +import { INode, INodeData, INodeParams } from '../../../src/Interface' +import { getBaseClasses } from '../../../src/utils' +import { OpenAI, OpenAIInput } from 'langchain/llms/openai' + +class OpenAI_LLMs implements INode { + label: string + name: string + type: string + icon: string + category: string + description: string + baseClasses: string[] + inputs: INodeParams[] + + constructor() { + this.label = 'OpenAI' + this.name = 'openAI' + this.type = 'OpenAI' + this.icon = 'openai.png' + this.category = 'LLMs' + this.description = 'Wrapper around OpenAI large language models' + this.baseClasses = [this.type, ...getBaseClasses(OpenAI)] + this.inputs = [ + { + label: 'OpenAI Api Key', + name: 'openAIApiKey', + type: 'password' + }, + { + label: 'Model Name', + name: 'modelName', + type: 'options', + options: [ + { + label: 'text-davinci-003', + name: 'text-davinci-003' + }, + { + label: 'text-davinci-002', + name: 'text-davinci-002' + }, + { + label: 'text-curie-001', + name: 'text-curie-001' + }, + { + label: 'text-babbage-001', + name: 'text-babbage-001' + } + ], + default: 'text-davinci-003', + optional: true + }, + { + label: 'Temperature', + name: 'temperature', + type: 'number', + default: 0.7, + optional: true + }, + { + label: 'Max Tokens', + name: 'maxTokens', + type: 'number', + optional: true, + additionalParams: true + }, + { + label: 'Top Probability', + name: 'topP', + type: 'number', + optional: true, + additionalParams: true + }, + { + label: 'Best Of', + name: 'bestOf', + type: 'number', + optional: true, + additionalParams: true + }, + { + label: 'Frequency Penalty', + 
name: 'frequencyPenalty', + type: 'number', + optional: true, + additionalParams: true + }, + { + label: 'Presence Penalty', + name: 'presencePenalty', + type: 'number', + optional: true, + additionalParams: true + }, + { + label: 'Batch Size', + name: 'batchSize', + type: 'number', + optional: true, + additionalParams: true + }, + { + label: 'Timeout', + name: 'timeout', + type: 'number', + optional: true, + additionalParams: true + }, + { + label: 'BasePath', + name: 'basepath', + type: 'string', + optional: true, + additionalParams: true + } + ] + } + + async init(nodeData: INodeData): Promise { + const temperature = nodeData.inputs?.temperature as string + const modelName = nodeData.inputs?.modelName as string + const openAIApiKey = nodeData.inputs?.openAIApiKey as string + const maxTokens = nodeData.inputs?.maxTokens as string + const topP = nodeData.inputs?.topP as string + const frequencyPenalty = nodeData.inputs?.frequencyPenalty as string + const presencePenalty = nodeData.inputs?.presencePenalty as string + const timeout = nodeData.inputs?.timeout as string + const batchSize = nodeData.inputs?.batchSize as string + const bestOf = nodeData.inputs?.bestOf as string + const streaming = nodeData.inputs?.streaming as boolean + const basePath = nodeData.inputs?.basepath as string + + const obj: Partial & { openAIApiKey?: string } = { + temperature: parseInt(temperature, 10), + modelName, + openAIApiKey, + streaming: streaming ?? 
true + } + + if (maxTokens) obj.maxTokens = parseInt(maxTokens, 10) + if (topP) obj.topP = parseInt(topP, 10) + if (frequencyPenalty) obj.frequencyPenalty = parseInt(frequencyPenalty, 10) + if (presencePenalty) obj.presencePenalty = parseInt(presencePenalty, 10) + if (timeout) obj.timeout = parseInt(timeout, 10) + if (batchSize) obj.batchSize = parseInt(batchSize, 10) + if (bestOf) obj.bestOf = parseInt(bestOf, 10) + + const model = new OpenAI(obj, { + basePath + }) + return model + } +} + +module.exports = { nodeClass: OpenAI_LLMs } diff --git a/packages/components/nodes/llms/OpenAI/openai.png b/packages/components/nodes/llms/OpenAI/openai.png new file mode 100644 index 0000000000000000000000000000000000000000..de08a05b28979826c4cc669c4899789763a938a1 Binary files /dev/null and b/packages/components/nodes/llms/OpenAI/openai.png differ diff --git a/packages/components/nodes/memory/BufferMemory/BufferMemory.ts b/packages/components/nodes/memory/BufferMemory/BufferMemory.ts new file mode 100644 index 0000000000000000000000000000000000000000..fd635ff47e3fd1c484ea42aebcec820ff26ea823 --- /dev/null +++ b/packages/components/nodes/memory/BufferMemory/BufferMemory.ts @@ -0,0 +1,50 @@ +import { INode, INodeData, INodeParams } from '../../../src/Interface' +import { getBaseClasses } from '../../../src/utils' +import { BufferMemory } from 'langchain/memory' + +class BufferMemory_Memory implements INode { + label: string + name: string + description: string + type: string + icon: string + category: string + baseClasses: string[] + inputs: INodeParams[] + + constructor() { + this.label = 'Buffer Memory' + this.name = 'bufferMemory' + this.type = 'BufferMemory' + this.icon = 'memory.svg' + this.category = 'Memory' + this.description = 'Remembers previous conversational back and forths directly' + this.baseClasses = [this.type, ...getBaseClasses(BufferMemory)] + this.inputs = [ + { + label: 'Memory Key', + name: 'memoryKey', + type: 'string', + default: 'chat_history' + }, + { + 
label: 'Input Key', + name: 'inputKey', + type: 'string', + default: 'input' + } + ] + } + + async init(nodeData: INodeData): Promise { + const memoryKey = nodeData.inputs?.memoryKey as string + const inputKey = nodeData.inputs?.inputKey as string + return new BufferMemory({ + returnMessages: true, + memoryKey, + inputKey + }) + } +} + +module.exports = { nodeClass: BufferMemory_Memory } diff --git a/packages/components/nodes/memory/BufferMemory/memory.svg b/packages/components/nodes/memory/BufferMemory/memory.svg new file mode 100644 index 0000000000000000000000000000000000000000..ca8e17da1c8ea36a75fdfd6bf6fd90a261b1f4e5 --- /dev/null +++ b/packages/components/nodes/memory/BufferMemory/memory.svg @@ -0,0 +1,8 @@ + + + + + + + + \ No newline at end of file diff --git a/packages/components/nodes/memory/BufferWindowMemory/BufferWindowMemory.ts b/packages/components/nodes/memory/BufferWindowMemory/BufferWindowMemory.ts new file mode 100644 index 0000000000000000000000000000000000000000..ae783fec81d5a68d4fdf1e2db72d14a668a222d4 --- /dev/null +++ b/packages/components/nodes/memory/BufferWindowMemory/BufferWindowMemory.ts @@ -0,0 +1,62 @@ +import { INode, INodeData, INodeParams } from '../../../src/Interface' +import { getBaseClasses } from '../../../src/utils' +import { BufferWindowMemory, BufferWindowMemoryInput } from 'langchain/memory' + +class BufferWindowMemory_Memory implements INode { + label: string + name: string + description: string + type: string + icon: string + category: string + baseClasses: string[] + inputs: INodeParams[] + + constructor() { + this.label = 'Buffer Window Memory' + this.name = 'bufferWindowMemory' + this.type = 'BufferWindowMemory' + this.icon = 'memory.svg' + this.category = 'Memory' + this.description = 'Uses a window of size k to surface the last k back-and-forths to use as memory' + this.baseClasses = [this.type, ...getBaseClasses(BufferWindowMemory)] + this.inputs = [ + { + label: 'Memory Key', + name: 'memoryKey', + type: 'string', 
+ default: 'chat_history' + }, + { + label: 'Input Key', + name: 'inputKey', + type: 'string', + default: 'input' + }, + { + label: 'Size', + name: 'k', + type: 'number', + default: '4', + description: 'Window of size k to surface the last k back-and-forths to use as memory.' + } + ] + } + + async init(nodeData: INodeData): Promise { + const memoryKey = nodeData.inputs?.memoryKey as string + const inputKey = nodeData.inputs?.inputKey as string + const k = nodeData.inputs?.k as string + + const obj: Partial = { + returnMessages: true, + memoryKey: memoryKey, + inputKey: inputKey, + k: parseInt(k, 10) + } + + return new BufferWindowMemory(obj) + } +} + +module.exports = { nodeClass: BufferWindowMemory_Memory } diff --git a/packages/components/nodes/memory/BufferWindowMemory/memory.svg b/packages/components/nodes/memory/BufferWindowMemory/memory.svg new file mode 100644 index 0000000000000000000000000000000000000000..ca8e17da1c8ea36a75fdfd6bf6fd90a261b1f4e5 --- /dev/null +++ b/packages/components/nodes/memory/BufferWindowMemory/memory.svg @@ -0,0 +1,8 @@ + + + + + + + + \ No newline at end of file diff --git a/packages/components/nodes/memory/ConversationSummaryMemory/ConversationSummaryMemory.ts b/packages/components/nodes/memory/ConversationSummaryMemory/ConversationSummaryMemory.ts new file mode 100644 index 0000000000000000000000000000000000000000..3c055e8e76f27d37b7b87c8e9685feeffd497894 --- /dev/null +++ b/packages/components/nodes/memory/ConversationSummaryMemory/ConversationSummaryMemory.ts @@ -0,0 +1,61 @@ +import { INode, INodeData, INodeParams } from '../../../src/Interface' +import { getBaseClasses } from '../../../src/utils' +import { ConversationSummaryMemory, ConversationSummaryMemoryInput } from 'langchain/memory' +import { BaseLanguageModel } from 'langchain/base_language' + +class ConversationSummaryMemory_Memory implements INode { + label: string + name: string + description: string + type: string + icon: string + category: string + baseClasses: 
string[] + inputs: INodeParams[] + + constructor() { + this.label = 'Conversation Summary Memory' + this.name = 'conversationSummaryMemory' + this.type = 'ConversationSummaryMemory' + this.icon = 'memory.svg' + this.category = 'Memory' + this.description = 'Summarizes the conversation and stores the current summary in memory' + this.baseClasses = [this.type, ...getBaseClasses(ConversationSummaryMemory)] + this.inputs = [ + { + label: 'Chat Model', + name: 'model', + type: 'BaseChatModel' + }, + { + label: 'Memory Key', + name: 'memoryKey', + type: 'string', + default: 'chat_history' + }, + { + label: 'Input Key', + name: 'inputKey', + type: 'string', + default: 'input' + } + ] + } + + async init(nodeData: INodeData): Promise { + const model = nodeData.inputs?.model as BaseLanguageModel + const memoryKey = nodeData.inputs?.memoryKey as string + const inputKey = nodeData.inputs?.inputKey as string + + const obj: ConversationSummaryMemoryInput = { + llm: model, + returnMessages: true, + memoryKey, + inputKey + } + + return new ConversationSummaryMemory(obj) + } +} + +module.exports = { nodeClass: ConversationSummaryMemory_Memory } diff --git a/packages/components/nodes/memory/ConversationSummaryMemory/memory.svg b/packages/components/nodes/memory/ConversationSummaryMemory/memory.svg new file mode 100644 index 0000000000000000000000000000000000000000..ca8e17da1c8ea36a75fdfd6bf6fd90a261b1f4e5 --- /dev/null +++ b/packages/components/nodes/memory/ConversationSummaryMemory/memory.svg @@ -0,0 +1,8 @@ + + + + + + + + \ No newline at end of file diff --git a/packages/components/nodes/prompts/ChatPromptTemplate/ChatPromptTemplate.ts b/packages/components/nodes/prompts/ChatPromptTemplate/ChatPromptTemplate.ts new file mode 100644 index 0000000000000000000000000000000000000000..c3c4d77f686ff9405e77cacd6cf81fd044fd4dc9 --- /dev/null +++ b/packages/components/nodes/prompts/ChatPromptTemplate/ChatPromptTemplate.ts @@ -0,0 +1,75 @@ +import { ICommonObject, INode, INodeData, 
INodeParams } from '../../../src/Interface' +import { getBaseClasses } from '../../../src/utils' +import { ChatPromptTemplate, SystemMessagePromptTemplate, HumanMessagePromptTemplate } from 'langchain/prompts' + +class ChatPromptTemplate_Prompts implements INode { + label: string + name: string + description: string + type: string + icon: string + category: string + baseClasses: string[] + inputs: INodeParams[] + + constructor() { + this.label = 'Chat Prompt Template' + this.name = 'chatPromptTemplate' + this.type = 'ChatPromptTemplate' + this.icon = 'prompt.svg' + this.category = 'Prompts' + this.description = 'Schema to represent a chat prompt' + this.baseClasses = [this.type, ...getBaseClasses(ChatPromptTemplate)] + this.inputs = [ + { + label: 'System Message', + name: 'systemMessagePrompt', + type: 'string', + rows: 4, + placeholder: `You are a helpful assistant that translates {input_language} to {output_language}.` + }, + { + label: 'Human Message', + name: 'humanMessagePrompt', + type: 'string', + rows: 4, + placeholder: `{text}` + }, + { + label: 'Format Prompt Values', + name: 'promptValues', + type: 'string', + rows: 4, + placeholder: `{ + "input_language": "English", + "output_language": "French" +}`, + optional: true, + acceptVariable: true, + list: true + } + ] + } + + async init(nodeData: INodeData): Promise { + const systemMessagePrompt = nodeData.inputs?.systemMessagePrompt as string + const humanMessagePrompt = nodeData.inputs?.humanMessagePrompt as string + const promptValuesStr = nodeData.inputs?.promptValues as string + + const prompt = ChatPromptTemplate.fromPromptMessages([ + SystemMessagePromptTemplate.fromTemplate(systemMessagePrompt), + HumanMessagePromptTemplate.fromTemplate(humanMessagePrompt) + ]) + + let promptValues: ICommonObject = {} + if (promptValuesStr) { + promptValues = JSON.parse(promptValuesStr.replace(/\s/g, '')) + } + // @ts-ignore + prompt.promptValues = promptValues + + return prompt + } +} + +module.exports = { 
nodeClass: ChatPromptTemplate_Prompts } diff --git a/packages/components/nodes/prompts/ChatPromptTemplate/prompt.svg b/packages/components/nodes/prompts/ChatPromptTemplate/prompt.svg new file mode 100644 index 0000000000000000000000000000000000000000..7e4861188df34ca4b30febe549b7575e4a0aca00 --- /dev/null +++ b/packages/components/nodes/prompts/ChatPromptTemplate/prompt.svg @@ -0,0 +1,6 @@ + + + + + + \ No newline at end of file diff --git a/packages/components/nodes/prompts/FewShotPromptTemplate/FewShotPromptTemplate.ts b/packages/components/nodes/prompts/FewShotPromptTemplate/FewShotPromptTemplate.ts new file mode 100644 index 0000000000000000000000000000000000000000..a42a1d088e08e99ccc0c354a50b8f8ba001c99c4 --- /dev/null +++ b/packages/components/nodes/prompts/FewShotPromptTemplate/FewShotPromptTemplate.ts @@ -0,0 +1,109 @@ +import { INode, INodeData, INodeParams } from '../../../src/Interface' +import { getBaseClasses, getInputVariables } from '../../../src/utils' +import { FewShotPromptTemplate, FewShotPromptTemplateInput, PromptTemplate } from 'langchain/prompts' +import { Example } from 'langchain/schema' +import { TemplateFormat } from 'langchain/dist/prompts/template' + +class FewShotPromptTemplate_Prompts implements INode { + label: string + name: string + description: string + type: string + icon: string + category: string + baseClasses: string[] + inputs: INodeParams[] + + constructor() { + this.label = 'Few Shot Prompt Template' + this.name = 'fewShotPromptTemplate' + this.type = 'FewShotPromptTemplate' + this.icon = 'prompt.svg' + this.category = 'Prompts' + this.description = 'Prompt template you can build with examples' + this.baseClasses = [this.type, ...getBaseClasses(FewShotPromptTemplate)] + this.inputs = [ + { + label: 'Examples', + name: 'examples', + type: 'string', + rows: 4, + placeholder: `[ + { "word": "happy", "antonym": "sad" }, + { "word": "tall", "antonym": "short" }, +]` + }, + { + label: 'Example Prompt', + name: 'examplePrompt', + 
type: 'PromptTemplate' + }, + { + label: 'Prefix', + name: 'prefix', + type: 'string', + rows: 4, + placeholder: `Give the antonym of every input` + }, + { + label: 'Suffix', + name: 'suffix', + type: 'string', + rows: 4, + placeholder: `Word: {input}\nAntonym:` + }, + { + label: 'Example Seperator', + name: 'exampleSeparator', + type: 'string', + placeholder: `\n\n` + }, + { + label: 'Template Format', + name: 'templateFormat', + type: 'options', + options: [ + { + label: 'f-string', + name: 'f-string' + }, + { + label: 'jinja-2', + name: 'jinja-2' + } + ], + default: `f-string` + } + ] + } + + async init(nodeData: INodeData): Promise { + const examplesStr = nodeData.inputs?.examples as string + const prefix = nodeData.inputs?.prefix as string + const suffix = nodeData.inputs?.suffix as string + const exampleSeparator = nodeData.inputs?.exampleSeparator as string + const templateFormat = nodeData.inputs?.templateFormat as TemplateFormat + const examplePrompt = nodeData.inputs?.examplePrompt as PromptTemplate + + const inputVariables = getInputVariables(suffix) + const examples: Example[] = JSON.parse(examplesStr.replace(/\s/g, '')) + + try { + const obj: FewShotPromptTemplateInput = { + examples, + examplePrompt, + prefix, + suffix, + inputVariables, + exampleSeparator, + templateFormat + } + const prompt = new FewShotPromptTemplate(obj) + return prompt + } catch (e) { + throw new Error(e) + } + } +} + +module.exports = { nodeClass: FewShotPromptTemplate_Prompts } diff --git a/packages/components/nodes/prompts/FewShotPromptTemplate/prompt.svg b/packages/components/nodes/prompts/FewShotPromptTemplate/prompt.svg new file mode 100644 index 0000000000000000000000000000000000000000..7e4861188df34ca4b30febe549b7575e4a0aca00 --- /dev/null +++ b/packages/components/nodes/prompts/FewShotPromptTemplate/prompt.svg @@ -0,0 +1,6 @@ + + + + + + \ No newline at end of file diff --git a/packages/components/nodes/prompts/PromptTemplate/PromptTemplate.ts 
b/packages/components/nodes/prompts/PromptTemplate/PromptTemplate.ts new file mode 100644 index 0000000000000000000000000000000000000000..cfa2c48865fcde0e5e39ea6fe603508313a31117 --- /dev/null +++ b/packages/components/nodes/prompts/PromptTemplate/PromptTemplate.ts @@ -0,0 +1,73 @@ +import { ICommonObject, INode, INodeData, INodeParams, PromptTemplate } from '../../../src/Interface' +import { getBaseClasses, getInputVariables, returnJSONStr } from '../../../src/utils' +import { PromptTemplateInput } from 'langchain/prompts' + +class PromptTemplate_Prompts implements INode { + label: string + name: string + description: string + type: string + icon: string + category: string + baseClasses: string[] + inputs: INodeParams[] + + constructor() { + this.label = 'Prompt Template' + this.name = 'promptTemplate' + this.type = 'PromptTemplate' + this.icon = 'prompt.svg' + this.category = 'Prompts' + this.description = 'Schema to represent a basic prompt for an LLM' + this.baseClasses = [...getBaseClasses(PromptTemplate)] + this.inputs = [ + { + label: 'Template', + name: 'template', + type: 'string', + rows: 4, + placeholder: `What is a good name for a company that makes {product}?` + }, + { + label: 'Format Prompt Values', + name: 'promptValues', + type: 'string', + rows: 4, + placeholder: `{ + "input_language": "English", + "output_language": "French" +}`, + optional: true, + acceptVariable: true, + list: true + } + ] + } + + async init(nodeData: INodeData): Promise { + const template = nodeData.inputs?.template as string + let promptValuesStr = nodeData.inputs?.promptValues as string + + let promptValues: ICommonObject = {} + if (promptValuesStr) { + promptValuesStr = promptValuesStr.replace(/\s/g, '') + promptValues = JSON.parse(returnJSONStr(promptValuesStr)) + } + + const inputVariables = getInputVariables(template) + + try { + const options: PromptTemplateInput = { + template, + inputVariables + } + const prompt = new PromptTemplate(options) + prompt.promptValues = 
promptValues + return prompt + } catch (e) { + throw new Error(e) + } + } +} + +module.exports = { nodeClass: PromptTemplate_Prompts } diff --git a/packages/components/nodes/prompts/PromptTemplate/prompt.svg b/packages/components/nodes/prompts/PromptTemplate/prompt.svg new file mode 100644 index 0000000000000000000000000000000000000000..7e4861188df34ca4b30febe549b7575e4a0aca00 --- /dev/null +++ b/packages/components/nodes/prompts/PromptTemplate/prompt.svg @@ -0,0 +1,6 @@ + + + + + + \ No newline at end of file diff --git a/packages/components/nodes/retrievers/PromptRetriever/PromptRetriever.ts b/packages/components/nodes/retrievers/PromptRetriever/PromptRetriever.ts new file mode 100644 index 0000000000000000000000000000000000000000..e3b9a4acbd035ede3c5ed36cb1700737eb6698a0 --- /dev/null +++ b/packages/components/nodes/retrievers/PromptRetriever/PromptRetriever.ts @@ -0,0 +1,62 @@ +import { INode, INodeData, INodeParams, PromptRetriever, PromptRetrieverInput } from '../../../src/Interface' + +class PromptRetriever_Retrievers implements INode { + label: string + name: string + description: string + type: string + icon: string + category: string + baseClasses: string[] + inputs: INodeParams[] + + constructor() { + this.label = 'Prompt Retriever' + this.name = 'promptRetriever' + this.type = 'PromptRetriever' + this.icon = 'promptretriever.svg' + this.category = 'Retrievers' + this.description = 'Store prompt template with name & description to be later queried by MultiPromptChain' + this.baseClasses = [this.type] + this.inputs = [ + { + label: 'Prompt Name', + name: 'name', + type: 'string', + placeholder: 'physics-qa' + }, + { + label: 'Prompt Description', + name: 'description', + type: 'string', + rows: 3, + description: 'Description of what the prompt does and when it should be used', + placeholder: 'Good for answering questions about physics' + }, + { + label: 'Prompt System Message', + name: 'systemMessage', + type: 'string', + rows: 4, + placeholder: `You are 
a very smart physics professor. You are great at answering questions about physics in a concise and easy to understand manner. When you don't know the answer to a question you admit that you don't know.` + } + ] + } + + async init(nodeData: INodeData): Promise { + const name = nodeData.inputs?.name as string + const description = nodeData.inputs?.description as string + const systemMessage = nodeData.inputs?.systemMessage as string + + const obj = { + name, + description, + systemMessage + } as PromptRetrieverInput + + const retriever = new PromptRetriever(obj) + return retriever + } +} + +module.exports = { nodeClass: PromptRetriever_Retrievers } diff --git a/packages/components/nodes/retrievers/PromptRetriever/promptretriever.svg b/packages/components/nodes/retrievers/PromptRetriever/promptretriever.svg new file mode 100644 index 0000000000000000000000000000000000000000..db48e8a519cd35b4fc289b3abcb30ec3ba5586f2 --- /dev/null +++ b/packages/components/nodes/retrievers/PromptRetriever/promptretriever.svg @@ -0,0 +1,8 @@ + + + + + + + + \ No newline at end of file diff --git a/packages/components/nodes/retrievers/VectorStoreRetriever/VectorStoreRetriever.ts b/packages/components/nodes/retrievers/VectorStoreRetriever/VectorStoreRetriever.ts new file mode 100644 index 0000000000000000000000000000000000000000..2ccfc995dff7d7bd30749504470f9cfd7c328d29 --- /dev/null +++ b/packages/components/nodes/retrievers/VectorStoreRetriever/VectorStoreRetriever.ts @@ -0,0 +1,61 @@ +import { VectorStore } from 'langchain/vectorstores/base' +import { INode, INodeData, INodeParams, VectorStoreRetriever, VectorStoreRetrieverInput } from '../../../src/Interface' + +class VectorStoreRetriever_Retrievers implements INode { + label: string + name: string + description: string + type: string + icon: string + category: string + baseClasses: string[] + inputs: INodeParams[] + + constructor() { + this.label = 'Vector Store Retriever' + this.name = 'vectorStoreRetriever' + this.type = 
'VectorStoreRetriever' + this.icon = 'vectorretriever.svg' + this.category = 'Retrievers' + this.description = 'Store vector store as retriever to be later queried by MultiRetrievalQAChain' + this.baseClasses = [this.type] + this.inputs = [ + { + label: 'Vector Store', + name: 'vectorStore', + type: 'VectorStore' + }, + { + label: 'Retriever Name', + name: 'name', + type: 'string', + placeholder: 'netflix movies' + }, + { + label: 'Retriever Description', + name: 'description', + type: 'string', + rows: 3, + description: 'Description of when to use the vector store retriever', + placeholder: 'Good for answering questions about netflix movies' + } + ] + } + + async init(nodeData: INodeData): Promise { + const name = nodeData.inputs?.name as string + const description = nodeData.inputs?.description as string + const vectorStore = nodeData.inputs?.vectorStore as VectorStore + + const obj = { + name, + description, + vectorStore + } as VectorStoreRetrieverInput + + const retriever = new VectorStoreRetriever(obj) + return retriever + } +} + +module.exports = { nodeClass: VectorStoreRetriever_Retrievers } diff --git a/packages/components/nodes/retrievers/VectorStoreRetriever/vectorretriever.svg b/packages/components/nodes/retrievers/VectorStoreRetriever/vectorretriever.svg new file mode 100644 index 0000000000000000000000000000000000000000..da3a9f207e41197ad9ebf9a40d1862cd02191809 --- /dev/null +++ b/packages/components/nodes/retrievers/VectorStoreRetriever/vectorretriever.svg @@ -0,0 +1,9 @@ + + + + + + + + + \ No newline at end of file diff --git a/packages/components/nodes/textsplitters/CharacterTextSplitter/CharacterTextSplitter.ts b/packages/components/nodes/textsplitters/CharacterTextSplitter/CharacterTextSplitter.ts new file mode 100644 index 0000000000000000000000000000000000000000..90387e8b6976c1752154c330583c055c247cfdfb --- /dev/null +++ b/packages/components/nodes/textsplitters/CharacterTextSplitter/CharacterTextSplitter.ts @@ -0,0 +1,63 @@ +import { INode, 
INodeData, INodeParams } from '../../../src/Interface' +import { getBaseClasses } from '../../../src/utils' +import { CharacterTextSplitter, CharacterTextSplitterParams } from 'langchain/text_splitter' + +class CharacterTextSplitter_TextSplitters implements INode { + label: string + name: string + description: string + type: string + icon: string + category: string + baseClasses: string[] + inputs: INodeParams[] + + constructor() { + this.label = 'Character Text Splitter' + this.name = 'characterTextSplitter' + this.type = 'CharacterTextSplitter' + this.icon = 'textsplitter.svg' + this.category = 'Text Splitters' + this.description = `splits only on one type of character (defaults to "\\n\\n").` + this.baseClasses = [this.type, ...getBaseClasses(CharacterTextSplitter)] + this.inputs = [ + { + label: 'Separator', + name: 'separator', + type: 'string', + optional: true + }, + { + label: 'Chunk Size', + name: 'chunkSize', + type: 'number', + default: 1000, + optional: true + }, + { + label: 'Chunk Overlap', + name: 'chunkOverlap', + type: 'number', + optional: true + } + ] + } + + async init(nodeData: INodeData): Promise { + const separator = nodeData.inputs?.separator as string + const chunkSize = nodeData.inputs?.chunkSize as string + const chunkOverlap = nodeData.inputs?.chunkOverlap as string + + const obj = {} as CharacterTextSplitterParams + + if (separator) obj.separator = separator + if (chunkSize) obj.chunkSize = parseInt(chunkSize, 10) + if (chunkOverlap) obj.chunkOverlap = parseInt(chunkOverlap, 10) + + const splitter = new CharacterTextSplitter(obj) + + return splitter + } +} + +module.exports = { nodeClass: CharacterTextSplitter_TextSplitters } diff --git a/packages/components/nodes/textsplitters/CharacterTextSplitter/textsplitter.svg b/packages/components/nodes/textsplitters/CharacterTextSplitter/textsplitter.svg new file mode 100644 index 0000000000000000000000000000000000000000..73145e2da958bd6b6514cf4411b2d47b051b6d46 --- /dev/null +++ 
b/packages/components/nodes/textsplitters/CharacterTextSplitter/textsplitter.svg @@ -0,0 +1,7 @@ + + + + + + + \ No newline at end of file diff --git a/packages/components/nodes/textsplitters/MarkdownTextSplitter/MarkdownTextSplitter.ts b/packages/components/nodes/textsplitters/MarkdownTextSplitter/MarkdownTextSplitter.ts new file mode 100644 index 0000000000000000000000000000000000000000..02c37d8d55b3fe7812d4120f24797255920f30a8 --- /dev/null +++ b/packages/components/nodes/textsplitters/MarkdownTextSplitter/MarkdownTextSplitter.ts @@ -0,0 +1,55 @@ +import { INode, INodeData, INodeParams } from '../../../src/Interface' +import { getBaseClasses } from '../../../src/utils' +import { MarkdownTextSplitter, MarkdownTextSplitterParams } from 'langchain/text_splitter' + +class MarkdownTextSplitter_TextSplitters implements INode { + label: string + name: string + description: string + type: string + icon: string + category: string + baseClasses: string[] + inputs: INodeParams[] + + constructor() { + this.label = 'Markdown Text Splitter' + this.name = 'markdownTextSplitter' + this.type = 'MarkdownTextSplitter' + this.icon = 'markdownTextSplitter.svg' + this.category = 'Text Splitters' + this.description = `Split your content into documents based on the Markdown headers` + this.baseClasses = [this.type, ...getBaseClasses(MarkdownTextSplitter)] + this.inputs = [ + { + label: 'Chunk Size', + name: 'chunkSize', + type: 'number', + default: 1000, + optional: true + }, + { + label: 'Chunk Overlap', + name: 'chunkOverlap', + type: 'number', + optional: true + } + ] + } + + async init(nodeData: INodeData): Promise { + const chunkSize = nodeData.inputs?.chunkSize as string + const chunkOverlap = nodeData.inputs?.chunkOverlap as string + + const obj = {} as MarkdownTextSplitterParams + + if (chunkSize) obj.chunkSize = parseInt(chunkSize, 10) + if (chunkOverlap) obj.chunkOverlap = parseInt(chunkOverlap, 10) + + const splitter = new MarkdownTextSplitter(obj) + + return splitter + } +} 
+ +module.exports = { nodeClass: MarkdownTextSplitter_TextSplitters } diff --git a/packages/components/nodes/textsplitters/MarkdownTextSplitter/markdownTextSplitter.svg b/packages/components/nodes/textsplitters/MarkdownTextSplitter/markdownTextSplitter.svg new file mode 100644 index 0000000000000000000000000000000000000000..f7d45d603af6b288f64b3a830663d3eac0959f36 --- /dev/null +++ b/packages/components/nodes/textsplitters/MarkdownTextSplitter/markdownTextSplitter.svg @@ -0,0 +1,6 @@ + + + + + + \ No newline at end of file diff --git a/packages/components/nodes/textsplitters/RecursiveCharacterTextSplitter/RecursiveCharacterTextSplitter.ts b/packages/components/nodes/textsplitters/RecursiveCharacterTextSplitter/RecursiveCharacterTextSplitter.ts new file mode 100644 index 0000000000000000000000000000000000000000..432b5ca9091040f75fae577f73efcb47953c045a --- /dev/null +++ b/packages/components/nodes/textsplitters/RecursiveCharacterTextSplitter/RecursiveCharacterTextSplitter.ts @@ -0,0 +1,55 @@ +import { INode, INodeData, INodeParams } from '../../../src/Interface' +import { getBaseClasses } from '../../../src/utils' +import { RecursiveCharacterTextSplitter, RecursiveCharacterTextSplitterParams } from 'langchain/text_splitter' + +class RecursiveCharacterTextSplitter_TextSplitters implements INode { + label: string + name: string + description: string + type: string + icon: string + category: string + baseClasses: string[] + inputs: INodeParams[] + + constructor() { + this.label = 'Recursive Character Text Splitter' + this.name = 'recursiveCharacterTextSplitter' + this.type = 'RecursiveCharacterTextSplitter' + this.icon = 'textsplitter.svg' + this.category = 'Text Splitters' + this.description = `Split documents recursively by different characters - starting with "\\n\\n", then "\\n", then " "` + this.baseClasses = [this.type, ...getBaseClasses(RecursiveCharacterTextSplitter)] + this.inputs = [ + { + label: 'Chunk Size', + name: 'chunkSize', + type: 'number', + default: 
1000, + optional: true + }, + { + label: 'Chunk Overlap', + name: 'chunkOverlap', + type: 'number', + optional: true + } + ] + } + + async init(nodeData: INodeData): Promise { + const chunkSize = nodeData.inputs?.chunkSize as string + const chunkOverlap = nodeData.inputs?.chunkOverlap as string + + const obj = {} as RecursiveCharacterTextSplitterParams + + if (chunkSize) obj.chunkSize = parseInt(chunkSize, 10) + if (chunkOverlap) obj.chunkOverlap = parseInt(chunkOverlap, 10) + + const splitter = new RecursiveCharacterTextSplitter(obj) + + return splitter + } +} + +module.exports = { nodeClass: RecursiveCharacterTextSplitter_TextSplitters } diff --git a/packages/components/nodes/textsplitters/RecursiveCharacterTextSplitter/textsplitter.svg b/packages/components/nodes/textsplitters/RecursiveCharacterTextSplitter/textsplitter.svg new file mode 100644 index 0000000000000000000000000000000000000000..73145e2da958bd6b6514cf4411b2d47b051b6d46 --- /dev/null +++ b/packages/components/nodes/textsplitters/RecursiveCharacterTextSplitter/textsplitter.svg @@ -0,0 +1,7 @@ + + + + + + + \ No newline at end of file diff --git a/packages/components/nodes/textsplitters/TokenTextSplitter/TokenTextSplitter.ts b/packages/components/nodes/textsplitters/TokenTextSplitter/TokenTextSplitter.ts new file mode 100644 index 0000000000000000000000000000000000000000..8c8d6abea505aa0f3cdc6e068583ab70acc34a30 --- /dev/null +++ b/packages/components/nodes/textsplitters/TokenTextSplitter/TokenTextSplitter.ts @@ -0,0 +1,86 @@ +import { INode, INodeData, INodeParams } from '../../../src/Interface' +import { getBaseClasses } from '../../../src/utils' +import { TokenTextSplitter, TokenTextSplitterParams } from 'langchain/text_splitter' +import { TiktokenEncoding } from '@dqbd/tiktoken' + +class TokenTextSplitter_TextSplitters implements INode { + label: string + name: string + description: string + type: string + icon: string + category: string + baseClasses: string[] + inputs: INodeParams[] + + 
constructor() { + this.label = 'Token Text Splitter' + this.name = 'tokenTextSplitter' + this.type = 'TokenTextSplitter' + this.icon = 'tiktoken.svg' + this.category = 'Text Splitters' + this.description = `Splits a raw text string by first converting the text into BPE tokens, then split these tokens into chunks and convert the tokens within a single chunk back into text.` + this.baseClasses = [this.type, ...getBaseClasses(TokenTextSplitter)] + this.inputs = [ + { + label: 'Encoding Name', + name: 'encodingName', + type: 'options', + options: [ + { + label: 'gpt2', + name: 'gpt2' + }, + { + label: 'r50k_base', + name: 'r50k_base' + }, + { + label: 'p50k_base', + name: 'p50k_base' + }, + { + label: 'p50k_edit', + name: 'p50k_edit' + }, + { + label: 'cl100k_base', + name: 'cl100k_base' + } + ], + default: 'gpt2' + }, + { + label: 'Chunk Size', + name: 'chunkSize', + type: 'number', + default: 1000, + optional: true + }, + { + label: 'Chunk Overlap', + name: 'chunkOverlap', + type: 'number', + optional: true + } + ] + } + + async init(nodeData: INodeData): Promise { + const encodingName = nodeData.inputs?.encodingName as string + const chunkSize = nodeData.inputs?.chunkSize as string + const chunkOverlap = nodeData.inputs?.chunkOverlap as string + + const obj = {} as TokenTextSplitterParams + + obj.encodingName = encodingName as TiktokenEncoding + if (chunkSize) obj.chunkSize = parseInt(chunkSize, 10) + if (chunkOverlap) obj.chunkOverlap = parseInt(chunkOverlap, 10) + + const splitter = new TokenTextSplitter(obj) + + return splitter + } +} + +module.exports = { nodeClass: TokenTextSplitter_TextSplitters } diff --git a/packages/components/nodes/textsplitters/TokenTextSplitter/tiktoken.svg b/packages/components/nodes/textsplitters/TokenTextSplitter/tiktoken.svg new file mode 100644 index 0000000000000000000000000000000000000000..833cfae107c05ee69bbb6d898bbe630e594aee1c --- /dev/null +++ b/packages/components/nodes/textsplitters/TokenTextSplitter/tiktoken.svg @@ -0,0 
+1,7 @@ + + + + + + + \ No newline at end of file diff --git a/packages/components/nodes/tools/AIPlugin/AIPlugin.ts b/packages/components/nodes/tools/AIPlugin/AIPlugin.ts new file mode 100644 index 0000000000000000000000000000000000000000..ad21f8dbc2c677c04c3e63745ec3ff6fad8a6a98 --- /dev/null +++ b/packages/components/nodes/tools/AIPlugin/AIPlugin.ts @@ -0,0 +1,41 @@ +import { INode, INodeData, INodeParams } from '../../../src/Interface' +import { AIPluginTool } from 'langchain/tools' +import { getBaseClasses } from '../../../src/utils' + +class AIPlugin implements INode { + label: string + name: string + description: string + type: string + icon: string + category: string + baseClasses: string[] + inputs?: INodeParams[] + + constructor() { + this.label = 'AI Plugin' + this.name = 'aiPlugin' + this.type = 'AIPlugin' + this.icon = 'aiplugin.svg' + this.category = 'Tools' + this.description = 'Execute actions using ChatGPT Plugin Url' + this.baseClasses = [this.type, ...getBaseClasses(AIPluginTool)] + this.inputs = [ + { + label: 'Plugin Url', + name: 'pluginUrl', + type: 'string', + placeholder: 'https://www.klarna.com/.well-known/ai-plugin.json' + } + ] + } + + async init(nodeData: INodeData): Promise { + const pluginUrl = nodeData.inputs?.pluginUrl as string + const aiplugin = await AIPluginTool.fromPluginUrl(pluginUrl) + + return aiplugin + } +} + +module.exports = { nodeClass: AIPlugin } diff --git a/packages/components/nodes/tools/AIPlugin/aiplugin.svg b/packages/components/nodes/tools/AIPlugin/aiplugin.svg new file mode 100644 index 0000000000000000000000000000000000000000..e617e45c090197b29eae860f29f90479a9b2bdce --- /dev/null +++ b/packages/components/nodes/tools/AIPlugin/aiplugin.svg @@ -0,0 +1,7 @@ + + + + + + + \ No newline at end of file diff --git a/packages/components/nodes/tools/Calculator/Calculator.ts b/packages/components/nodes/tools/Calculator/Calculator.ts new file mode 100644 index 
0000000000000000000000000000000000000000..85284f0fdfce3f4b4d42dbb21a8970f839f4e5fe --- /dev/null +++ b/packages/components/nodes/tools/Calculator/Calculator.ts @@ -0,0 +1,29 @@ +import { INode } from '../../../src/Interface' +import { getBaseClasses } from '../../../src/utils' +import { Calculator } from 'langchain/tools/calculator' + +class Calculator_Tools implements INode { + label: string + name: string + description: string + type: string + icon: string + category: string + baseClasses: string[] + + constructor() { + this.label = 'Calculator' + this.name = 'calculator' + this.type = 'Calculator' + this.icon = 'calculator.svg' + this.category = 'Tools' + this.description = 'Perform calculations on response' + this.baseClasses = [this.type, ...getBaseClasses(Calculator)] + } + + async init(): Promise { + return new Calculator() + } +} + +module.exports = { nodeClass: Calculator_Tools } diff --git a/packages/components/nodes/tools/Calculator/calculator.svg b/packages/components/nodes/tools/Calculator/calculator.svg new file mode 100644 index 0000000000000000000000000000000000000000..6fa49e1551aea1120cd758e294227007c556e5b2 --- /dev/null +++ b/packages/components/nodes/tools/Calculator/calculator.svg @@ -0,0 +1,11 @@ + + + + + + + + + + + \ No newline at end of file diff --git a/packages/components/nodes/tools/ChainTool/ChainTool.ts b/packages/components/nodes/tools/ChainTool/ChainTool.ts new file mode 100644 index 0000000000000000000000000000000000000000..32e414af7e28888b3f27b489ccf1165e3f97eede --- /dev/null +++ b/packages/components/nodes/tools/ChainTool/ChainTool.ts @@ -0,0 +1,73 @@ +import { INode, INodeData, INodeParams } from '../../../src/Interface' +import { getBaseClasses } from '../../../src/utils' +import { ChainTool } from 'langchain/tools' +import { BaseChain } from 'langchain/chains' + +class ChainTool_Tools implements INode { + label: string + name: string + description: string + type: string + icon: string + category: string + baseClasses: 
string[] + inputs: INodeParams[] + + constructor() { + this.label = 'Chain Tool' + this.name = 'chainTool' + this.type = 'ChainTool' + this.icon = 'chaintool.svg' + this.category = 'Tools' + this.description = 'Use a chain as allowed tool for agent' + this.baseClasses = [this.type, ...getBaseClasses(ChainTool)] + this.inputs = [ + { + label: 'Chain Name', + name: 'name', + type: 'string', + placeholder: 'state-of-union-qa' + }, + { + label: 'Chain Description', + name: 'description', + type: 'string', + rows: 3, + placeholder: + 'State of the Union QA - useful for when you need to ask questions about the most recent state of the union address.' + }, + { + label: 'Return Direct', + name: 'returnDirect', + type: 'boolean', + optional: true + }, + { + label: 'Base Chain', + name: 'baseChain', + type: 'BaseChain' + } + ] + } + + async init(nodeData: INodeData): Promise { + const name = nodeData.inputs?.name as string + const description = nodeData.inputs?.description as string + const baseChain = nodeData.inputs?.baseChain as BaseChain + const returnDirect = nodeData.inputs?.returnDirect as boolean + + const obj = { + name, + description, + chain: baseChain + } as any + + if (returnDirect) obj.returnDirect = returnDirect + + const tool = new ChainTool(obj) + + return tool + } +} + +module.exports = { nodeClass: ChainTool_Tools } diff --git a/packages/components/nodes/tools/ChainTool/chaintool.svg b/packages/components/nodes/tools/ChainTool/chaintool.svg new file mode 100644 index 0000000000000000000000000000000000000000..c5bd0fbcc103c5826958febc20478599e428b3fa --- /dev/null +++ b/packages/components/nodes/tools/ChainTool/chaintool.svg @@ -0,0 +1,4 @@ + + + + \ No newline at end of file diff --git a/packages/components/nodes/tools/MakeWebhook/MakeWebhook.ts b/packages/components/nodes/tools/MakeWebhook/MakeWebhook.ts new file mode 100644 index 0000000000000000000000000000000000000000..38e0cdd1e8ba91fc34d87079212fdcda63d1a02c --- /dev/null +++ 
b/packages/components/nodes/tools/MakeWebhook/MakeWebhook.ts @@ -0,0 +1,48 @@ +import { INode, INodeData, INodeParams } from '../../../src/Interface' +import { getBaseClasses } from '../../../src/utils' +import { MakeWebhookTool } from './core' + +class MakeWebhook_Tools implements INode { + label: string + name: string + description: string + type: string + icon: string + category: string + baseClasses: string[] + inputs: INodeParams[] + + constructor() { + this.label = 'Make.com Webhook' + this.name = 'makeWebhook' + this.type = 'MakeWebhook' + this.icon = 'make.png' + this.category = 'Tools' + this.description = 'Execute webhook calls on Make.com' + this.inputs = [ + { + label: 'Webhook Url', + name: 'url', + type: 'string', + placeholder: 'https://hook.eu1.make.com/abcdefg' + }, + { + label: 'Tool Description', + name: 'desc', + type: 'string', + rows: 4, + placeholder: 'Useful when need to send message to Discord' + } + ] + this.baseClasses = [this.type, ...getBaseClasses(MakeWebhookTool)] + } + + async init(nodeData: INodeData): Promise { + const url = nodeData.inputs?.url as string + const desc = nodeData.inputs?.desc as string + + return new MakeWebhookTool(url, desc, 'GET') + } +} + +module.exports = { nodeClass: MakeWebhook_Tools } diff --git a/packages/components/nodes/tools/MakeWebhook/core.ts b/packages/components/nodes/tools/MakeWebhook/core.ts new file mode 100644 index 0000000000000000000000000000000000000000..8b04ecb96ddda1a5f32c376b3c1286225ebd7624 --- /dev/null +++ b/packages/components/nodes/tools/MakeWebhook/core.ts @@ -0,0 +1,41 @@ +import axios, { AxiosRequestConfig, Method } from 'axios' +import { Tool } from 'langchain/tools' +import { ICommonObject } from '../../../src/Interface' + +export class MakeWebhookTool extends Tool { + private url: string + + name: string + + description: string + + method: string + + headers: ICommonObject + + constructor(url: string, description: string, method = 'POST', headers: ICommonObject = {}) { + super() 
+ this.url = url + this.name = 'make_webhook' + this.description = description ?? `useful for when you need to execute tasks on Make` + this.method = method + this.headers = headers + } + + async _call(): Promise { + try { + const axiosConfig: AxiosRequestConfig = { + method: this.method as Method, + url: this.url, + headers: { + ...this.headers, + 'Content-Type': 'application/json' + } + } + const response = await axios(axiosConfig) + return typeof response.data === 'object' ? JSON.stringify(response.data) : response.data + } catch (error) { + throw new Error(`HTTP error ${error}`) + } + } +} diff --git a/packages/components/nodes/tools/MakeWebhook/make.png b/packages/components/nodes/tools/MakeWebhook/make.png new file mode 100644 index 0000000000000000000000000000000000000000..968afcb581aee3f3fef53ada08bdaada95c8feed Binary files /dev/null and b/packages/components/nodes/tools/MakeWebhook/make.png differ diff --git a/packages/components/nodes/tools/OpenAPIToolkit/OpenAPIToolkit.ts b/packages/components/nodes/tools/OpenAPIToolkit/OpenAPIToolkit.ts new file mode 100644 index 0000000000000000000000000000000000000000..d6168061888d9b008e50f8037970b158159ad106 --- /dev/null +++ b/packages/components/nodes/tools/OpenAPIToolkit/OpenAPIToolkit.ts @@ -0,0 +1,69 @@ +import { INode, INodeData, INodeParams } from '../../../src/Interface' +import { OpenApiToolkit } from 'langchain/agents' +import { JsonSpec, JsonObject } from 'langchain/tools' +import { BaseLanguageModel } from 'langchain/base_language' +import { load } from 'js-yaml' + +class OpenAPIToolkit_Tools implements INode { + label: string + name: string + description: string + type: string + icon: string + category: string + baseClasses: string[] + inputs: INodeParams[] + + constructor() { + this.label = 'OpenAPI Toolkit' + this.name = 'openAPIToolkit' + this.type = 'OpenAPIToolkit' + this.icon = 'openapi.png' + this.category = 'Tools' + this.description = 'Load OpenAPI specification' + this.inputs = [ + { + label: 
'OpenAI API Key', + name: 'openAIApiKey', + type: 'password' + }, + { + label: 'Language Model', + name: 'model', + type: 'BaseLanguageModel' + }, + { + label: 'YAML File', + name: 'yamlFile', + type: 'file', + fileType: '.yaml' + } + ] + this.baseClasses = [this.type, 'Tool'] + } + + async init(nodeData: INodeData): Promise { + const openAIApiKey = nodeData.inputs?.openAIApiKey as string + const model = nodeData.inputs?.model as BaseLanguageModel + const yamlFileBase64 = nodeData.inputs?.yamlFile as string + + const splitDataURI = yamlFileBase64.split(',') + splitDataURI.pop() + const bf = Buffer.from(splitDataURI.pop() || '', 'base64') + const utf8String = bf.toString('utf-8') + const data = load(utf8String) as JsonObject + if (!data) { + throw new Error('Failed to load OpenAPI spec') + } + + const headers = { + 'Content-Type': 'application/json', + Authorization: `Bearer ${openAIApiKey}` + } + const toolkit = new OpenApiToolkit(new JsonSpec(data), model, headers) + + return toolkit.tools + } +} + +module.exports = { nodeClass: OpenAPIToolkit_Tools } diff --git a/packages/components/nodes/tools/OpenAPIToolkit/openapi.png b/packages/components/nodes/tools/OpenAPIToolkit/openapi.png new file mode 100644 index 0000000000000000000000000000000000000000..457c2e4050c8eef06588f25c0acc67a6059ebe01 Binary files /dev/null and b/packages/components/nodes/tools/OpenAPIToolkit/openapi.png differ diff --git a/packages/components/nodes/tools/ReadFile/ReadFile.ts b/packages/components/nodes/tools/ReadFile/ReadFile.ts new file mode 100644 index 0000000000000000000000000000000000000000..b6678943844c8805759bf0a311fa621473086f66 --- /dev/null +++ b/packages/components/nodes/tools/ReadFile/ReadFile.ts @@ -0,0 +1,42 @@ +import { INode, INodeData, INodeParams } from '../../../src/Interface' +import { getBaseClasses } from '../../../src/utils' +import { ReadFileTool } from 'langchain/tools' +import { NodeFileStore } from 'langchain/stores/file/node' + +class ReadFile_Tools implements 
INode { + label: string + name: string + description: string + type: string + icon: string + category: string + baseClasses: string[] + inputs: INodeParams[] + + constructor() { + this.label = 'Read File' + this.name = 'readFile' + this.type = 'ReadFile' + this.icon = 'readfile.svg' + this.category = 'Tools' + this.description = 'Read file from disk' + this.baseClasses = [this.type, 'Tool', ...getBaseClasses(ReadFileTool)] + this.inputs = [ + { + label: 'Base Path', + name: 'basePath', + placeholder: `C:\\Users\\User\\Desktop`, + type: 'string', + optional: true + } + ] + } + + async init(nodeData: INodeData): Promise { + const basePath = nodeData.inputs?.basePath as string + const store = basePath ? new NodeFileStore(basePath) : new NodeFileStore() + return new ReadFileTool({ store }) + } +} + +module.exports = { nodeClass: ReadFile_Tools } diff --git a/packages/components/nodes/tools/ReadFile/readfile.svg b/packages/components/nodes/tools/ReadFile/readfile.svg new file mode 100644 index 0000000000000000000000000000000000000000..3a57a762cfb7cb662ca8e734c08cefd8eb707e3c --- /dev/null +++ b/packages/components/nodes/tools/ReadFile/readfile.svg @@ -0,0 +1,6 @@ + + + + + + \ No newline at end of file diff --git a/packages/components/nodes/tools/RequestsGet/RequestsGet.ts b/packages/components/nodes/tools/RequestsGet/RequestsGet.ts new file mode 100644 index 0000000000000000000000000000000000000000..0b7f0ac809867caeed9aa52da65a8df83319fbbc --- /dev/null +++ b/packages/components/nodes/tools/RequestsGet/RequestsGet.ts @@ -0,0 +1,70 @@ +import { INode, INodeData, INodeParams } from '../../../src/Interface' +import { getBaseClasses } from '../../../src/utils' +import { desc, RequestParameters, RequestsGetTool } from './core' + +class RequestsGet_Tools implements INode { + label: string + name: string + description: string + type: string + icon: string + category: string + baseClasses: string[] + inputs: INodeParams[] + + constructor() { + this.label = 'Requests Get' + 
this.name = 'requestsGet' + this.type = 'RequestsGet' + this.icon = 'requestsget.svg' + this.category = 'Tools' + this.description = 'Execute HTTP GET requests' + this.baseClasses = [this.type, ...getBaseClasses(RequestsGetTool)] + this.inputs = [ + { + label: 'URL', + name: 'url', + type: 'string', + description: + 'Agent will make call to this exact URL. If not specified, agent will try to figure out itself from AIPlugin if provided', + additionalParams: true, + optional: true + }, + { + label: 'Description', + name: 'description', + type: 'string', + rows: 4, + default: desc, + description: 'Acts like a prompt to tell agent when it should use this tool', + additionalParams: true, + optional: true + }, + { + label: 'Headers', + name: 'headers', + type: 'json', + additionalParams: true, + optional: true + } + ] + } + + async init(nodeData: INodeData): Promise { + const headers = nodeData.inputs?.headers as string + const url = nodeData.inputs?.url as string + const description = nodeData.inputs?.description as string + + const obj: RequestParameters = {} + if (url) obj.url = url + if (description) obj.description = description + if (headers) { + const parsedHeaders = typeof headers === 'object' ? headers : JSON.parse(headers) + obj.headers = parsedHeaders + } + + return new RequestsGetTool(obj) + } +} + +module.exports = { nodeClass: RequestsGet_Tools } diff --git a/packages/components/nodes/tools/RequestsGet/core.ts b/packages/components/nodes/tools/RequestsGet/core.ts new file mode 100644 index 0000000000000000000000000000000000000000..14bc07f3a79222881b344fa19c4ed48f52d3afde --- /dev/null +++ b/packages/components/nodes/tools/RequestsGet/core.ts @@ -0,0 +1,46 @@ +import fetch from 'node-fetch' +import { Tool } from 'langchain/tools' + +export const desc = `A portal to the internet. Use this when you need to get specific content from a website. +Input should be a url (i.e. https://www.google.com). 
The output will be the text response of the GET request.` + +export interface Headers { + [key: string]: string +} + +export interface RequestParameters { + headers?: Headers + url?: string + description?: string + maxOutputLength?: number +} + +export class RequestsGetTool extends Tool { + name = 'requests_get' + url = '' + description = desc + maxOutputLength = 2000 + headers = {} + + constructor(args?: RequestParameters) { + super() + this.url = args?.url ?? this.url + this.headers = args?.headers ?? this.headers + this.description = args?.description ?? this.description + this.maxOutputLength = args?.maxOutputLength ?? this.maxOutputLength + } + + /** @ignore */ + async _call(input: string) { + const inputUrl = !this.url ? input : this.url + + if (process.env.DEBUG === 'true') console.info(`Making GET API call to ${inputUrl}`) + + const res = await fetch(inputUrl, { + headers: this.headers + }) + + const text = await res.text() + return text.slice(0, this.maxOutputLength) + } +} diff --git a/packages/components/nodes/tools/RequestsGet/requestsget.svg b/packages/components/nodes/tools/RequestsGet/requestsget.svg new file mode 100644 index 0000000000000000000000000000000000000000..03777e7cd2e30a74ebcb8665061eda3c6b3c1cec --- /dev/null +++ b/packages/components/nodes/tools/RequestsGet/requestsget.svg @@ -0,0 +1,8 @@ + + + + + + + + \ No newline at end of file diff --git a/packages/components/nodes/tools/RequestsPost/RequestsPost.ts b/packages/components/nodes/tools/RequestsPost/RequestsPost.ts new file mode 100644 index 0000000000000000000000000000000000000000..0e64556fa33cfa590b9407384d2d74b4a47f41e0 --- /dev/null +++ b/packages/components/nodes/tools/RequestsPost/RequestsPost.ts @@ -0,0 +1,84 @@ +import { INode, INodeData, INodeParams } from '../../../src/Interface' +import { getBaseClasses } from '../../../src/utils' +import { RequestParameters, desc, RequestsPostTool } from './core' + +class RequestsPost_Tools implements INode { + label: string + name: string 
+ description: string + type: string + icon: string + category: string + baseClasses: string[] + inputs: INodeParams[] + + constructor() { + this.label = 'Requests Post' + this.name = 'requestsPost' + this.type = 'RequestsPost' + this.icon = 'requestspost.svg' + this.category = 'Tools' + this.description = 'Execute HTTP POST requests' + this.baseClasses = [this.type, ...getBaseClasses(RequestsPostTool)] + this.inputs = [ + { + label: 'URL', + name: 'url', + type: 'string', + description: + 'Agent will make call to this exact URL. If not specified, agent will try to figure out itself from AIPlugin if provided', + additionalParams: true, + optional: true + }, + { + label: 'Body', + name: 'body', + type: 'json', + description: + 'JSON body for the POST request. If not specified, agent will try to figure out itself from AIPlugin if provided', + additionalParams: true, + optional: true + }, + { + label: 'Description', + name: 'description', + type: 'string', + rows: 4, + default: desc, + description: 'Acts like a prompt to tell agent when it should use this tool', + additionalParams: true, + optional: true + }, + { + label: 'Headers', + name: 'headers', + type: 'json', + additionalParams: true, + optional: true + } + ] + } + + async init(nodeData: INodeData): Promise { + const headers = nodeData.inputs?.headers as string + const url = nodeData.inputs?.url as string + const description = nodeData.inputs?.description as string + const body = nodeData.inputs?.body as string + + const obj: RequestParameters = {} + if (url) obj.url = url + if (description) obj.description = description + if (headers) { + const parsedHeaders = typeof headers === 'object' ? headers : JSON.parse(headers) + obj.headers = parsedHeaders + } + if (body) { + const parsedBody = typeof body === 'object' ? 
body : JSON.parse(body) + obj.body = parsedBody + } + + return new RequestsPostTool(obj) + } +} + +module.exports = { nodeClass: RequestsPost_Tools } diff --git a/packages/components/nodes/tools/RequestsPost/core.ts b/packages/components/nodes/tools/RequestsPost/core.ts new file mode 100644 index 0000000000000000000000000000000000000000..403ba42b032f9a8902e00e66cd352e24f2c90a89 --- /dev/null +++ b/packages/components/nodes/tools/RequestsPost/core.ts @@ -0,0 +1,72 @@ +import { Tool } from 'langchain/tools' +import fetch from 'node-fetch' + +export const desc = `Use this when you want to POST to a website. +Input should be a json string with two keys: "url" and "data". +The value of "url" should be a string, and the value of "data" should be a dictionary of +key-value pairs you want to POST to the url as a JSON body. +Be careful to always use double quotes for strings in the json string +The output will be the text response of the POST request.` + +export interface Headers { + [key: string]: string +} + +export interface Body { + [key: string]: any +} + +export interface RequestParameters { + headers?: Headers + body?: Body + url?: string + description?: string + maxOutputLength?: number +} + +export class RequestsPostTool extends Tool { + name = 'requests_post' + url = '' + description = desc + maxOutputLength = Infinity + headers = {} + body = {} + + constructor(args?: RequestParameters) { + super() + this.url = args?.url ?? this.url + this.headers = args?.headers ?? this.headers + this.body = args?.body ?? this.body + this.description = args?.description ?? this.description + this.maxOutputLength = args?.maxOutputLength ?? 
this.maxOutputLength + } + + /** @ignore */ + async _call(input: string) { + try { + let inputUrl = '' + let inputBody = {} + if (Object.keys(this.body).length || this.url) { + if (this.url) inputUrl = this.url + if (Object.keys(this.body).length) inputBody = this.body + } else { + const { url, data } = JSON.parse(input) + inputUrl = url + inputBody = data + } + + if (process.env.DEBUG === 'true') console.info(`Making POST API call to ${inputUrl} with body ${JSON.stringify(inputBody)}`) + + const res = await fetch(inputUrl, { + method: 'POST', + headers: this.headers, + body: JSON.stringify(inputBody) + }) + + const text = await res.text() + return text.slice(0, this.maxOutputLength) + } catch (error) { + return `${error}` + } + } +} diff --git a/packages/components/nodes/tools/RequestsPost/requestspost.svg b/packages/components/nodes/tools/RequestsPost/requestspost.svg new file mode 100644 index 0000000000000000000000000000000000000000..2bea6e967c06ac372e08a8b535706a810c4433f5 --- /dev/null +++ b/packages/components/nodes/tools/RequestsPost/requestspost.svg @@ -0,0 +1,6 @@ + + + + + + \ No newline at end of file diff --git a/packages/components/nodes/tools/SerpAPI/SerpAPI.ts b/packages/components/nodes/tools/SerpAPI/SerpAPI.ts new file mode 100644 index 0000000000000000000000000000000000000000..694324083581ef3eac70467d7fcbf9c0fe7f921e --- /dev/null +++ b/packages/components/nodes/tools/SerpAPI/SerpAPI.ts @@ -0,0 +1,38 @@ +import { INode, INodeData, INodeParams } from '../../../src/Interface' +import { getBaseClasses } from '../../../src/utils' +import { SerpAPI } from 'langchain/tools' + +class SerpAPI_Tools implements INode { + label: string + name: string + description: string + type: string + icon: string + category: string + baseClasses: string[] + inputs: INodeParams[] + + constructor() { + this.label = 'Serp API' + this.name = 'serpAPI' + this.type = 'SerpAPI' + this.icon = 'serp.png' + this.category = 'Tools' + this.description = 'Wrapper around SerpAPI - a 
real-time API to access Google search results' + this.inputs = [ + { + label: 'Serp Api Key', + name: 'apiKey', + type: 'password' + } + ] + this.baseClasses = [this.type, ...getBaseClasses(SerpAPI)] + } + + async init(nodeData: INodeData): Promise { + const apiKey = nodeData.inputs?.apiKey as string + return new SerpAPI(apiKey) + } +} + +module.exports = { nodeClass: SerpAPI_Tools } diff --git a/packages/components/nodes/tools/SerpAPI/serp.png b/packages/components/nodes/tools/SerpAPI/serp.png new file mode 100644 index 0000000000000000000000000000000000000000..338aeaea9e5b06b98edd44d243f96f089a9ad273 Binary files /dev/null and b/packages/components/nodes/tools/SerpAPI/serp.png differ diff --git a/packages/components/nodes/tools/Serper/Serper.ts b/packages/components/nodes/tools/Serper/Serper.ts new file mode 100644 index 0000000000000000000000000000000000000000..65dff57c0662e769774530cbf18bdac14bfb6445 --- /dev/null +++ b/packages/components/nodes/tools/Serper/Serper.ts @@ -0,0 +1,38 @@ +import { INode, INodeData, INodeParams } from '../../../src/Interface' +import { getBaseClasses } from '../../../src/utils' +import { Serper } from 'langchain/tools' + +class Serper_Tools implements INode { + label: string + name: string + description: string + type: string + icon: string + category: string + baseClasses: string[] + inputs: INodeParams[] + + constructor() { + this.label = 'Serper' + this.name = 'serper' + this.type = 'Serper' + this.icon = 'serper.png' + this.category = 'Tools' + this.description = 'Wrapper around Serper.dev - Google Search API' + this.inputs = [ + { + label: 'Serper Api Key', + name: 'apiKey', + type: 'password' + } + ] + this.baseClasses = [this.type, ...getBaseClasses(Serper)] + } + + async init(nodeData: INodeData): Promise { + const apiKey = nodeData.inputs?.apiKey as string + return new Serper(apiKey) + } +} + +module.exports = { nodeClass: Serper_Tools } diff --git a/packages/components/nodes/tools/Serper/serper.png 
b/packages/components/nodes/tools/Serper/serper.png new file mode 100644 index 0000000000000000000000000000000000000000..0b094037b067f0f61b25fbd03e54d49888320f37 Binary files /dev/null and b/packages/components/nodes/tools/Serper/serper.png differ diff --git a/packages/components/nodes/tools/WebBrowser/WebBrowser.ts b/packages/components/nodes/tools/WebBrowser/WebBrowser.ts new file mode 100644 index 0000000000000000000000000000000000000000..09478047a1f186dac5f4c2d216f345f9608116b1 --- /dev/null +++ b/packages/components/nodes/tools/WebBrowser/WebBrowser.ts @@ -0,0 +1,47 @@ +import { BaseLanguageModel } from 'langchain/base_language' +import { INode, INodeData, INodeParams } from '../../../src/Interface' +import { getBaseClasses } from '../../../src/utils' +import { WebBrowser } from 'langchain/tools/webbrowser' +import { Embeddings } from 'langchain/embeddings/base' + +class WebBrowser_Tools implements INode { + label: string + name: string + description: string + type: string + icon: string + category: string + baseClasses: string[] + inputs: INodeParams[] + + constructor() { + this.label = 'Web Browser' + this.name = 'webBrowser' + this.type = 'WebBrowser' + this.icon = 'webBrowser.svg' + this.category = 'Tools' + this.description = 'Gives agent the ability to visit a website and extract information' + this.inputs = [ + { + label: 'Language Model', + name: 'model', + type: 'BaseLanguageModel' + }, + { + label: 'Embeddings', + name: 'embeddings', + type: 'Embeddings' + } + ] + this.baseClasses = [this.type, ...getBaseClasses(WebBrowser)] + } + + async init(nodeData: INodeData): Promise { + const model = nodeData.inputs?.model as BaseLanguageModel + const embeddings = nodeData.inputs?.embeddings as Embeddings + + return new WebBrowser({ model, embeddings }) + } +} + +module.exports = { nodeClass: WebBrowser_Tools } diff --git a/packages/components/nodes/tools/WebBrowser/webBrowser.svg b/packages/components/nodes/tools/WebBrowser/webBrowser.svg new file mode 100644 
index 0000000000000000000000000000000000000000..01eea4f2a374d0712b7aec53758a1b274969fb1a --- /dev/null +++ b/packages/components/nodes/tools/WebBrowser/webBrowser.svg @@ -0,0 +1,12 @@ + + + + + + + + + + + + \ No newline at end of file diff --git a/packages/components/nodes/tools/WriteFile/WriteFile.ts b/packages/components/nodes/tools/WriteFile/WriteFile.ts new file mode 100644 index 0000000000000000000000000000000000000000..208166d86265a84479672c22ba7764d4abb23b48 --- /dev/null +++ b/packages/components/nodes/tools/WriteFile/WriteFile.ts @@ -0,0 +1,42 @@ +import { INode, INodeData, INodeParams } from '../../../src/Interface' +import { getBaseClasses } from '../../../src/utils' +import { WriteFileTool } from 'langchain/tools' +import { NodeFileStore } from 'langchain/stores/file/node' + +class WriteFile_Tools implements INode { + label: string + name: string + description: string + type: string + icon: string + category: string + baseClasses: string[] + inputs: INodeParams[] + + constructor() { + this.label = 'Write File' + this.name = 'writeFile' + this.type = 'WriteFile' + this.icon = 'writefile.svg' + this.category = 'Tools' + this.description = 'Write file to disk' + this.baseClasses = [this.type, 'Tool', ...getBaseClasses(WriteFileTool)] + this.inputs = [ + { + label: 'Base Path', + name: 'basePath', + placeholder: `C:\\Users\\User\\Desktop`, + type: 'string', + optional: true + } + ] + } + + async init(nodeData: INodeData): Promise { + const basePath = nodeData.inputs?.basePath as string + const store = basePath ? 
new NodeFileStore(basePath) : new NodeFileStore() + return new WriteFileTool({ store }) + } +} + +module.exports = { nodeClass: WriteFile_Tools } diff --git a/packages/components/nodes/tools/WriteFile/writefile.svg b/packages/components/nodes/tools/WriteFile/writefile.svg new file mode 100644 index 0000000000000000000000000000000000000000..72500bf642eea6d7377b2da55da081ffe5ef3511 --- /dev/null +++ b/packages/components/nodes/tools/WriteFile/writefile.svg @@ -0,0 +1,6 @@ + + + + + + \ No newline at end of file diff --git a/packages/components/nodes/tools/ZapierNLA/ZapierNLA.ts b/packages/components/nodes/tools/ZapierNLA/ZapierNLA.ts new file mode 100644 index 0000000000000000000000000000000000000000..849f5946decd31317d4be38a3760b7ede206eb81 --- /dev/null +++ b/packages/components/nodes/tools/ZapierNLA/ZapierNLA.ts @@ -0,0 +1,45 @@ +import { ZapierNLAWrapper, ZapiterNLAWrapperParams } from 'langchain/tools' +import { INode, INodeData, INodeParams } from '../../../src/Interface' +import { ZapierToolKit } from 'langchain/agents' + +class ZapierNLA_Tools implements INode { + label: string + name: string + description: string + type: string + icon: string + category: string + baseClasses: string[] + inputs: INodeParams[] + + constructor() { + this.label = 'Zapier NLA' + this.name = 'zapierNLA' + this.type = 'ZapierNLA' + this.icon = 'zapier.png' + this.category = 'Tools' + this.description = "Access to apps and actions on Zapier's platform through a natural language API interface" + this.inputs = [ + { + label: 'Zapier NLA Api Key', + name: 'apiKey', + type: 'password' + } + ] + this.baseClasses = [this.type, 'Tool'] + } + + async init(nodeData: INodeData): Promise { + const apiKey = nodeData.inputs?.apiKey as string + + const obj: Partial = { + apiKey + } + const zapier = new ZapierNLAWrapper(obj) + const toolkit = await ZapierToolKit.fromZapierNLAWrapper(zapier) + + return toolkit.tools + } +} + +module.exports = { nodeClass: ZapierNLA_Tools } diff --git 
a/packages/components/nodes/tools/ZapierNLA/zapier.png b/packages/components/nodes/tools/ZapierNLA/zapier.png new file mode 100644 index 0000000000000000000000000000000000000000..769716faaadca77fcd3e7ac06c22ba570d37d2e6 Binary files /dev/null and b/packages/components/nodes/tools/ZapierNLA/zapier.png differ diff --git a/packages/components/nodes/vectorstores/Chroma_Existing/Chroma_Existing.ts b/packages/components/nodes/vectorstores/Chroma_Existing/Chroma_Existing.ts new file mode 100644 index 0000000000000000000000000000000000000000..3ce93e87279f401a9e6152a42bee084985fa79c5 --- /dev/null +++ b/packages/components/nodes/vectorstores/Chroma_Existing/Chroma_Existing.ts @@ -0,0 +1,93 @@ +import { INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface' +import { Chroma } from 'langchain/vectorstores/chroma' +import { Embeddings } from 'langchain/embeddings/base' +import { getBaseClasses } from '../../../src/utils' + +class Chroma_Existing_VectorStores implements INode { + label: string + name: string + description: string + type: string + icon: string + category: string + baseClasses: string[] + inputs: INodeParams[] + outputs: INodeOutputsValue[] + + constructor() { + this.label = 'Chroma Load Existing Index' + this.name = 'chromaExistingIndex' + this.type = 'Chroma' + this.icon = 'chroma.svg' + this.category = 'Vector Stores' + this.description = 'Load existing index from Chroma (i.e: Document has been upserted)' + this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever'] + this.inputs = [ + { + label: 'Embeddings', + name: 'embeddings', + type: 'Embeddings' + }, + { + label: 'Collection Name', + name: 'collectionName', + type: 'string' + }, + { + label: 'Chroma URL', + name: 'chromaURL', + type: 'string', + optional: true + }, + { + label: 'Top K', + name: 'topK', + description: 'Number of top results to fetch. 
Default to 4', + placeholder: '4', + type: 'number', + additionalParams: true, + optional: true + } + ] + this.outputs = [ + { + label: 'Chroma Retriever', + name: 'retriever', + baseClasses: this.baseClasses + }, + { + label: 'Chroma Vector Store', + name: 'vectorStore', + baseClasses: [this.type, ...getBaseClasses(Chroma)] + } + ] + } + + async init(nodeData: INodeData): Promise { + const collectionName = nodeData.inputs?.collectionName as string + const embeddings = nodeData.inputs?.embeddings as Embeddings + const chromaURL = nodeData.inputs?.chromaURL as string + const output = nodeData.outputs?.output as string + const topK = nodeData.inputs?.topK as string + const k = topK ? parseInt(topK, 10) : 4 + + const obj: { + collectionName: string + url?: string + } = { collectionName } + if (chromaURL) obj.url = chromaURL + + const vectorStore = await Chroma.fromExistingCollection(embeddings, obj) + + if (output === 'retriever') { + const retriever = vectorStore.asRetriever(k) + return retriever + } else if (output === 'vectorStore') { + ;(vectorStore as any).k = k + return vectorStore + } + return vectorStore + } +} + +module.exports = { nodeClass: Chroma_Existing_VectorStores } diff --git a/packages/components/nodes/vectorstores/Chroma_Existing/chroma.svg b/packages/components/nodes/vectorstores/Chroma_Existing/chroma.svg new file mode 100644 index 0000000000000000000000000000000000000000..64090685b9ddefbdf08b15546e0ea002d87b965b --- /dev/null +++ b/packages/components/nodes/vectorstores/Chroma_Existing/chroma.svg @@ -0,0 +1,7 @@ + + + + + + + diff --git a/packages/components/nodes/vectorstores/Chroma_Upsert/Chroma_Upsert.ts b/packages/components/nodes/vectorstores/Chroma_Upsert/Chroma_Upsert.ts new file mode 100644 index 0000000000000000000000000000000000000000..f32fbb6712ea6b8447c2de1715de4d9996570873 --- /dev/null +++ b/packages/components/nodes/vectorstores/Chroma_Upsert/Chroma_Upsert.ts @@ -0,0 +1,108 @@ +import { INode, INodeData, INodeOutputsValue, 
INodeParams } from '../../../src/Interface' +import { Chroma } from 'langchain/vectorstores/chroma' +import { Embeddings } from 'langchain/embeddings/base' +import { Document } from 'langchain/document' +import { getBaseClasses } from '../../../src/utils' +import { flatten } from 'lodash' + +class ChromaUpsert_VectorStores implements INode { + label: string + name: string + description: string + type: string + icon: string + category: string + baseClasses: string[] + inputs: INodeParams[] + outputs: INodeOutputsValue[] + + constructor() { + this.label = 'Chroma Upsert Document' + this.name = 'chromaUpsert' + this.type = 'Chroma' + this.icon = 'chroma.svg' + this.category = 'Vector Stores' + this.description = 'Upsert documents to Chroma' + this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever'] + this.inputs = [ + { + label: 'Document', + name: 'document', + type: 'Document', + list: true + }, + { + label: 'Embeddings', + name: 'embeddings', + type: 'Embeddings' + }, + { + label: 'Collection Name', + name: 'collectionName', + type: 'string' + }, + { + label: 'Chroma URL', + name: 'chromaURL', + type: 'string', + optional: true + }, + { + label: 'Top K', + name: 'topK', + description: 'Number of top results to fetch. Default to 4', + placeholder: '4', + type: 'number', + additionalParams: true, + optional: true + } + ] + this.outputs = [ + { + label: 'Chroma Retriever', + name: 'retriever', + baseClasses: this.baseClasses + }, + { + label: 'Chroma Vector Store', + name: 'vectorStore', + baseClasses: [this.type, ...getBaseClasses(Chroma)] + } + ] + } + + async init(nodeData: INodeData): Promise { + const collectionName = nodeData.inputs?.collectionName as string + const docs = nodeData.inputs?.document as Document[] + const embeddings = nodeData.inputs?.embeddings as Embeddings + const chromaURL = nodeData.inputs?.chromaURL as string + const output = nodeData.outputs?.output as string + const topK = nodeData.inputs?.topK as string + const k = topK ? 
parseInt(topK, 10) : 4 + + const flattenDocs = docs && docs.length ? flatten(docs) : [] + const finalDocs = [] + for (let i = 0; i < flattenDocs.length; i += 1) { + finalDocs.push(new Document(flattenDocs[i])) + } + + const obj: { + collectionName: string + url?: string + } = { collectionName } + if (chromaURL) obj.url = chromaURL + + const vectorStore = await Chroma.fromDocuments(finalDocs, embeddings, obj) + + if (output === 'retriever') { + const retriever = vectorStore.asRetriever(k) + return retriever + } else if (output === 'vectorStore') { + ;(vectorStore as any).k = k + return vectorStore + } + return vectorStore + } +} + +module.exports = { nodeClass: ChromaUpsert_VectorStores } diff --git a/packages/components/nodes/vectorstores/Chroma_Upsert/chroma.svg b/packages/components/nodes/vectorstores/Chroma_Upsert/chroma.svg new file mode 100644 index 0000000000000000000000000000000000000000..64090685b9ddefbdf08b15546e0ea002d87b965b --- /dev/null +++ b/packages/components/nodes/vectorstores/Chroma_Upsert/chroma.svg @@ -0,0 +1,7 @@ + + + + + + + diff --git a/packages/components/nodes/vectorstores/Faiss_Existing/Faiss_Existing.ts b/packages/components/nodes/vectorstores/Faiss_Existing/Faiss_Existing.ts new file mode 100644 index 0000000000000000000000000000000000000000..6dd1859481449848a1d6f3c1ca654d5565f69fc7 --- /dev/null +++ b/packages/components/nodes/vectorstores/Faiss_Existing/Faiss_Existing.ts @@ -0,0 +1,82 @@ +import { INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface' +import { FaissStore } from 'langchain/vectorstores/faiss' +import { Embeddings } from 'langchain/embeddings/base' +import { getBaseClasses } from '../../../src/utils' + +class Faiss_Existing_VectorStores implements INode { + label: string + name: string + description: string + type: string + icon: string + category: string + baseClasses: string[] + inputs: INodeParams[] + outputs: INodeOutputsValue[] + + constructor() { + this.label = 'Faiss Load Existing 
Index' + this.name = 'faissExistingIndex' + this.type = 'Faiss' + this.icon = 'faiss.svg' + this.category = 'Vector Stores' + this.description = 'Load existing index from Faiss (i.e: Document has been upserted)' + this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever'] + this.inputs = [ + { + label: 'Embeddings', + name: 'embeddings', + type: 'Embeddings' + }, + { + label: 'Base Path to load', + name: 'basePath', + description: 'Path to load faiss.index file', + placeholder: `C:\\Users\\User\\Desktop`, + type: 'string' + }, + { + label: 'Top K', + name: 'topK', + description: 'Number of top results to fetch. Default to 4', + placeholder: '4', + type: 'number', + additionalParams: true, + optional: true + } + ] + this.outputs = [ + { + label: 'Faiss Retriever', + name: 'retriever', + baseClasses: this.baseClasses + }, + { + label: 'Faiss Vector Store', + name: 'vectorStore', + baseClasses: [this.type, ...getBaseClasses(FaissStore)] + } + ] + } + + async init(nodeData: INodeData): Promise { + const embeddings = nodeData.inputs?.embeddings as Embeddings + const basePath = nodeData.inputs?.basePath as string + const output = nodeData.outputs?.output as string + const topK = nodeData.inputs?.topK as string + const k = topK ? 
parseInt(topK, 10) : 4 + + const vectorStore = await FaissStore.load(basePath, embeddings) + + if (output === 'retriever') { + const retriever = vectorStore.asRetriever(k) + return retriever + } else if (output === 'vectorStore') { + ;(vectorStore as any).k = k + return vectorStore + } + return vectorStore + } +} + +module.exports = { nodeClass: Faiss_Existing_VectorStores } diff --git a/packages/components/nodes/vectorstores/Faiss_Existing/faiss.svg b/packages/components/nodes/vectorstores/Faiss_Existing/faiss.svg new file mode 100644 index 0000000000000000000000000000000000000000..5fbe98322bdefbf140b16a0466a786500993a7fd --- /dev/null +++ b/packages/components/nodes/vectorstores/Faiss_Existing/faiss.svg @@ -0,0 +1,10 @@ + + + + + + + + + + \ No newline at end of file diff --git a/packages/components/nodes/vectorstores/Faiss_Upsert/Faiss_Upsert.ts b/packages/components/nodes/vectorstores/Faiss_Upsert/Faiss_Upsert.ts new file mode 100644 index 0000000000000000000000000000000000000000..5e5f9028c5f3354954cc85b669f98a9175b448d2 --- /dev/null +++ b/packages/components/nodes/vectorstores/Faiss_Upsert/Faiss_Upsert.ts @@ -0,0 +1,98 @@ +import { INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface' +import { Embeddings } from 'langchain/embeddings/base' +import { Document } from 'langchain/document' +import { getBaseClasses } from '../../../src/utils' +import { FaissStore } from 'langchain/vectorstores/faiss' +import { flatten } from 'lodash' + +class FaissUpsert_VectorStores implements INode { + label: string + name: string + description: string + type: string + icon: string + category: string + baseClasses: string[] + inputs: INodeParams[] + outputs: INodeOutputsValue[] + + constructor() { + this.label = 'Faiss Upsert Document' + this.name = 'faissUpsert' + this.type = 'Faiss' + this.icon = 'faiss.svg' + this.category = 'Vector Stores' + this.description = 'Upsert documents to Faiss' + this.baseClasses = [this.type, 'VectorStoreRetriever', 
'BaseRetriever'] + this.inputs = [ + { + label: 'Document', + name: 'document', + type: 'Document', + list: true + }, + { + label: 'Embeddings', + name: 'embeddings', + type: 'Embeddings' + }, + { + label: 'Base Path to store', + name: 'basePath', + description: 'Path to store faiss.index file', + placeholder: `C:\\Users\\User\\Desktop`, + type: 'string' + }, + { + label: 'Top K', + name: 'topK', + description: 'Number of top results to fetch. Default to 4', + placeholder: '4', + type: 'number', + additionalParams: true, + optional: true + } + ] + this.outputs = [ + { + label: 'Faiss Retriever', + name: 'retriever', + baseClasses: this.baseClasses + }, + { + label: 'Faiss Vector Store', + name: 'vectorStore', + baseClasses: [this.type, ...getBaseClasses(FaissStore)] + } + ] + } + + async init(nodeData: INodeData): Promise { + const docs = nodeData.inputs?.document as Document[] + const embeddings = nodeData.inputs?.embeddings as Embeddings + const output = nodeData.outputs?.output as string + const basePath = nodeData.inputs?.basePath as string + const topK = nodeData.inputs?.topK as string + const k = topK ? parseInt(topK, 10) : 4 + + const flattenDocs = docs && docs.length ? 
flatten(docs) : [] + const finalDocs = [] + for (let i = 0; i < flattenDocs.length; i += 1) { + finalDocs.push(new Document(flattenDocs[i])) + } + + const vectorStore = await FaissStore.fromDocuments(finalDocs, embeddings) + await vectorStore.save(basePath) + + if (output === 'retriever') { + const retriever = vectorStore.asRetriever(k) + return retriever + } else if (output === 'vectorStore') { + ;(vectorStore as any).k = k + return vectorStore + } + return vectorStore + } +} + +module.exports = { nodeClass: FaissUpsert_VectorStores } diff --git a/packages/components/nodes/vectorstores/Faiss_Upsert/faiss.svg b/packages/components/nodes/vectorstores/Faiss_Upsert/faiss.svg new file mode 100644 index 0000000000000000000000000000000000000000..5fbe98322bdefbf140b16a0466a786500993a7fd --- /dev/null +++ b/packages/components/nodes/vectorstores/Faiss_Upsert/faiss.svg @@ -0,0 +1,10 @@ + + + + + + + + + + \ No newline at end of file diff --git a/packages/components/nodes/vectorstores/InMemory/InMemoryVectorStore.ts b/packages/components/nodes/vectorstores/InMemory/InMemoryVectorStore.ts new file mode 100644 index 0000000000000000000000000000000000000000..32a785a5f399a48a25fece38194fbaf312d787c0 --- /dev/null +++ b/packages/components/nodes/vectorstores/InMemory/InMemoryVectorStore.ts @@ -0,0 +1,88 @@ +import { INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface' +import { MemoryVectorStore } from 'langchain/vectorstores/memory' +import { Embeddings } from 'langchain/embeddings/base' +import { Document } from 'langchain/document' +import { getBaseClasses } from '../../../src/utils' +import { flatten } from 'lodash' + +class InMemoryVectorStore_VectorStores implements INode { + label: string + name: string + description: string + type: string + icon: string + category: string + baseClasses: string[] + inputs: INodeParams[] + outputs: INodeOutputsValue[] + + constructor() { + this.label = 'In-Memory Vector Store' + this.name = 'memoryVectorStore' + 
this.type = 'Memory' + this.icon = 'memory.svg' + this.category = 'Vector Stores' + this.description = 'In-memory vectorstore that stores embeddings and does an exact, linear search for the most similar embeddings.' + this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever'] + this.inputs = [ + { + label: 'Document', + name: 'document', + type: 'Document', + list: true + }, + { + label: 'Embeddings', + name: 'embeddings', + type: 'Embeddings' + }, + { + label: 'Top K', + name: 'topK', + description: 'Number of top results to fetch. Default to 4', + placeholder: '4', + type: 'number', + optional: true + } + ] + this.outputs = [ + { + label: 'Memory Retriever', + name: 'retriever', + baseClasses: this.baseClasses + }, + { + label: 'Memory Vector Store', + name: 'vectorStore', + baseClasses: [this.type, ...getBaseClasses(MemoryVectorStore)] + } + ] + } + + async init(nodeData: INodeData): Promise { + const docs = nodeData.inputs?.document as Document[] + const embeddings = nodeData.inputs?.embeddings as Embeddings + const output = nodeData.outputs?.output as string + const topK = nodeData.inputs?.topK as string + const k = topK ? parseInt(topK, 10) : 4 + + const flattenDocs = docs && docs.length ? 
flatten(docs) : [] + const finalDocs = [] + for (let i = 0; i < flattenDocs.length; i += 1) { + finalDocs.push(new Document(flattenDocs[i])) + } + + const vectorStore = await MemoryVectorStore.fromDocuments(finalDocs, embeddings) + + if (output === 'retriever') { + const retriever = vectorStore.asRetriever(k) + return retriever + } else if (output === 'vectorStore') { + ;(vectorStore as any).k = k + return vectorStore + } + return vectorStore + } +} + +module.exports = { nodeClass: InMemoryVectorStore_VectorStores } diff --git a/packages/components/nodes/vectorstores/InMemory/memory.svg b/packages/components/nodes/vectorstores/InMemory/memory.svg new file mode 100644 index 0000000000000000000000000000000000000000..e7f97c871b5eac44763ee2daf4a8f90c16f0533b --- /dev/null +++ b/packages/components/nodes/vectorstores/InMemory/memory.svg @@ -0,0 +1,7 @@ + + + + + + + \ No newline at end of file diff --git a/packages/components/nodes/vectorstores/Pinecone_Existing/Pinecone_Existing.ts b/packages/components/nodes/vectorstores/Pinecone_Existing/Pinecone_Existing.ts new file mode 100644 index 0000000000000000000000000000000000000000..e57da39621a69478dd66770bb4fbd35e640a68b7 --- /dev/null +++ b/packages/components/nodes/vectorstores/Pinecone_Existing/Pinecone_Existing.ts @@ -0,0 +1,128 @@ +import { INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface' +import { PineconeClient } from '@pinecone-database/pinecone' +import { PineconeLibArgs, PineconeStore } from 'langchain/vectorstores/pinecone' +import { Embeddings } from 'langchain/embeddings/base' +import { getBaseClasses } from '../../../src/utils' + +class Pinecone_Existing_VectorStores implements INode { + label: string + name: string + description: string + type: string + icon: string + category: string + baseClasses: string[] + inputs: INodeParams[] + outputs: INodeOutputsValue[] + + constructor() { + this.label = 'Pinecone Load Existing Index' + this.name = 'pineconeExistingIndex' + this.type 
= 'Pinecone' + this.icon = 'pinecone.png' + this.category = 'Vector Stores' + this.description = 'Load existing index from Pinecone (i.e: Document has been upserted)' + this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever'] + this.inputs = [ + { + label: 'Embeddings', + name: 'embeddings', + type: 'Embeddings' + }, + { + label: 'Pinecone Api Key', + name: 'pineconeApiKey', + type: 'password' + }, + { + label: 'Pinecone Environment', + name: 'pineconeEnv', + type: 'string' + }, + { + label: 'Pinecone Index', + name: 'pineconeIndex', + type: 'string' + }, + { + label: 'Pinecone Namespace', + name: 'pineconeNamespace', + type: 'string', + placeholder: 'my-first-namespace', + additionalParams: true, + optional: true + }, + { + label: 'Pinecone Metadata Filter', + name: 'pineconeMetadataFilter', + type: 'json', + optional: true, + additionalParams: true + }, + { + label: 'Top K', + name: 'topK', + description: 'Number of top results to fetch. Default to 4', + placeholder: '4', + type: 'number', + additionalParams: true, + optional: true + } + ] + this.outputs = [ + { + label: 'Pinecone Retriever', + name: 'retriever', + baseClasses: this.baseClasses + }, + { + label: 'Pinecone Vector Store', + name: 'vectorStore', + baseClasses: [this.type, ...getBaseClasses(PineconeStore)] + } + ] + } + + async init(nodeData: INodeData): Promise { + const pineconeApiKey = nodeData.inputs?.pineconeApiKey as string + const pineconeEnv = nodeData.inputs?.pineconeEnv as string + const index = nodeData.inputs?.pineconeIndex as string + const pineconeNamespace = nodeData.inputs?.pineconeNamespace as string + const pineconeMetadataFilter = nodeData.inputs?.pineconeMetadataFilter + const embeddings = nodeData.inputs?.embeddings as Embeddings + const output = nodeData.outputs?.output as string + const topK = nodeData.inputs?.topK as string + const k = topK ? 
parseInt(topK, 10) : 4 + + const client = new PineconeClient() + await client.init({ + apiKey: pineconeApiKey, + environment: pineconeEnv + }) + + const pineconeIndex = client.Index(index) + + const obj: PineconeLibArgs = { + pineconeIndex + } + + if (pineconeNamespace) obj.namespace = pineconeNamespace + if (pineconeMetadataFilter) { + const metadatafilter = typeof pineconeMetadataFilter === 'object' ? pineconeMetadataFilter : JSON.parse(pineconeMetadataFilter) + obj.filter = metadatafilter + } + + const vectorStore = await PineconeStore.fromExistingIndex(embeddings, obj) + + if (output === 'retriever') { + const retriever = vectorStore.asRetriever(k) + return retriever + } else if (output === 'vectorStore') { + ;(vectorStore as any).k = k + return vectorStore + } + return vectorStore + } +} + +module.exports = { nodeClass: Pinecone_Existing_VectorStores } diff --git a/packages/components/nodes/vectorstores/Pinecone_Existing/pinecone.png b/packages/components/nodes/vectorstores/Pinecone_Existing/pinecone.png new file mode 100644 index 0000000000000000000000000000000000000000..1ae189fdcc3b672a629d34c271a3d963bfaa1d70 Binary files /dev/null and b/packages/components/nodes/vectorstores/Pinecone_Existing/pinecone.png differ diff --git a/packages/components/nodes/vectorstores/Pinecone_Upsert/Pinecone_Upsert.ts b/packages/components/nodes/vectorstores/Pinecone_Upsert/Pinecone_Upsert.ts new file mode 100644 index 0000000000000000000000000000000000000000..ad1767c25be44a3948005ad0d755759d1bf2cc62 --- /dev/null +++ b/packages/components/nodes/vectorstores/Pinecone_Upsert/Pinecone_Upsert.ts @@ -0,0 +1,131 @@ +import { INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface' +import { PineconeClient } from '@pinecone-database/pinecone' +import { PineconeLibArgs, PineconeStore } from 'langchain/vectorstores/pinecone' +import { Embeddings } from 'langchain/embeddings/base' +import { Document } from 'langchain/document' +import { getBaseClasses } from 
'../../../src/utils' +import { flatten } from 'lodash' + +class PineconeUpsert_VectorStores implements INode { + label: string + name: string + description: string + type: string + icon: string + category: string + baseClasses: string[] + inputs: INodeParams[] + outputs: INodeOutputsValue[] + + constructor() { + this.label = 'Pinecone Upsert Document' + this.name = 'pineconeUpsert' + this.type = 'Pinecone' + this.icon = 'pinecone.png' + this.category = 'Vector Stores' + this.description = 'Upsert documents to Pinecone' + this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever'] + this.inputs = [ + { + label: 'Document', + name: 'document', + type: 'Document', + list: true + }, + { + label: 'Embeddings', + name: 'embeddings', + type: 'Embeddings' + }, + { + label: 'Pinecone Api Key', + name: 'pineconeApiKey', + type: 'password' + }, + { + label: 'Pinecone Environment', + name: 'pineconeEnv', + type: 'string' + }, + { + label: 'Pinecone Index', + name: 'pineconeIndex', + type: 'string' + }, + { + label: 'Pinecone Namespace', + name: 'pineconeNamespace', + type: 'string', + placeholder: 'my-first-namespace', + additionalParams: true, + optional: true + }, + { + label: 'Top K', + name: 'topK', + description: 'Number of top results to fetch. 
Default to 4', + placeholder: '4', + type: 'number', + additionalParams: true, + optional: true + } + ] + this.outputs = [ + { + label: 'Pinecone Retriever', + name: 'retriever', + baseClasses: this.baseClasses + }, + { + label: 'Pinecone Vector Store', + name: 'vectorStore', + baseClasses: [this.type, ...getBaseClasses(PineconeStore)] + } + ] + } + + async init(nodeData: INodeData): Promise { + const pineconeApiKey = nodeData.inputs?.pineconeApiKey as string + const pineconeEnv = nodeData.inputs?.pineconeEnv as string + const index = nodeData.inputs?.pineconeIndex as string + const pineconeNamespace = nodeData.inputs?.pineconeNamespace as string + const docs = nodeData.inputs?.document as Document[] + const embeddings = nodeData.inputs?.embeddings as Embeddings + const output = nodeData.outputs?.output as string + const topK = nodeData.inputs?.topK as string + const k = topK ? parseInt(topK, 10) : 4 + + const client = new PineconeClient() + await client.init({ + apiKey: pineconeApiKey, + environment: pineconeEnv + }) + + const pineconeIndex = client.Index(index) + + const flattenDocs = docs && docs.length ? 
flatten(docs) : [] + const finalDocs = [] + for (let i = 0; i < flattenDocs.length; i += 1) { + finalDocs.push(new Document(flattenDocs[i])) + } + + const obj: PineconeLibArgs = { + pineconeIndex + } + + if (pineconeNamespace) obj.namespace = pineconeNamespace + + const vectorStore = await PineconeStore.fromDocuments(finalDocs, embeddings, obj) + + if (output === 'retriever') { + const retriever = vectorStore.asRetriever(k) + return retriever + } else if (output === 'vectorStore') { + ;(vectorStore as any).k = k + return vectorStore + } + return vectorStore + } +} + +module.exports = { nodeClass: PineconeUpsert_VectorStores } diff --git a/packages/components/nodes/vectorstores/Pinecone_Upsert/pinecone.png b/packages/components/nodes/vectorstores/Pinecone_Upsert/pinecone.png new file mode 100644 index 0000000000000000000000000000000000000000..1ae189fdcc3b672a629d34c271a3d963bfaa1d70 Binary files /dev/null and b/packages/components/nodes/vectorstores/Pinecone_Upsert/pinecone.png differ diff --git a/packages/components/nodes/vectorstores/Supabase_Existing/Supabase_Exisiting.ts b/packages/components/nodes/vectorstores/Supabase_Existing/Supabase_Exisiting.ts new file mode 100644 index 0000000000000000000000000000000000000000..173660caef09759f5c3e6c30543c3357fc1bc0fb --- /dev/null +++ b/packages/components/nodes/vectorstores/Supabase_Existing/Supabase_Exisiting.ts @@ -0,0 +1,120 @@ +import { INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface' +import { Embeddings } from 'langchain/embeddings/base' +import { getBaseClasses } from '../../../src/utils' +import { SupabaseLibArgs, SupabaseVectorStore } from 'langchain/vectorstores/supabase' +import { createClient } from '@supabase/supabase-js' + +class Supabase_Existing_VectorStores implements INode { + label: string + name: string + description: string + type: string + icon: string + category: string + baseClasses: string[] + inputs: INodeParams[] + outputs: INodeOutputsValue[] + + constructor() 
{ + this.label = 'Supabase Load Existing Index' + this.name = 'supabaseExistingIndex' + this.type = 'Supabase' + this.icon = 'supabase.svg' + this.category = 'Vector Stores' + this.description = 'Load existing index from Supabase (i.e: Document has been upserted)' + this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever'] + this.inputs = [ + { + label: 'Embeddings', + name: 'embeddings', + type: 'Embeddings' + }, + { + label: 'Supabase API Key', + name: 'supabaseApiKey', + type: 'password' + }, + { + label: 'Supabase Project URL', + name: 'supabaseProjUrl', + type: 'string' + }, + { + label: 'Table Name', + name: 'tableName', + type: 'string' + }, + { + label: 'Query Name', + name: 'queryName', + type: 'string' + }, + { + label: 'Supabase Metadata Filter', + name: 'supabaseMetadataFilter', + type: 'json', + optional: true, + additionalParams: true + }, + { + label: 'Top K', + name: 'topK', + description: 'Number of top results to fetch. Default to 4', + placeholder: '4', + type: 'number', + additionalParams: true, + optional: true + } + ] + this.outputs = [ + { + label: 'Supabase Retriever', + name: 'retriever', + baseClasses: this.baseClasses + }, + { + label: 'Supabase Vector Store', + name: 'vectorStore', + baseClasses: [this.type, ...getBaseClasses(SupabaseVectorStore)] + } + ] + } + + async init(nodeData: INodeData): Promise { + const supabaseApiKey = nodeData.inputs?.supabaseApiKey as string + const supabaseProjUrl = nodeData.inputs?.supabaseProjUrl as string + const tableName = nodeData.inputs?.tableName as string + const queryName = nodeData.inputs?.queryName as string + const embeddings = nodeData.inputs?.embeddings as Embeddings + const supabaseMetadataFilter = nodeData.inputs?.supabaseMetadataFilter + const output = nodeData.outputs?.output as string + const topK = nodeData.inputs?.topK as string + const k = topK ? 
parseInt(topK, 10) : 4 + + const client = createClient(supabaseProjUrl, supabaseApiKey) + + const obj: SupabaseLibArgs = { + client, + tableName, + queryName + } + + if (supabaseMetadataFilter) { + const metadatafilter = typeof supabaseMetadataFilter === 'object' ? supabaseMetadataFilter : JSON.parse(supabaseMetadataFilter) + obj.filter = metadatafilter + } + + const vectorStore = await SupabaseVectorStore.fromExistingIndex(embeddings, obj) + + if (output === 'retriever') { + const retriever = vectorStore.asRetriever(k) + return retriever + } else if (output === 'vectorStore') { + ;(vectorStore as any).k = k + return vectorStore + } + return vectorStore + } +} + +module.exports = { nodeClass: Supabase_Existing_VectorStores } diff --git a/packages/components/nodes/vectorstores/Supabase_Existing/supabase.svg b/packages/components/nodes/vectorstores/Supabase_Existing/supabase.svg new file mode 100644 index 0000000000000000000000000000000000000000..884d6449f47166deb7d80d61fd9499a9d5c74f3f --- /dev/null +++ b/packages/components/nodes/vectorstores/Supabase_Existing/supabase.svg @@ -0,0 +1,15 @@ + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/packages/components/nodes/vectorstores/Supabase_Upsert/Supabase_Upsert.ts b/packages/components/nodes/vectorstores/Supabase_Upsert/Supabase_Upsert.ts new file mode 100644 index 0000000000000000000000000000000000000000..69997a563453071c823dbf5a02692191b52c18df --- /dev/null +++ b/packages/components/nodes/vectorstores/Supabase_Upsert/Supabase_Upsert.ts @@ -0,0 +1,120 @@ +import { INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface' +import { Embeddings } from 'langchain/embeddings/base' +import { Document } from 'langchain/document' +import { getBaseClasses } from '../../../src/utils' +import { SupabaseVectorStore } from 'langchain/vectorstores/supabase' +import { createClient } from '@supabase/supabase-js' +import { flatten } from 'lodash' + +class SupabaseUpsert_VectorStores 
implements INode { + label: string + name: string + description: string + type: string + icon: string + category: string + baseClasses: string[] + inputs: INodeParams[] + outputs: INodeOutputsValue[] + + constructor() { + this.label = 'Supabase Upsert Document' + this.name = 'supabaseUpsert' + this.type = 'Supabase' + this.icon = 'supabase.svg' + this.category = 'Vector Stores' + this.description = 'Upsert documents to Supabase' + this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever'] + this.inputs = [ + { + label: 'Document', + name: 'document', + type: 'Document', + list: true + }, + { + label: 'Embeddings', + name: 'embeddings', + type: 'Embeddings' + }, + { + label: 'Supabase API Key', + name: 'supabaseApiKey', + type: 'password' + }, + { + label: 'Supabase Project URL', + name: 'supabaseProjUrl', + type: 'string' + }, + { + label: 'Table Name', + name: 'tableName', + type: 'string' + }, + { + label: 'Query Name', + name: 'queryName', + type: 'string' + }, + { + label: 'Top K', + name: 'topK', + description: 'Number of top results to fetch. Default to 4', + placeholder: '4', + type: 'number', + additionalParams: true, + optional: true + } + ] + this.outputs = [ + { + label: 'Supabase Retriever', + name: 'retriever', + baseClasses: this.baseClasses + }, + { + label: 'Supabase Vector Store', + name: 'vectorStore', + baseClasses: [this.type, ...getBaseClasses(SupabaseVectorStore)] + } + ] + } + + async init(nodeData: INodeData): Promise { + const supabaseApiKey = nodeData.inputs?.supabaseApiKey as string + const supabaseProjUrl = nodeData.inputs?.supabaseProjUrl as string + const tableName = nodeData.inputs?.tableName as string + const queryName = nodeData.inputs?.queryName as string + const docs = nodeData.inputs?.document as Document[] + const embeddings = nodeData.inputs?.embeddings as Embeddings + const output = nodeData.outputs?.output as string + const topK = nodeData.inputs?.topK as string + const k = topK ? 
parseInt(topK, 10) : 4 + + const client = createClient(supabaseProjUrl, supabaseApiKey) + + const flattenDocs = docs && docs.length ? flatten(docs) : [] + const finalDocs = [] + for (let i = 0; i < flattenDocs.length; i += 1) { + finalDocs.push(new Document(flattenDocs[i])) + } + + const vectorStore = await SupabaseVectorStore.fromDocuments(finalDocs, embeddings, { + client, + tableName: tableName, + queryName: queryName + }) + + if (output === 'retriever') { + const retriever = vectorStore.asRetriever(k) + return retriever + } else if (output === 'vectorStore') { + ;(vectorStore as any).k = k + return vectorStore + } + return vectorStore + } +} + +module.exports = { nodeClass: SupabaseUpsert_VectorStores } diff --git a/packages/components/nodes/vectorstores/Supabase_Upsert/supabase.svg b/packages/components/nodes/vectorstores/Supabase_Upsert/supabase.svg new file mode 100644 index 0000000000000000000000000000000000000000..884d6449f47166deb7d80d61fd9499a9d5c74f3f --- /dev/null +++ b/packages/components/nodes/vectorstores/Supabase_Upsert/supabase.svg @@ -0,0 +1,15 @@ + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/packages/components/nodes/vectorstores/Weaviate_Existing/Weaviate_Existing.ts b/packages/components/nodes/vectorstores/Weaviate_Existing/Weaviate_Existing.ts new file mode 100644 index 0000000000000000000000000000000000000000..595691bdc4ac40cac7d2dbfa7892cf817ac70885 --- /dev/null +++ b/packages/components/nodes/vectorstores/Weaviate_Existing/Weaviate_Existing.ts @@ -0,0 +1,148 @@ +import { INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface' +import { Embeddings } from 'langchain/embeddings/base' +import { getBaseClasses } from '../../../src/utils' +import weaviate, { WeaviateClient, ApiKey } from 'weaviate-ts-client' +import { WeaviateLibArgs, WeaviateStore } from 'langchain/vectorstores/weaviate' + +class Weaviate_Existing_VectorStores implements INode { + label: string + name: string + description: 
string + type: string + icon: string + category: string + baseClasses: string[] + inputs: INodeParams[] + outputs: INodeOutputsValue[] + + constructor() { + this.label = 'Weaviate Load Existing Index' + this.name = 'weaviateExistingIndex' + this.type = 'Weaviate' + this.icon = 'weaviate.png' + this.category = 'Vector Stores' + this.description = 'Load existing index from Weaviate (i.e: Document has been upserted)' + this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever'] + this.inputs = [ + { + label: 'Embeddings', + name: 'embeddings', + type: 'Embeddings' + }, + { + label: 'Weaviate Scheme', + name: 'weaviateScheme', + type: 'options', + default: 'https', + options: [ + { + label: 'https', + name: 'https' + }, + { + label: 'http', + name: 'http' + } + ] + }, + { + label: 'Weaviate Host', + name: 'weaviateHost', + type: 'string', + placeholder: 'localhost:8080' + }, + { + label: 'Weaviate Index', + name: 'weaviateIndex', + type: 'string', + placeholder: 'Test' + }, + { + label: 'Weaviate API Key', + name: 'weaviateApiKey', + type: 'password', + optional: true + }, + { + label: 'Weaviate Text Key', + name: 'weaviateTextKey', + type: 'string', + placeholder: 'text', + optional: true, + additionalParams: true + }, + { + label: 'Weaviate Metadata Keys', + name: 'weaviateMetadataKeys', + type: 'string', + rows: 4, + placeholder: `["foo"]`, + optional: true, + additionalParams: true + }, + { + label: 'Top K', + name: 'topK', + description: 'Number of top results to fetch. 
Default to 4', + placeholder: '4', + type: 'number', + additionalParams: true, + optional: true + } + ] + this.outputs = [ + { + label: 'Weaviate Retriever', + name: 'retriever', + baseClasses: this.baseClasses + }, + { + label: 'Weaviate Vector Store', + name: 'vectorStore', + baseClasses: [this.type, ...getBaseClasses(WeaviateStore)] + } + ] + } + + async init(nodeData: INodeData): Promise { + const weaviateScheme = nodeData.inputs?.weaviateScheme as string + const weaviateHost = nodeData.inputs?.weaviateHost as string + const weaviateIndex = nodeData.inputs?.weaviateIndex as string + const weaviateApiKey = nodeData.inputs?.weaviateApiKey as string + const weaviateTextKey = nodeData.inputs?.weaviateTextKey as string + const weaviateMetadataKeys = nodeData.inputs?.weaviateMetadataKeys as string + const embeddings = nodeData.inputs?.embeddings as Embeddings + const output = nodeData.outputs?.output as string + const topK = nodeData.inputs?.topK as string + const k = topK ? parseInt(topK, 10) : 4 + + const clientConfig: any = { + scheme: weaviateScheme, + host: weaviateHost + } + if (weaviateApiKey) clientConfig.apiKey = new ApiKey(weaviateApiKey) + + const client: WeaviateClient = weaviate.client(clientConfig) + + const obj: WeaviateLibArgs = { + client, + indexName: weaviateIndex + } + + if (weaviateTextKey) obj.textKey = weaviateTextKey + if (weaviateMetadataKeys) obj.metadataKeys = JSON.parse(weaviateMetadataKeys.replace(/\s/g, '')) + + const vectorStore = await WeaviateStore.fromExistingIndex(embeddings, obj) + + if (output === 'retriever') { + const retriever = vectorStore.asRetriever(k) + return retriever + } else if (output === 'vectorStore') { + ;(vectorStore as any).k = k + return vectorStore + } + return vectorStore + } +} + +module.exports = { nodeClass: Weaviate_Existing_VectorStores } diff --git a/packages/components/nodes/vectorstores/Weaviate_Existing/weaviate.png b/packages/components/nodes/vectorstores/Weaviate_Existing/weaviate.png new file mode 
100644 index 0000000000000000000000000000000000000000..25a39e33894ee2702ff1e569cf14971eb2bf45f3 Binary files /dev/null and b/packages/components/nodes/vectorstores/Weaviate_Existing/weaviate.png differ diff --git a/packages/components/nodes/vectorstores/Weaviate_Upsert/Weaviate_Upsert.ts b/packages/components/nodes/vectorstores/Weaviate_Upsert/Weaviate_Upsert.ts new file mode 100644 index 0000000000000000000000000000000000000000..061374263f3ea0c6e9a709213a0e9daf96412a98 --- /dev/null +++ b/packages/components/nodes/vectorstores/Weaviate_Upsert/Weaviate_Upsert.ts @@ -0,0 +1,163 @@ +import { INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface' +import { Embeddings } from 'langchain/embeddings/base' +import { Document } from 'langchain/document' +import { getBaseClasses } from '../../../src/utils' +import { WeaviateLibArgs, WeaviateStore } from 'langchain/vectorstores/weaviate' +import weaviate, { WeaviateClient, ApiKey } from 'weaviate-ts-client' +import { flatten } from 'lodash' + +class WeaviateUpsert_VectorStores implements INode { + label: string + name: string + description: string + type: string + icon: string + category: string + baseClasses: string[] + inputs: INodeParams[] + outputs: INodeOutputsValue[] + + constructor() { + this.label = 'Weaviate Upsert Document' + this.name = 'weaviateUpsert' + this.type = 'Weaviate' + this.icon = 'weaviate.png' + this.category = 'Vector Stores' + this.description = 'Upsert documents to Weaviate' + this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever'] + this.inputs = [ + { + label: 'Document', + name: 'document', + type: 'Document', + list: true + }, + { + label: 'Embeddings', + name: 'embeddings', + type: 'Embeddings' + }, + { + label: 'Weaviate Scheme', + name: 'weaviateScheme', + type: 'options', + default: 'https', + options: [ + { + label: 'https', + name: 'https' + }, + { + label: 'http', + name: 'http' + } + ] + }, + { + label: 'Weaviate Host', + name: 'weaviateHost', + 
type: 'string', + placeholder: 'localhost:8080' + }, + { + label: 'Weaviate Index', + name: 'weaviateIndex', + type: 'string', + placeholder: 'Test' + }, + { + label: 'Weaviate API Key', + name: 'weaviateApiKey', + type: 'password', + optional: true + }, + { + label: 'Weaviate Text Key', + name: 'weaviateTextKey', + type: 'string', + placeholder: 'text', + optional: true, + additionalParams: true + }, + { + label: 'Weaviate Metadata Keys', + name: 'weaviateMetadataKeys', + type: 'string', + rows: 4, + placeholder: `["foo"]`, + optional: true, + additionalParams: true + }, + { + label: 'Top K', + name: 'topK', + description: 'Number of top results to fetch. Default to 4', + placeholder: '4', + type: 'number', + additionalParams: true, + optional: true + } + ] + this.outputs = [ + { + label: 'Weaviate Retriever', + name: 'retriever', + baseClasses: this.baseClasses + }, + { + label: 'Weaviate Vector Store', + name: 'vectorStore', + baseClasses: [this.type, ...getBaseClasses(WeaviateStore)] + } + ] + } + + async init(nodeData: INodeData): Promise { + const weaviateScheme = nodeData.inputs?.weaviateScheme as string + const weaviateHost = nodeData.inputs?.weaviateHost as string + const weaviateIndex = nodeData.inputs?.weaviateIndex as string + const weaviateApiKey = nodeData.inputs?.weaviateApiKey as string + const weaviateTextKey = nodeData.inputs?.weaviateTextKey as string + const weaviateMetadataKeys = nodeData.inputs?.weaviateMetadataKeys as string + const docs = nodeData.inputs?.document as Document[] + const embeddings = nodeData.inputs?.embeddings as Embeddings + const output = nodeData.outputs?.output as string + const topK = nodeData.inputs?.topK as string + const k = topK ? parseInt(topK, 10) : 4 + + const clientConfig: any = { + scheme: weaviateScheme, + host: weaviateHost + } + if (weaviateApiKey) clientConfig.apiKey = new ApiKey(weaviateApiKey) + + const client: WeaviateClient = weaviate.client(clientConfig) + + const flattenDocs = docs && docs.length ? 
flatten(docs) : [] + const finalDocs = [] + for (let i = 0; i < flattenDocs.length; i += 1) { + finalDocs.push(new Document(flattenDocs[i])) + } + + const obj: WeaviateLibArgs = { + client, + indexName: weaviateIndex + } + + if (weaviateTextKey) obj.textKey = weaviateTextKey + if (weaviateMetadataKeys) obj.metadataKeys = JSON.parse(weaviateMetadataKeys.replace(/\s/g, '')) + + const vectorStore = await WeaviateStore.fromDocuments(finalDocs, embeddings, obj) + + if (output === 'retriever') { + const retriever = vectorStore.asRetriever(k) + return retriever + } else if (output === 'vectorStore') { + ;(vectorStore as any).k = k + return vectorStore + } + return vectorStore + } +} + +module.exports = { nodeClass: WeaviateUpsert_VectorStores } diff --git a/packages/components/nodes/vectorstores/Weaviate_Upsert/weaviate.png b/packages/components/nodes/vectorstores/Weaviate_Upsert/weaviate.png new file mode 100644 index 0000000000000000000000000000000000000000..25a39e33894ee2702ff1e569cf14971eb2bf45f3 Binary files /dev/null and b/packages/components/nodes/vectorstores/Weaviate_Upsert/weaviate.png differ diff --git a/packages/components/package.json b/packages/components/package.json new file mode 100644 index 0000000000000000000000000000000000000000..3e3d58b6cebc753e193281f9c6591938922209c5 --- /dev/null +++ b/packages/components/package.json @@ -0,0 +1,51 @@ +{ + "name": "flowise-components", + "version": "1.2.12", + "description": "Flowiseai Components", + "main": "dist/src/index", + "types": "dist/src/index.d.ts", + "scripts": { + "build": "tsc && gulp", + "dev": "tsc --watch" + }, + "keywords": [], + "homepage": "https://flowiseai.com", + "author": { + "name": "Henry Heng", + "email": "henryheng@flowiseai.com" + }, + "license": "SEE LICENSE IN LICENSE.md", + "dependencies": { + "@dqbd/tiktoken": "^1.0.7", + "@huggingface/inference": "1", + "@pinecone-database/pinecone": "^0.0.12", + "@supabase/supabase-js": "^2.21.0", + "@types/js-yaml": "^4.0.5", + "axios": "^0.27.2", 
+ "cheerio": "^1.0.0-rc.12", + "chromadb": "^1.4.2", + "cohere-ai": "^6.2.0", + "d3-dsv": "2", + "dotenv": "^16.0.0", + "express": "^4.17.3", + "faiss-node": "^0.2.1", + "form-data": "^4.0.0", + "graphql": "^16.6.0", + "langchain": "^0.0.91", + "linkifyjs": "^4.1.1", + "mammoth": "^1.5.1", + "moment": "^2.29.3", + "node-fetch": "^2.6.11", + "pdf-parse": "^1.1.1", + "weaviate-ts-client": "^1.1.0", + "ws": "^8.9.0", + "html-to-text": "^9.0.5" + }, + "devDependencies": { + "@types/gulp": "4.0.9", + "@types/node-fetch": "2.6.2", + "@types/ws": "^8.5.3", + "gulp": "^4.0.2", + "typescript": "^4.8.4" + } +} diff --git a/packages/components/src/Interface.ts b/packages/components/src/Interface.ts new file mode 100644 index 0000000000000000000000000000000000000000..bd94cca8907ad4cbd323b3b1dce6f04626be1fe8 --- /dev/null +++ b/packages/components/src/Interface.ts @@ -0,0 +1,145 @@ +/** + * Types + */ + +export type NodeParamsType = 'options' | 'string' | 'number' | 'boolean' | 'password' | 'json' | 'code' | 'date' | 'file' | 'folder' + +export type CommonType = string | number | boolean | undefined | null + +export type MessageType = 'apiMessage' | 'userMessage' + +/** + * Others + */ + +export interface ICommonObject { + [key: string]: any | CommonType | ICommonObject | CommonType[] | ICommonObject[] +} + +export interface IAttachment { + content: string + contentType: string + size?: number + filename?: string +} + +export interface INodeOptionsValue { + label: string + name: string + description?: string +} + +export interface INodeOutputsValue { + label: string + name: string + baseClasses: string[] + description?: string +} + +export interface INodeParams { + label: string + name: string + type: NodeParamsType | string + default?: CommonType | ICommonObject | ICommonObject[] + description?: string + options?: Array + optional?: boolean | INodeDisplay + rows?: number + list?: boolean + acceptVariable?: boolean + placeholder?: string + fileType?: string + additionalParams?: 
boolean +} + +export interface INodeExecutionData { + [key: string]: CommonType | CommonType[] | ICommonObject | ICommonObject[] +} + +export interface INodeDisplay { + [key: string]: string[] | string +} + +export interface INodeProperties { + label: string + name: string + type: string + icon: string + category: string + baseClasses: string[] + description?: string + filePath?: string +} + +export interface INode extends INodeProperties { + inputs?: INodeParams[] + output?: INodeOutputsValue[] + init?(nodeData: INodeData, input: string, options?: ICommonObject): Promise + run?(nodeData: INodeData, input: string, options?: ICommonObject): Promise +} + +export interface INodeData extends INodeProperties { + id: string + inputs?: ICommonObject + outputs?: ICommonObject + instance?: any +} + +export interface IMessage { + message: string + type: MessageType +} + +/** + * Classes + */ + +import { PromptTemplate as LangchainPromptTemplate, PromptTemplateInput } from 'langchain/prompts' +import { VectorStore } from 'langchain/vectorstores/base' + +export class PromptTemplate extends LangchainPromptTemplate { + promptValues: ICommonObject + + constructor(input: PromptTemplateInput) { + super(input) + } +} + +export interface PromptRetrieverInput { + name: string + description: string + systemMessage: string +} + +const fixedTemplate = `Here is a question: +{input} +` +export class PromptRetriever { + name: string + description: string + systemMessage: string + + constructor(fields: PromptRetrieverInput) { + this.name = fields.name + this.description = fields.description + this.systemMessage = `${fields.systemMessage}\n${fixedTemplate}` + } +} + +export interface VectorStoreRetrieverInput { + name: string + description: string + vectorStore: VectorStore +} + +export class VectorStoreRetriever { + name: string + description: string + vectorStore: VectorStore + + constructor(fields: VectorStoreRetrieverInput) { + this.name = fields.name + this.description = 
fields.description + this.vectorStore = fields.vectorStore + } +} diff --git a/packages/components/src/index.ts b/packages/components/src/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..ae2e380ee40f1b3c545b2f7deab44173c03396bd --- /dev/null +++ b/packages/components/src/index.ts @@ -0,0 +1,8 @@ +import dotenv from 'dotenv' +import path from 'path' + +const envPath = path.join(__dirname, '..', '..', '.env') +dotenv.config({ path: envPath, override: true }) + +export * from './Interface' +export * from './utils' diff --git a/packages/components/src/utils.ts b/packages/components/src/utils.ts new file mode 100644 index 0000000000000000000000000000000000000000..de026a359bb826774c8b401e08d55a3ecd41d6b4 --- /dev/null +++ b/packages/components/src/utils.ts @@ -0,0 +1,286 @@ +import axios from 'axios' +import { load } from 'cheerio' +import * as fs from 'fs' +import * as path from 'path' +import { BaseCallbackHandler } from 'langchain/callbacks' +import { Server } from 'socket.io' +import { ChainValues } from 'langchain/dist/schema' + +export const numberOrExpressionRegex = '^(\\d+\\.?\\d*|{{.*}})$' //return true if string consists only numbers OR expression {{}} +export const notEmptyRegex = '(.|\\s)*\\S(.|\\s)*' //return true if string is not empty or blank + +/** + * Get base classes of components + * + * @export + * @param {any} targetClass + * @returns {string[]} + */ +export const getBaseClasses = (targetClass: any) => { + const baseClasses: string[] = [] + + if (targetClass instanceof Function) { + let baseClass = targetClass + + while (baseClass) { + const newBaseClass = Object.getPrototypeOf(baseClass) + if (newBaseClass && newBaseClass !== Object && newBaseClass.name) { + baseClass = newBaseClass + baseClasses.push(baseClass.name) + } else { + break + } + } + } + return baseClasses +} + +/** + * Serialize axios query params + * + * @export + * @param {any} params + * @param {boolean} skipIndex // Set to true if you want same params 
to be: param=1¶m=2 instead of: param[0]=1¶m[1]=2 + * @returns {string} + */ +export function serializeQueryParams(params: any, skipIndex?: boolean): string { + const parts: any[] = [] + + const encode = (val: string) => { + return encodeURIComponent(val) + .replace(/%3A/gi, ':') + .replace(/%24/g, '$') + .replace(/%2C/gi, ',') + .replace(/%20/g, '+') + .replace(/%5B/gi, '[') + .replace(/%5D/gi, ']') + } + + const convertPart = (key: string, val: any) => { + if (val instanceof Date) val = val.toISOString() + else if (val instanceof Object) val = JSON.stringify(val) + + parts.push(encode(key) + '=' + encode(val)) + } + + Object.entries(params).forEach(([key, val]) => { + if (val === null || typeof val === 'undefined') return + + if (Array.isArray(val)) val.forEach((v, i) => convertPart(`${key}${skipIndex ? '' : `[${i}]`}`, v)) + else convertPart(key, val) + }) + + return parts.join('&') +} + +/** + * Handle error from try catch + * + * @export + * @param {any} error + * @returns {string} + */ +export function handleErrorMessage(error: any): string { + let errorMessage = '' + + if (error.message) { + errorMessage += error.message + '. ' + } + + if (error.response && error.response.data) { + if (error.response.data.error) { + if (typeof error.response.data.error === 'object') errorMessage += JSON.stringify(error.response.data.error) + '. ' + else if (typeof error.response.data.error === 'string') errorMessage += error.response.data.error + '. ' + } else if (error.response.data.msg) errorMessage += error.response.data.msg + '. ' + else if (error.response.data.Message) errorMessage += error.response.data.Message + '. ' + else if (typeof error.response.data === 'string') errorMessage += error.response.data + '. ' + } + + if (!errorMessage) errorMessage = 'Unexpected Error.' 
+ + return errorMessage +} + +/** + * Returns the path of node modules package + * @param {string} packageName + * @returns {string} + */ +export const getNodeModulesPackagePath = (packageName: string): string => { + const checkPaths = [ + path.join(__dirname, '..', 'node_modules', packageName), + path.join(__dirname, '..', '..', 'node_modules', packageName), + path.join(__dirname, '..', '..', '..', 'node_modules', packageName), + path.join(__dirname, '..', '..', '..', '..', 'node_modules', packageName), + path.join(__dirname, '..', '..', '..', '..', '..', 'node_modules', packageName) + ] + for (const checkPath of checkPaths) { + if (fs.existsSync(checkPath)) { + return checkPath + } + } + return '' +} + +/** + * Get input variables + * @param {string} paramValue + * @returns {boolean} + */ +export const getInputVariables = (paramValue: string): string[] => { + let returnVal = paramValue + const variableStack = [] + const inputVariables = [] + let startIdx = 0 + const endIdx = returnVal.length + + while (startIdx < endIdx) { + const substr = returnVal.substring(startIdx, startIdx + 1) + + // Store the opening double curly bracket + if (substr === '{') { + variableStack.push({ substr, startIdx: startIdx + 1 }) + } + + // Found the complete variable + if (substr === '}' && variableStack.length > 0 && variableStack[variableStack.length - 1].substr === '{') { + const variableStartIdx = variableStack[variableStack.length - 1].startIdx + const variableEndIdx = startIdx + const variableFullPath = returnVal.substring(variableStartIdx, variableEndIdx) + inputVariables.push(variableFullPath) + variableStack.pop() + } + startIdx += 1 + } + return inputVariables +} + +/** + * Crawl all available urls given a domain url and limit + * @param {string} url + * @param {number} limit + * @returns {string[]} + */ +export const getAvailableURLs = async (url: string, limit: number) => { + try { + const availableUrls: string[] = [] + + console.info(`Crawling: ${url}`) + 
availableUrls.push(url) + + const response = await axios.get(url) + const $ = load(response.data) + + const relativeLinks = $("a[href^='/']") + console.info(`Available Relative Links: ${relativeLinks.length}`) + if (relativeLinks.length === 0) return availableUrls + + limit = Math.min(limit + 1, relativeLinks.length) // limit + 1 is because index start from 0 and index 0 is occupy by url + console.info(`True Limit: ${limit}`) + + // availableUrls.length cannot exceed limit + for (let i = 0; availableUrls.length < limit; i++) { + if (i === limit) break // some links are repetitive so it won't added into the array which cause the length to be lesser + console.info(`index: ${i}`) + const element = relativeLinks[i] + + const relativeUrl = $(element).attr('href') + if (!relativeUrl) continue + + const absoluteUrl = new URL(relativeUrl, url).toString() + if (!availableUrls.includes(absoluteUrl)) { + availableUrls.push(absoluteUrl) + console.info(`Found unique relative link: ${absoluteUrl}`) + } + } + + return availableUrls + } catch (err) { + throw new Error(`getAvailableURLs: ${err?.message}`) + } +} + +/** + * Custom chain handler class + */ +export class CustomChainHandler extends BaseCallbackHandler { + name = 'custom_chain_handler' + isLLMStarted = false + socketIO: Server + socketIOClientId = '' + skipK = 0 // Skip streaming for first K numbers of handleLLMStart + returnSourceDocuments = false + + constructor(socketIO: Server, socketIOClientId: string, skipK?: number, returnSourceDocuments?: boolean) { + super() + this.socketIO = socketIO + this.socketIOClientId = socketIOClientId + this.skipK = skipK ?? this.skipK + this.returnSourceDocuments = returnSourceDocuments ?? 
this.returnSourceDocuments + } + + handleLLMStart() { + if (this.skipK > 0) this.skipK -= 1 + } + + handleLLMNewToken(token: string) { + if (this.skipK === 0) { + if (!this.isLLMStarted) { + this.isLLMStarted = true + this.socketIO.to(this.socketIOClientId).emit('start', token) + } + this.socketIO.to(this.socketIOClientId).emit('token', token) + } + } + + handleLLMEnd() { + this.socketIO.to(this.socketIOClientId).emit('end') + } + + handleChainEnd(outputs: ChainValues): void | Promise { + if (this.returnSourceDocuments) { + this.socketIO.to(this.socketIOClientId).emit('sourceDocuments', outputs?.sourceDocuments) + } + } +} + +export const returnJSONStr = (jsonStr: string): string => { + let jsonStrArray = jsonStr.split(':') + + let wholeString = '' + for (let i = 0; i < jsonStrArray.length; i++) { + if (jsonStrArray[i].includes(',') && jsonStrArray[i + 1] !== undefined) { + const splitValueAndTitle = jsonStrArray[i].split(',') + const value = splitValueAndTitle[0] + const newTitle = splitValueAndTitle[1] + wholeString += handleEscapeDoubleQuote(value) + ',' + newTitle + ':' + } else { + wholeString += wholeString === '' ? jsonStrArray[i] + ':' : handleEscapeDoubleQuote(jsonStrArray[i]) + } + } + return wholeString +} + +const handleEscapeDoubleQuote = (value: string): string => { + let newValue = '' + if (value.includes('"')) { + const valueArray = value.split('"') + for (let i = 0; i < valueArray.length; i++) { + if ((i + 1) % 2 !== 0) { + switch (valueArray[i]) { + case '': + newValue += '"' + break + case '}': + newValue += '"}' + break + default: + newValue += '\\"' + valueArray[i] + '\\"' + } + } else { + newValue += valueArray[i] + } + } + } + return newValue === '' ? 
value : newValue +} diff --git a/packages/components/tsconfig.json b/packages/components/tsconfig.json new file mode 100644 index 0000000000000000000000000000000000000000..2002d62f76dd139ab852340cb2c35150cb82290e --- /dev/null +++ b/packages/components/tsconfig.json @@ -0,0 +1,20 @@ +{ + "compilerOptions": { + "lib": ["ES2020"], + "experimentalDecorators": true /* Enable experimental support for TC39 stage 2 draft decorators. */, + "emitDecoratorMetadata": true /* Emit design-type metadata for decorated declarations in source files. */, + "target": "ES2020", // or higher + "outDir": "./dist/", + "resolveJsonModule": true, + "esModuleInterop": true /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables `allowSyntheticDefaultImports` for type compatibility. */, + "forceConsistentCasingInFileNames": true /* Ensure that casing is correct in imports. */, + "strict": true /* Enable all strict type-checking options. */, + "skipLibCheck": true /* Skip type checking all .d.ts files. 
*/, + "sourceMap": true, + "strictPropertyInitialization": false, + "useUnknownInCatchVariables": false, + "declaration": true, + "module": "commonjs" + }, + "include": ["src", "nodes"] +} diff --git a/packages/server/.env.example b/packages/server/.env.example new file mode 100644 index 0000000000000000000000000000000000000000..fd82c096c3afc15dfca7ca7a45ea0c2f03e4b415 --- /dev/null +++ b/packages/server/.env.example @@ -0,0 +1,4 @@ +PORT=3000 +# FLOWISE_USERNAME=user +# FLOWISE_PASSWORD=1234 +# EXECUTION_MODE=child or main \ No newline at end of file diff --git a/packages/server/README.md b/packages/server/README.md new file mode 100644 index 0000000000000000000000000000000000000000..2cdf41d1cf95102ad5915b58418467db75fd1aea --- /dev/null +++ b/packages/server/README.md @@ -0,0 +1,54 @@ + + +# Flowise - LangchainJS UI + +![Flowise](https://github.com/FlowiseAI/Flowise/blob/main/images/flowise.gif?raw=true) + +Drag & drop UI to build your customized LLM flow using [LangchainJS](https://github.com/hwchase17/langchainjs) + +## ⚡Quick Start + +1. Install Flowise + ```bash + npm install -g flowise + ``` +2. Start Flowise + + ```bash + npx flowise start + ``` + +3. Open [http://localhost:3000](http://localhost:3000) + +## 🔒 Authentication + +To enable app level authentication, add `FLOWISE_USERNAME` and `FLOWISE_PASSWORD` to the `.env` file: + +``` +FLOWISE_USERNAME=user +FLOWISE_PASSWORD=1234 +``` + +## 📖 Documentation + +Coming Soon + +## 💻 Cloud Hosted + +Coming Soon + +## 🌐 Self Host + +Coming Soon + +## 🙋 Support + +Feel free to ask any questions, raise problems, and request new features in [discussion](https://github.com/FlowiseAI/Flowise/discussions) + +## 🙌 Contributing + +See [contributing guide](https://github.com/FlowiseAI/Flowise/blob/master/CONTRIBUTING.md). Reach out to us at [Discord](https://discord.gg/jbaHfsRVBW) if you have any questions or issues. 
+ +## 📄 License + +Source code in this repository is made available under the [MIT License](https://github.com/FlowiseAI/Flowise/blob/master/LICENSE.md). diff --git a/packages/server/babel.config.js b/packages/server/babel.config.js new file mode 100644 index 0000000000000000000000000000000000000000..f966f26db2c11f82c89ddd55986afeabc7ad7fac --- /dev/null +++ b/packages/server/babel.config.js @@ -0,0 +1,3 @@ +module.exports = { + extends: '../../babel.config.js' +} diff --git a/packages/server/bin/dev b/packages/server/bin/dev new file mode 100755 index 0000000000000000000000000000000000000000..d5b53266563a8f6779abd7bb9f7017cbd1c2f727 --- /dev/null +++ b/packages/server/bin/dev @@ -0,0 +1,17 @@ +#!/usr/bin/env node + +const oclif = require('@oclif/core') + +const path = require('path') +const project = path.join(__dirname, '..', 'tsconfig.json') + +// In dev mode -> use ts-node and dev plugins +process.env.NODE_ENV = 'development' + +require('ts-node').register({ project }) + +// In dev mode, always show stack traces +oclif.settings.debug = true + +// Start the CLI +oclif.run().then(oclif.flush).catch(oclif.Errors.handle) diff --git a/packages/server/bin/dev.cmd b/packages/server/bin/dev.cmd new file mode 100755 index 0000000000000000000000000000000000000000..077b57ae7528beb564a148508f4e12d916c613e9 --- /dev/null +++ b/packages/server/bin/dev.cmd @@ -0,0 +1,3 @@ +@echo off + +node "%~dp0\dev" %* \ No newline at end of file diff --git a/packages/server/bin/run b/packages/server/bin/run new file mode 100755 index 0000000000000000000000000000000000000000..a7635de86ed11c42df7e2c4ffe43ced29ca0a6db --- /dev/null +++ b/packages/server/bin/run @@ -0,0 +1,5 @@ +#!/usr/bin/env node + +const oclif = require('@oclif/core') + +oclif.run().then(require('@oclif/core/flush')).catch(require('@oclif/core/handle')) diff --git a/packages/server/bin/run.cmd b/packages/server/bin/run.cmd new file mode 100755 index 
0000000000000000000000000000000000000000..cf40b543c96644d4de91223fd6004a57f9609169 --- /dev/null +++ b/packages/server/bin/run.cmd @@ -0,0 +1,3 @@ +@echo off + +node "%~dp0\run" %* \ No newline at end of file diff --git a/packages/server/marketplaces/API Agent.json b/packages/server/marketplaces/API Agent.json new file mode 100644 index 0000000000000000000000000000000000000000..20e270af261ff7957efdd597029755fb541de64c --- /dev/null +++ b/packages/server/marketplaces/API Agent.json @@ -0,0 +1,973 @@ +{ + "description": "Given API docs, agent automatically decide which API to call, generating url and body request from conversation", + "nodes": [ + { + "width": 300, + "height": 459, + "id": "getApiChain_0", + "position": { + "x": 1222.6923202234623, + "y": 359.97676456347756 + }, + "type": "customNode", + "data": { + "id": "getApiChain_0", + "label": "GET API Chain", + "name": "getApiChain", + "type": "GETApiChain", + "baseClasses": ["GETApiChain", "BaseChain", "BaseLangChain"], + "category": "Chains", + "description": "Chain to run queries against GET API", + "inputParams": [ + { + "label": "API Documentation", + "name": "apiDocs", + "type": "string", + "description": "Description of how API works. Please refer to more examples", + "rows": 4, + "id": "getApiChain_0-input-apiDocs-string" + }, + { + "label": "Headers", + "name": "headers", + "type": "json", + "additionalParams": true, + "optional": true, + "id": "getApiChain_0-input-headers-json" + }, + { + "label": "URL Prompt", + "name": "urlPrompt", + "type": "string", + "description": "Prompt used to tell LLMs how to construct the URL. Must contains {api_docs} and {question}", + "default": "You are given the below API Documentation:\n{api_docs}\nUsing this documentation, generate the full API url to call for answering the user question.\nYou should build the API url in order to get a response that is as short as possible, while still getting the necessary information to answer the question. 
Pay attention to deliberately exclude any unnecessary pieces of data in the API call.\n\nQuestion:{question}\nAPI url:", + "rows": 4, + "additionalParams": true, + "id": "getApiChain_0-input-urlPrompt-string" + }, + { + "label": "Answer Prompt", + "name": "ansPrompt", + "type": "string", + "description": "Prompt used to tell LLMs how to return the API response. Must contains {api_response}, {api_url}, and {question}", + "default": "Given this {api_response} response for {api_url}. use the given response to answer this {question}", + "rows": 4, + "additionalParams": true, + "id": "getApiChain_0-input-ansPrompt-string" + } + ], + "inputAnchors": [ + { + "label": "Language Model", + "name": "model", + "type": "BaseLanguageModel", + "id": "getApiChain_0-input-model-BaseLanguageModel" + } + ], + "inputs": { + "model": "{{chatOpenAI_1.data.instance}}", + "apiDocs": "BASE URL: https://api.open-meteo.com/\n\nAPI Documentation\nThe API endpoint /v1/forecast accepts a geographical coordinate, a list of weather variables and responds with a JSON hourly weather forecast for 7 days. Time always starts at 0:00 today and contains 168 hours. All URL parameters are listed below:\n\nParameter\tFormat\tRequired\tDefault\tDescription\nlatitude, longitude\tFloating point\tYes\t\tGeographical WGS84 coordinate of the location\nhourly\tString array\tNo\t\tA list of weather variables which should be returned. Values can be comma separated, or multiple &hourly= parameter in the URL can be used.\ndaily\tString array\tNo\t\tA list of daily weather variable aggregations which should be returned. Values can be comma separated, or multiple &daily= parameter in the URL can be used. 
If daily weather variables are specified, parameter timezone is required.\ncurrent_weather\tBool\tNo\tfalse\tInclude current weather conditions in the JSON output.\ntemperature_unit\tString\tNo\tcelsius\tIf fahrenheit is set, all temperature values are converted to Fahrenheit.\nwindspeed_unit\tString\tNo\tkmh\tOther wind speed speed units: ms, mph and kn\nprecipitation_unit\tString\tNo\tmm\tOther precipitation amount units: inch\ntimeformat\tString\tNo\tiso8601\tIf format unixtime is selected, all time values are returned in UNIX epoch time in seconds. Please note that all timestamp are in GMT+0! For daily values with unix timestamps, please apply utc_offset_seconds again to get the correct date.\ntimezone\tString\tNo\tGMT\tIf timezone is set, all timestamps are returned as local-time and data is returned starting at 00:00 local-time. Any time zone name from the time zone database is supported. If auto is set as a time zone, the coordinates will be automatically resolved to the local time zone.\npast_days\tInteger (0-2)\tNo\t0\tIf past_days is set, yesterday or the day before yesterday data are also returned.\nstart_date\nend_date\tString (yyyy-mm-dd)\tNo\t\tThe time interval to get weather data. A day must be specified as an ISO8601 date (e.g. 2022-06-30).\nmodels\tString array\tNo\tauto\tManually select one or more weather models. Per default, the best suitable weather models will be combined.\n\nHourly Parameter Definition\nThe parameter &hourly= accepts the following values. Most weather variables are given as an instantaneous value for the indicated hour. Some variables like precipitation are calculated from the preceding hour as an average or sum.\n\nVariable\tValid time\tUnit\tDescription\ntemperature_2m\tInstant\t°C (°F)\tAir temperature at 2 meters above ground\nsnowfall\tPreceding hour sum\tcm (inch)\tSnowfall amount of the preceding hour in centimeters. For the water equivalent in millimeter, divide by 7. E.g. 
7 cm snow = 10 mm precipitation water equivalent\nrain\tPreceding hour sum\tmm (inch)\tRain from large scale weather systems of the preceding hour in millimeter\nshowers\tPreceding hour sum\tmm (inch)\tShowers from convective precipitation in millimeters from the preceding hour\nweathercode\tInstant\tWMO code\tWeather condition as a numeric code. Follow WMO weather interpretation codes. See table below for details.\nsnow_depth\tInstant\tmeters\tSnow depth on the ground\nfreezinglevel_height\tInstant\tmeters\tAltitude above sea level of the 0°C level\nvisibility\tInstant\tmeters\tViewing distance in meters. Influenced by low clouds, humidity and aerosols. Maximum visibility is approximately 24 km.", + "headers": "", + "urlPrompt": "You are given the below API Documentation:\n{api_docs}\nUsing this documentation, generate the full API url to call for answering the user question.\nYou should build the API url in order to get a response that is as short as possible, while still getting the necessary information to answer the question. Pay attention to deliberately exclude any unnecessary pieces of data in the API call.\n\nQuestion:{question}\nAPI url:", + "ansPrompt": "Given this {api_response} response for {api_url}. 
use the given response to answer this {question}" + }, + "outputAnchors": [ + { + "id": "getApiChain_0-output-getApiChain-GETApiChain|BaseChain|BaseLangChain", + "name": "getApiChain", + "label": "GETApiChain", + "type": "GETApiChain | BaseChain | BaseLangChain" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 1222.6923202234623, + "y": 359.97676456347756 + }, + "dragging": false + }, + { + "width": 300, + "height": 383, + "id": "conversationalAgent_0", + "position": { + "x": 1993.8540808923876, + "y": 952.6297081192247 + }, + "type": "customNode", + "data": { + "id": "conversationalAgent_0", + "label": "Conversational Agent", + "name": "conversationalAgent", + "type": "AgentExecutor", + "baseClasses": ["AgentExecutor", "BaseChain", "BaseLangChain"], + "category": "Agents", + "description": "Conversational agent for a chat model. It will utilize chat specific prompts", + "inputParams": [ + { + "label": "System Message", + "name": "systemMessage", + "type": "string", + "rows": 4, + "optional": true, + "additionalParams": true, + "id": "conversationalAgent_0-input-systemMessage-string" + }, + { + "label": "Human Message", + "name": "humanMessage", + "type": "string", + "rows": 4, + "optional": true, + "additionalParams": true, + "id": "conversationalAgent_0-input-humanMessage-string" + } + ], + "inputAnchors": [ + { + "label": "Allowed Tools", + "name": "tools", + "type": "Tool", + "list": true, + "id": "conversationalAgent_0-input-tools-Tool" + }, + { + "label": "Language Model", + "name": "model", + "type": "BaseLanguageModel", + "id": "conversationalAgent_0-input-model-BaseLanguageModel" + }, + { + "label": "Memory", + "name": "memory", + "type": "BaseChatMemory", + "id": "conversationalAgent_0-input-memory-BaseChatMemory" + } + ], + "inputs": { + "tools": ["{{chainTool_0.data.instance}}", "{{chainTool_1.data.instance}}"], + "model": "{{chatOpenAI_0.data.instance}}", + "memory": 
"{{bufferMemory_0.data.instance}}", + "systemMessage": "", + "humanMessage": "" + }, + "outputAnchors": [ + { + "id": "conversationalAgent_0-output-conversationalAgent-AgentExecutor|BaseChain|BaseLangChain", + "name": "conversationalAgent", + "label": "AgentExecutor", + "type": "AgentExecutor | BaseChain | BaseLangChain" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 1993.8540808923876, + "y": 952.6297081192247 + }, + "dragging": false + }, + { + "width": 300, + "height": 602, + "id": "chainTool_0", + "position": { + "x": 1600.1485877701232, + "y": 276.38970893436533 + }, + "type": "customNode", + "data": { + "id": "chainTool_0", + "label": "Chain Tool", + "name": "chainTool", + "type": "ChainTool", + "baseClasses": ["ChainTool", "DynamicTool", "Tool", "StructuredTool", "BaseLangChain"], + "category": "Tools", + "description": "Use a chain as allowed tool for agent", + "inputParams": [ + { + "label": "Chain Name", + "name": "name", + "type": "string", + "placeholder": "state-of-union-qa", + "id": "chainTool_0-input-name-string" + }, + { + "label": "Chain Description", + "name": "description", + "type": "string", + "rows": 3, + "placeholder": "State of the Union QA - useful for when you need to ask questions about the most recent state of the union address.", + "id": "chainTool_0-input-description-string" + }, + { + "label": "Return Direct", + "name": "returnDirect", + "type": "boolean", + "optional": true, + "id": "chainTool_0-input-returnDirect-boolean" + } + ], + "inputAnchors": [ + { + "label": "Base Chain", + "name": "baseChain", + "type": "BaseChain", + "id": "chainTool_0-input-baseChain-BaseChain" + } + ], + "inputs": { + "name": "weather-qa", + "description": "useful for when you need to ask question about weather", + "returnDirect": "", + "baseChain": "{{getApiChain_0.data.instance}}" + }, + "outputAnchors": [ + { + "id": 
"chainTool_0-output-chainTool-ChainTool|DynamicTool|Tool|StructuredTool|BaseLangChain", + "name": "chainTool", + "label": "ChainTool", + "type": "ChainTool | DynamicTool | Tool | StructuredTool | BaseLangChain" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 1600.1485877701232, + "y": 276.38970893436533 + }, + "dragging": false + }, + { + "width": 300, + "height": 524, + "id": "chatOpenAI_0", + "position": { + "x": 1270.7548070814019, + "y": 1565.864417576483 + }, + "type": "customNode", + "data": { + "id": "chatOpenAI_0", + "label": "ChatOpenAI", + "name": "chatOpenAI", + "type": "ChatOpenAI", + "baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel", "BaseLangChain"], + "category": "Chat Models", + "description": "Wrapper around OpenAI large language models that use the Chat endpoint", + "inputParams": [ + { + "label": "OpenAI Api Key", + "name": "openAIApiKey", + "type": "password", + "id": "chatOpenAI_0-input-openAIApiKey-password" + }, + { + "label": "Model Name", + "name": "modelName", + "type": "options", + "options": [ + { + "label": "gpt-4", + "name": "gpt-4" + }, + { + "label": "gpt-4-0314", + "name": "gpt-4-0314" + }, + { + "label": "gpt-4-32k-0314", + "name": "gpt-4-32k-0314" + }, + { + "label": "gpt-3.5-turbo", + "name": "gpt-3.5-turbo" + }, + { + "label": "gpt-3.5-turbo-0301", + "name": "gpt-3.5-turbo-0301" + } + ], + "default": "gpt-3.5-turbo", + "optional": true, + "id": "chatOpenAI_0-input-modelName-options" + }, + { + "label": "Temperature", + "name": "temperature", + "type": "number", + "default": 0.9, + "optional": true, + "id": "chatOpenAI_0-input-temperature-number" + }, + { + "label": "Max Tokens", + "name": "maxTokens", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-maxTokens-number" + }, + { + "label": "Top Probability", + "name": "topP", + "type": "number", + "optional": true, + "additionalParams": true, + "id": 
"chatOpenAI_0-input-topP-number" + }, + { + "label": "Frequency Penalty", + "name": "frequencyPenalty", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-frequencyPenalty-number" + }, + { + "label": "Presence Penalty", + "name": "presencePenalty", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-presencePenalty-number" + }, + { + "label": "Timeout", + "name": "timeout", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-timeout-number" + }, + { + "label": "BasePath", + "name": "basepath", + "type": "string", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-basepath-string" + } + ], + "inputAnchors": [], + "inputs": { + "modelName": "gpt-3.5-turbo", + "temperature": 0.9, + "maxTokens": "", + "topP": "", + "frequencyPenalty": "", + "presencePenalty": "", + "timeout": "" + }, + "outputAnchors": [ + { + "id": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain", + "name": "chatOpenAI", + "label": "ChatOpenAI", + "type": "ChatOpenAI | BaseChatModel | BaseLanguageModel | BaseLangChain" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 1270.7548070814019, + "y": 1565.864417576483 + }, + "dragging": false + }, + { + "width": 300, + "height": 376, + "id": "bufferMemory_0", + "position": { + "x": 1642.0644080121785, + "y": 1715.6131926891728 + }, + "type": "customNode", + "data": { + "id": "bufferMemory_0", + "label": "Buffer Memory", + "name": "bufferMemory", + "type": "BufferMemory", + "baseClasses": ["BufferMemory", "BaseChatMemory", "BaseMemory"], + "category": "Memory", + "description": "Remembers previous conversational back and forths directly", + "inputParams": [ + { + "label": "Memory Key", + "name": "memoryKey", + "type": "string", + "default": "chat_history", + "id": "bufferMemory_0-input-memoryKey-string" + }, + 
{ + "label": "Input Key", + "name": "inputKey", + "type": "string", + "default": "input", + "id": "bufferMemory_0-input-inputKey-string" + } + ], + "inputAnchors": [], + "inputs": { + "memoryKey": "chat_history", + "inputKey": "input" + }, + "outputAnchors": [ + { + "id": "bufferMemory_0-output-bufferMemory-BufferMemory|BaseChatMemory|BaseMemory", + "name": "bufferMemory", + "label": "BufferMemory", + "type": "BufferMemory | BaseChatMemory | BaseMemory" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 1642.0644080121785, + "y": 1715.6131926891728 + }, + "dragging": false + }, + { + "width": 300, + "height": 524, + "id": "chatOpenAI_1", + "position": { + "x": 865.4424095725009, + "y": 350.7505181391267 + }, + "type": "customNode", + "data": { + "id": "chatOpenAI_1", + "label": "ChatOpenAI", + "name": "chatOpenAI", + "type": "ChatOpenAI", + "baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel", "BaseLangChain"], + "category": "Chat Models", + "description": "Wrapper around OpenAI large language models that use the Chat endpoint", + "inputParams": [ + { + "label": "OpenAI Api Key", + "name": "openAIApiKey", + "type": "password", + "id": "chatOpenAI_1-input-openAIApiKey-password" + }, + { + "label": "Model Name", + "name": "modelName", + "type": "options", + "options": [ + { + "label": "gpt-4", + "name": "gpt-4" + }, + { + "label": "gpt-4-0314", + "name": "gpt-4-0314" + }, + { + "label": "gpt-4-32k-0314", + "name": "gpt-4-32k-0314" + }, + { + "label": "gpt-3.5-turbo", + "name": "gpt-3.5-turbo" + }, + { + "label": "gpt-3.5-turbo-0301", + "name": "gpt-3.5-turbo-0301" + } + ], + "default": "gpt-3.5-turbo", + "optional": true, + "id": "chatOpenAI_1-input-modelName-options" + }, + { + "label": "Temperature", + "name": "temperature", + "type": "number", + "default": 0.9, + "optional": true, + "id": "chatOpenAI_1-input-temperature-number" + }, + { + "label": "Max Tokens", + "name": "maxTokens", + "type": 
"number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_1-input-maxTokens-number" + }, + { + "label": "Top Probability", + "name": "topP", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_1-input-topP-number" + }, + { + "label": "Frequency Penalty", + "name": "frequencyPenalty", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_1-input-frequencyPenalty-number" + }, + { + "label": "Presence Penalty", + "name": "presencePenalty", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_1-input-presencePenalty-number" + }, + { + "label": "Timeout", + "name": "timeout", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_1-input-timeout-number" + }, + { + "label": "BasePath", + "name": "basepath", + "type": "string", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_1-input-basepath-string" + } + ], + "inputAnchors": [], + "inputs": { + "modelName": "gpt-3.5-turbo", + "temperature": 0.9, + "maxTokens": "", + "topP": "", + "frequencyPenalty": "", + "presencePenalty": "", + "timeout": "" + }, + "outputAnchors": [ + { + "id": "chatOpenAI_1-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain", + "name": "chatOpenAI", + "label": "ChatOpenAI", + "type": "ChatOpenAI | BaseChatModel | BaseLanguageModel | BaseLangChain" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 865.4424095725009, + "y": 350.7505181391267 + }, + "dragging": false + }, + { + "width": 300, + "height": 524, + "id": "chatOpenAI_2", + "position": { + "x": 587.6425146349426, + "y": 917.1494176892741 + }, + "type": "customNode", + "data": { + "id": "chatOpenAI_2", + "label": "ChatOpenAI", + "name": "chatOpenAI", + "type": "ChatOpenAI", + "baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel", "BaseLangChain"], + "category": "Chat Models", + 
"description": "Wrapper around OpenAI large language models that use the Chat endpoint", + "inputParams": [ + { + "label": "OpenAI Api Key", + "name": "openAIApiKey", + "type": "password", + "id": "chatOpenAI_2-input-openAIApiKey-password" + }, + { + "label": "Model Name", + "name": "modelName", + "type": "options", + "options": [ + { + "label": "gpt-4", + "name": "gpt-4" + }, + { + "label": "gpt-4-0314", + "name": "gpt-4-0314" + }, + { + "label": "gpt-4-32k-0314", + "name": "gpt-4-32k-0314" + }, + { + "label": "gpt-3.5-turbo", + "name": "gpt-3.5-turbo" + }, + { + "label": "gpt-3.5-turbo-0301", + "name": "gpt-3.5-turbo-0301" + } + ], + "default": "gpt-3.5-turbo", + "optional": true, + "id": "chatOpenAI_2-input-modelName-options" + }, + { + "label": "Temperature", + "name": "temperature", + "type": "number", + "default": 0.9, + "optional": true, + "id": "chatOpenAI_2-input-temperature-number" + }, + { + "label": "Max Tokens", + "name": "maxTokens", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_2-input-maxTokens-number" + }, + { + "label": "Top Probability", + "name": "topP", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_2-input-topP-number" + }, + { + "label": "Frequency Penalty", + "name": "frequencyPenalty", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_2-input-frequencyPenalty-number" + }, + { + "label": "Presence Penalty", + "name": "presencePenalty", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_2-input-presencePenalty-number" + }, + { + "label": "Timeout", + "name": "timeout", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_2-input-timeout-number" + }, + { + "label": "BasePath", + "name": "basepath", + "type": "string", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_2-input-basepath-string" + } + ], + "inputAnchors": [], + "inputs": { 
+ "modelName": "gpt-3.5-turbo", + "temperature": 0.9, + "maxTokens": "", + "topP": "", + "frequencyPenalty": "", + "presencePenalty": "", + "timeout": "" + }, + "outputAnchors": [ + { + "id": "chatOpenAI_2-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain", + "name": "chatOpenAI", + "label": "ChatOpenAI", + "type": "ChatOpenAI | BaseChatModel | BaseLanguageModel | BaseLangChain" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 587.6425146349426, + "y": 917.1494176892741 + }, + "dragging": false + }, + { + "width": 300, + "height": 602, + "id": "chainTool_1", + "position": { + "x": 1284.7746596034926, + "y": 895.1444797047182 + }, + "type": "customNode", + "data": { + "id": "chainTool_1", + "label": "Chain Tool", + "name": "chainTool", + "type": "ChainTool", + "baseClasses": ["ChainTool", "DynamicTool", "Tool", "StructuredTool", "BaseLangChain"], + "category": "Tools", + "description": "Use a chain as allowed tool for agent", + "inputParams": [ + { + "label": "Chain Name", + "name": "name", + "type": "string", + "placeholder": "state-of-union-qa", + "id": "chainTool_1-input-name-string" + }, + { + "label": "Chain Description", + "name": "description", + "type": "string", + "rows": 3, + "placeholder": "State of the Union QA - useful for when you need to ask questions about the most recent state of the union address.", + "id": "chainTool_1-input-description-string" + }, + { + "label": "Return Direct", + "name": "returnDirect", + "type": "boolean", + "optional": true, + "id": "chainTool_1-input-returnDirect-boolean" + } + ], + "inputAnchors": [ + { + "label": "Base Chain", + "name": "baseChain", + "type": "BaseChain", + "id": "chainTool_1-input-baseChain-BaseChain" + } + ], + "inputs": { + "name": "discord-bot", + "description": "useful for when you need to send message to Discord", + "returnDirect": "", + "baseChain": "{{postApiChain_0.data.instance}}" + }, + "outputAnchors": [ + { + "id": 
"chainTool_1-output-chainTool-ChainTool|DynamicTool|Tool|StructuredTool|BaseLangChain", + "name": "chainTool", + "label": "ChainTool", + "type": "ChainTool | DynamicTool | Tool | StructuredTool | BaseLangChain" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 1284.7746596034926, + "y": 895.1444797047182 + }, + "dragging": false + }, + { + "width": 300, + "height": 459, + "id": "postApiChain_0", + "position": { + "x": 933.3631140153886, + "y": 974.8756002461283 + }, + "type": "customNode", + "data": { + "id": "postApiChain_0", + "label": "POST API Chain", + "name": "postApiChain", + "type": "POSTApiChain", + "baseClasses": ["POSTApiChain", "BaseChain", "BaseLangChain"], + "category": "Chains", + "description": "Chain to run queries against POST API", + "inputParams": [ + { + "label": "API Documentation", + "name": "apiDocs", + "type": "string", + "description": "Description of how API works. Please refer to more examples", + "rows": 4, + "id": "postApiChain_0-input-apiDocs-string" + }, + { + "label": "Headers", + "name": "headers", + "type": "json", + "additionalParams": true, + "optional": true, + "id": "postApiChain_0-input-headers-json" + }, + { + "label": "URL Prompt", + "name": "urlPrompt", + "type": "string", + "description": "Prompt used to tell LLMs how to construct the URL. 
Must contains {api_docs} and {question}", + "default": "You are given the below API Documentation:\n{api_docs}\nUsing this documentation, generate a json string with two keys: \"url\" and \"data\".\nThe value of \"url\" should be a string, which is the API url to call for answering the user question.\nThe value of \"data\" should be a dictionary of key-value pairs you want to POST to the url as a JSON body.\nBe careful to always use double quotes for strings in the json string.\nYou should build the json string in order to get a response that is as short as possible, while still getting the necessary information to answer the question. Pay attention to deliberately exclude any unnecessary pieces of data in the API call.\n\nQuestion:{question}\njson string:", + "rows": 4, + "additionalParams": true, + "id": "postApiChain_0-input-urlPrompt-string" + }, + { + "label": "Answer Prompt", + "name": "ansPrompt", + "type": "string", + "description": "Prompt used to tell LLMs how to return the API response. Must contains {api_response}, {api_url}, and {question}", + "default": "You are given the below API Documentation:\n{api_docs}\nUsing this documentation, generate a json string with two keys: \"url\" and \"data\".\nThe value of \"url\" should be a string, which is the API url to call for answering the user question.\nThe value of \"data\" should be a dictionary of key-value pairs you want to POST to the url as a JSON body.\nBe careful to always use double quotes for strings in the json string.\nYou should build the json string in order to get a response that is as short as possible, while still getting the necessary information to answer the question. 
Pay attention to deliberately exclude any unnecessary pieces of data in the API call.\n\nQuestion:{question}\njson string: {api_url_body}\n\nHere is the response from the API:\n\n{api_response}\n\nSummarize this response to answer the original question.\n\nSummary:", + "rows": 4, + "additionalParams": true, + "id": "postApiChain_0-input-ansPrompt-string" + } + ], + "inputAnchors": [ + { + "label": "Language Model", + "name": "model", + "type": "BaseLanguageModel", + "id": "postApiChain_0-input-model-BaseLanguageModel" + } + ], + "inputs": { + "model": "{{chatOpenAI_2.data.instance}}", + "apiDocs": "API documentation:\nEndpoint: https://eog776prcv6dg0j.m.pipedream.net\n\nThis API is for sending Discord message\n\nQuery body table:\nmessage | string | Message to send | required\n\nResponse schema (string):\nresult | string", + "headers": "", + "urlPrompt": "You are given the below API Documentation:\n{api_docs}\nUsing this documentation, generate a json string with two keys: \"url\" and \"data\".\nThe value of \"url\" should be a string, which is the API url to call for answering the user question.\nThe value of \"data\" should be a dictionary of key-value pairs you want to POST to the url as a JSON body.\nBe careful to always use double quotes for strings in the json string.\nYou should build the json string in order to get a response that is as short as possible, while still getting the necessary information to answer the question. 
Pay attention to deliberately exclude any unnecessary pieces of data in the API call.\n\nQuestion:{question}\njson string:", + "ansPrompt": "You are given the below API Documentation:\n{api_docs}\nUsing this documentation, generate a json string with two keys: \"url\" and \"data\".\nThe value of \"url\" should be a string, which is the API url to call for answering the user question.\nThe value of \"data\" should be a dictionary of key-value pairs you want to POST to the url as a JSON body.\nBe careful to always use double quotes for strings in the json string.\nYou should build the json string in order to get a response that is as short as possible, while still getting the necessary information to answer the question. Pay attention to deliberately exclude any unnecessary pieces of data in the API call.\n\nQuestion:{question}\njson string: {api_url_body}\n\nHere is the response from the API:\n\n{api_response}\n\nSummarize this response to answer the original question.\n\nSummary:" + }, + "outputAnchors": [ + { + "id": "postApiChain_0-output-postApiChain-POSTApiChain|BaseChain|BaseLangChain", + "name": "postApiChain", + "label": "POSTApiChain", + "type": "POSTApiChain | BaseChain | BaseLangChain" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 933.3631140153886, + "y": 974.8756002461283 + }, + "dragging": false + } + ], + "edges": [ + { + "source": "getApiChain_0", + "sourceHandle": "getApiChain_0-output-getApiChain-GETApiChain|BaseChain|BaseLangChain", + "target": "chainTool_0", + "targetHandle": "chainTool_0-input-baseChain-BaseChain", + "type": "buttonedge", + "id": "getApiChain_0-getApiChain_0-output-getApiChain-GETApiChain|BaseChain|BaseLangChain-chainTool_0-chainTool_0-input-baseChain-BaseChain", + "data": { + "label": "" + } + }, + { + "source": "chatOpenAI_0", + "sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain", + "target": "conversationalAgent_0", 
+ "targetHandle": "conversationalAgent_0-input-model-BaseLanguageModel", + "type": "buttonedge", + "id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain-conversationalAgent_0-conversationalAgent_0-input-model-BaseLanguageModel", + "data": { + "label": "" + } + }, + { + "source": "bufferMemory_0", + "sourceHandle": "bufferMemory_0-output-bufferMemory-BufferMemory|BaseChatMemory|BaseMemory", + "target": "conversationalAgent_0", + "targetHandle": "conversationalAgent_0-input-memory-BaseChatMemory", + "type": "buttonedge", + "id": "bufferMemory_0-bufferMemory_0-output-bufferMemory-BufferMemory|BaseChatMemory|BaseMemory-conversationalAgent_0-conversationalAgent_0-input-memory-BaseChatMemory", + "data": { + "label": "" + } + }, + { + "source": "chatOpenAI_1", + "sourceHandle": "chatOpenAI_1-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain", + "target": "getApiChain_0", + "targetHandle": "getApiChain_0-input-model-BaseLanguageModel", + "type": "buttonedge", + "id": "chatOpenAI_1-chatOpenAI_1-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain-getApiChain_0-getApiChain_0-input-model-BaseLanguageModel", + "data": { + "label": "" + } + }, + { + "source": "chatOpenAI_2", + "sourceHandle": "chatOpenAI_2-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain", + "target": "postApiChain_0", + "targetHandle": "postApiChain_0-input-model-BaseLanguageModel", + "type": "buttonedge", + "id": "chatOpenAI_2-chatOpenAI_2-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain-postApiChain_0-postApiChain_0-input-model-BaseLanguageModel", + "data": { + "label": "" + } + }, + { + "source": "postApiChain_0", + "sourceHandle": "postApiChain_0-output-postApiChain-POSTApiChain|BaseChain|BaseLangChain", + "target": "chainTool_1", + "targetHandle": "chainTool_1-input-baseChain-BaseChain", + "type": "buttonedge", + "id": 
"postApiChain_0-postApiChain_0-output-postApiChain-POSTApiChain|BaseChain|BaseLangChain-chainTool_1-chainTool_1-input-baseChain-BaseChain", + "data": { + "label": "" + } + }, + { + "source": "chainTool_0", + "sourceHandle": "chainTool_0-output-chainTool-ChainTool|DynamicTool|Tool|StructuredTool|BaseLangChain", + "target": "conversationalAgent_0", + "targetHandle": "conversationalAgent_0-input-tools-Tool", + "type": "buttonedge", + "id": "chainTool_0-chainTool_0-output-chainTool-ChainTool|DynamicTool|Tool|StructuredTool|BaseLangChain-conversationalAgent_0-conversationalAgent_0-input-tools-Tool", + "data": { + "label": "" + } + }, + { + "source": "chainTool_1", + "sourceHandle": "chainTool_1-output-chainTool-ChainTool|DynamicTool|Tool|StructuredTool|BaseLangChain", + "target": "conversationalAgent_0", + "targetHandle": "conversationalAgent_0-input-tools-Tool", + "type": "buttonedge", + "id": "chainTool_1-chainTool_1-output-chainTool-ChainTool|DynamicTool|Tool|StructuredTool|BaseLangChain-conversationalAgent_0-conversationalAgent_0-input-tools-Tool", + "data": { + "label": "" + } + } + ] +} diff --git a/packages/server/marketplaces/Antonym.json b/packages/server/marketplaces/Antonym.json new file mode 100644 index 0000000000000000000000000000000000000000..2e21fd221e3a04e9516691a755a434e12c07dfd4 --- /dev/null +++ b/packages/server/marketplaces/Antonym.json @@ -0,0 +1,435 @@ +{ + "description": "Output antonym of given user input using few-shot prompt template built with examples", + "nodes": [ + { + "width": 300, + "height": 534, + "id": "promptTemplate_1", + "position": { + "x": 532.2791692529131, + "y": -31.128527027841372 + }, + "type": "customNode", + "data": { + "id": "promptTemplate_1", + "label": "Prompt Template", + "name": "promptTemplate", + "type": "PromptTemplate", + "baseClasses": ["PromptTemplate", "BaseStringPromptTemplate", "BasePromptTemplate"], + "category": "Prompts", + "description": "Schema to represent a basic prompt for an LLM", + "inputParams": 
[ + { + "label": "Template", + "name": "template", + "type": "string", + "rows": 4, + "placeholder": "What is a good name for a company that makes {product}?", + "id": "promptTemplate_1-input-template-string" + }, + { + "label": "Format Prompt Values", + "name": "promptValues", + "type": "string", + "rows": 4, + "placeholder": "{\n \"input_language\": \"English\",\n \"output_language\": \"French\"\n}", + "optional": true, + "acceptVariable": true, + "list": true, + "id": "promptTemplate_1-input-promptValues-string" + } + ], + "inputAnchors": [], + "inputs": { + "template": "Word: {word}\\nAntonym: {antonym}\\n", + "promptValues": "" + }, + "outputAnchors": [ + { + "id": "promptTemplate_1-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate", + "name": "promptTemplate", + "label": "PromptTemplate", + "type": "PromptTemplate | BaseStringPromptTemplate | BasePromptTemplate" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 532.2791692529131, + "y": -31.128527027841372 + }, + "dragging": false + }, + { + "width": 300, + "height": 956, + "id": "fewShotPromptTemplate_1", + "position": { + "x": 886.3229032369354, + "y": -32.18537399495787 + }, + "type": "customNode", + "data": { + "id": "fewShotPromptTemplate_1", + "label": "Few Shot Prompt Template", + "name": "fewShotPromptTemplate", + "type": "FewShotPromptTemplate", + "baseClasses": ["FewShotPromptTemplate", "BaseStringPromptTemplate", "BasePromptTemplate"], + "category": "Prompts", + "description": "Prompt template you can build with examples", + "inputParams": [ + { + "label": "Examples", + "name": "examples", + "type": "string", + "rows": 4, + "placeholder": "[\n { \"word\": \"happy\", \"antonym\": \"sad\" },\n { \"word\": \"tall\", \"antonym\": \"short\" },\n]", + "id": "fewShotPromptTemplate_1-input-examples-string" + }, + { + "label": "Prefix", + "name": "prefix", + "type": "string", + "rows": 4, + "placeholder": "Give the antonym 
of every input", + "id": "fewShotPromptTemplate_1-input-prefix-string" + }, + { + "label": "Suffix", + "name": "suffix", + "type": "string", + "rows": 4, + "placeholder": "Word: {input}\nAntonym:", + "id": "fewShotPromptTemplate_1-input-suffix-string" + }, + { + "label": "Example Seperator", + "name": "exampleSeparator", + "type": "string", + "placeholder": "\n\n", + "id": "fewShotPromptTemplate_1-input-exampleSeparator-string" + }, + { + "label": "Template Format", + "name": "templateFormat", + "type": "options", + "options": [ + { + "label": "f-string", + "name": "f-string" + }, + { + "label": "jinja-2", + "name": "jinja-2" + } + ], + "default": "f-string", + "id": "fewShotPromptTemplate_1-input-templateFormat-options" + } + ], + "inputAnchors": [ + { + "label": "Example Prompt", + "name": "examplePrompt", + "type": "PromptTemplate", + "id": "fewShotPromptTemplate_1-input-examplePrompt-PromptTemplate" + } + ], + "inputs": { + "examples": "[\n { \"word\": \"happy\", \"antonym\": \"sad\" },\n { \"word\": \"tall\", \"antonym\": \"short\" }\n]", + "examplePrompt": "{{promptTemplate_1.data.instance}}", + "prefix": "Give the antonym of every input", + "suffix": "Word: {input}\\nAntonym:", + "exampleSeparator": "\\n\\n", + "templateFormat": "f-string" + }, + "outputAnchors": [ + { + "id": "fewShotPromptTemplate_1-output-fewShotPromptTemplate-FewShotPromptTemplate|BaseStringPromptTemplate|BasePromptTemplate", + "name": "fewShotPromptTemplate", + "label": "FewShotPromptTemplate", + "type": "FewShotPromptTemplate | BaseStringPromptTemplate | BasePromptTemplate" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 886.3229032369354, + "y": -32.18537399495787 + }, + "dragging": false + }, + { + "width": 300, + "height": 526, + "id": "openAI_1", + "position": { + "x": 1224.5139327142097, + "y": -30.864315286062364 + }, + "type": "customNode", + "data": { + "id": "openAI_1", + "label": "OpenAI", + "name": "openAI", + "type": 
"OpenAI", + "baseClasses": ["OpenAI", "BaseLLM", "BaseLanguageModel", "BaseLangChain"], + "category": "LLMs", + "description": "Wrapper around OpenAI large language models", + "inputParams": [ + { + "label": "OpenAI Api Key", + "name": "openAIApiKey", + "type": "password", + "id": "openAI_1-input-openAIApiKey-password" + }, + { + "label": "Model Name", + "name": "modelName", + "type": "options", + "options": [ + { + "label": "text-davinci-003", + "name": "text-davinci-003" + }, + { + "label": "text-davinci-002", + "name": "text-davinci-002" + }, + { + "label": "text-curie-001", + "name": "text-curie-001" + }, + { + "label": "text-babbage-001", + "name": "text-babbage-001" + } + ], + "default": "text-davinci-003", + "optional": true, + "id": "openAI_1-input-modelName-options" + }, + { + "label": "Temperature", + "name": "temperature", + "type": "number", + "default": 0.7, + "optional": true, + "id": "openAI_1-input-temperature-number" + }, + { + "label": "Max Tokens", + "name": "maxTokens", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_1-input-maxTokens-number" + }, + { + "label": "Top Probability", + "name": "topP", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_1-input-topP-number" + }, + { + "label": "Best Of", + "name": "bestOf", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_1-input-bestOf-number" + }, + { + "label": "Frequency Penalty", + "name": "frequencyPenalty", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_1-input-frequencyPenalty-number" + }, + { + "label": "Presence Penalty", + "name": "presencePenalty", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_1-input-presencePenalty-number" + }, + { + "label": "Batch Size", + "name": "batchSize", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_1-input-batchSize-number" + }, + { + "label": 
"Timeout", + "name": "timeout", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_1-input-timeout-number" + }, + { + "label": "BasePath", + "name": "basepath", + "type": "string", + "optional": true, + "additionalParams": true, + "id": "openAI_1-input-basepath-string" + } + ], + "inputAnchors": [], + "inputs": { + "modelName": "text-davinci-003", + "temperature": 0.7, + "maxTokens": "", + "topP": "", + "bestOf": "", + "frequencyPenalty": "", + "presencePenalty": "", + "batchSize": "", + "timeout": "" + }, + "outputAnchors": [ + { + "id": "openAI_1-output-openAI-OpenAI|BaseLLM|BaseLanguageModel|BaseLangChain", + "name": "openAI", + "label": "OpenAI", + "type": "OpenAI | BaseLLM | BaseLanguageModel | BaseLangChain" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 1224.5139327142097, + "y": -30.864315286062364 + }, + "dragging": false + }, + { + "width": 300, + "height": 407, + "id": "llmChain_1", + "position": { + "x": 1635.363191180743, + "y": 450.00105475193766 + }, + "type": "customNode", + "data": { + "id": "llmChain_1", + "label": "LLM Chain", + "name": "llmChain", + "type": "LLMChain", + "baseClasses": ["LLMChain", "BaseChain", "BaseLangChain"], + "category": "Chains", + "description": "Chain to run queries against LLMs", + "inputParams": [ + { + "label": "Chain Name", + "name": "chainName", + "type": "string", + "placeholder": "Name Your Chain", + "optional": true, + "id": "llmChain_1-input-chainName-string" + } + ], + "inputAnchors": [ + { + "label": "Language Model", + "name": "model", + "type": "BaseLanguageModel", + "id": "llmChain_1-input-model-BaseLanguageModel" + }, + { + "label": "Prompt", + "name": "prompt", + "type": "BasePromptTemplate", + "id": "llmChain_1-input-prompt-BasePromptTemplate" + } + ], + "inputs": { + "model": "{{openAI_1.data.instance}}", + "prompt": "{{fewShotPromptTemplate_1.data.instance}}", + "chainName": "" + }, + "outputAnchors": [ + { + 
"name": "output", + "label": "Output", + "type": "options", + "options": [ + { + "id": "llmChain_1-output-llmChain-LLMChain|BaseChain|BaseLangChain", + "name": "llmChain", + "label": "LLM Chain", + "type": "LLMChain | BaseChain | BaseLangChain" + }, + { + "id": "llmChain_1-output-outputPrediction-string", + "name": "outputPrediction", + "label": "Output Prediction", + "type": "string" + } + ], + "default": "llmChain" + } + ], + "outputs": { + "output": "llmChain" + }, + "selected": false + }, + "positionAbsolute": { + "x": 1635.363191180743, + "y": 450.00105475193766 + }, + "selected": false, + "dragging": false + } + ], + "edges": [ + { + "source": "promptTemplate_1", + "sourceHandle": "promptTemplate_1-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate", + "target": "fewShotPromptTemplate_1", + "targetHandle": "fewShotPromptTemplate_1-input-examplePrompt-PromptTemplate", + "type": "buttonedge", + "id": "promptTemplate_1-promptTemplate_1-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate-fewShotPromptTemplate_1-fewShotPromptTemplate_1-input-examplePrompt-PromptTemplate", + "data": { + "label": "" + } + }, + { + "source": "openAI_1", + "sourceHandle": "openAI_1-output-openAI-OpenAI|BaseLLM|BaseLanguageModel|BaseLangChain", + "target": "llmChain_1", + "targetHandle": "llmChain_1-input-model-BaseLanguageModel", + "type": "buttonedge", + "id": "openAI_1-openAI_1-output-openAI-OpenAI|BaseLLM|BaseLanguageModel|BaseLangChain-llmChain_1-llmChain_1-input-model-BaseLanguageModel", + "data": { + "label": "" + } + }, + { + "source": "fewShotPromptTemplate_1", + "sourceHandle": "fewShotPromptTemplate_1-output-fewShotPromptTemplate-FewShotPromptTemplate|BaseStringPromptTemplate|BasePromptTemplate", + "target": "llmChain_1", + "targetHandle": "llmChain_1-input-prompt-BasePromptTemplate", + "type": "buttonedge", + "id": 
"fewShotPromptTemplate_1-fewShotPromptTemplate_1-output-fewShotPromptTemplate-FewShotPromptTemplate|BaseStringPromptTemplate|BasePromptTemplate-llmChain_1-llmChain_1-input-prompt-BasePromptTemplate", + "data": { + "label": "" + } + } + ] +} diff --git a/packages/server/marketplaces/AutoGPT.json b/packages/server/marketplaces/AutoGPT.json new file mode 100644 index 0000000000000000000000000000000000000000..4fd1cfdb06800beaa949e82c41481abc7d42d1aa --- /dev/null +++ b/packages/server/marketplaces/AutoGPT.json @@ -0,0 +1,638 @@ +{ + "description": "Use AutoGPT - Autonomous agent with chain of thoughts for self-guided task completion", + "nodes": [ + { + "width": 300, + "height": 629, + "id": "autoGPT_0", + "position": { + "x": 1627.8124366169843, + "y": 129.76619452400155 + }, + "type": "customNode", + "data": { + "id": "autoGPT_0", + "label": "AutoGPT", + "name": "autoGPT", + "type": "AutoGPT", + "baseClasses": ["AutoGPT"], + "category": "Agents", + "description": "Autonomous agent with chain of thoughts by GPT4", + "inputParams": [ + { + "label": "AutoGPT Name", + "name": "aiName", + "type": "string", + "placeholder": "Tom", + "optional": true, + "id": "autoGPT_0-input-aiName-string" + }, + { + "label": "AutoGPT Role", + "name": "aiRole", + "type": "string", + "placeholder": "Assistant", + "optional": true, + "id": "autoGPT_0-input-aiRole-string" + }, + { + "label": "Maximum Loop", + "name": "maxLoop", + "type": "number", + "default": 5, + "optional": true, + "id": "autoGPT_0-input-maxLoop-number" + } + ], + "inputAnchors": [ + { + "label": "Allowed Tools", + "name": "tools", + "type": "Tool", + "list": true, + "id": "autoGPT_0-input-tools-Tool" + }, + { + "label": "Chat Model", + "name": "model", + "type": "BaseChatModel", + "id": "autoGPT_0-input-model-BaseChatModel" + }, + { + "label": "Vector Store Retriever", + "name": "vectorStoreRetriever", + "type": "BaseRetriever", + "id": "autoGPT_0-input-vectorStoreRetriever-BaseRetriever" + } + ], + "inputs": { + "tools": 
["{{readFile_0.data.instance}}", "{{writeFile_1.data.instance}}", "{{serpAPI_0.data.instance}}"], + "model": "{{chatOpenAI_1.data.instance}}", + "vectorStoreRetriever": "{{pineconeExistingIndex_1.data.instance}}", + "aiName": "", + "aiRole": "", + "maxLoop": 5 + }, + "outputAnchors": [ + { + "id": "autoGPT_0-output-autoGPT-AutoGPT", + "name": "autoGPT", + "label": "AutoGPT", + "type": "AutoGPT" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 1627.8124366169843, + "y": 129.76619452400155 + }, + "dragging": false + }, + { + "width": 300, + "height": 526, + "id": "chatOpenAI_1", + "position": { + "x": 168.57515834535457, + "y": -90.74139976987627 + }, + "type": "customNode", + "data": { + "id": "chatOpenAI_1", + "label": "ChatOpenAI", + "name": "chatOpenAI", + "type": "ChatOpenAI", + "baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel", "BaseLangChain"], + "category": "Chat Models", + "description": "Wrapper around OpenAI large language models that use the Chat endpoint", + "inputParams": [ + { + "label": "OpenAI Api Key", + "name": "openAIApiKey", + "type": "password", + "id": "chatOpenAI_1-input-openAIApiKey-password" + }, + { + "label": "Model Name", + "name": "modelName", + "type": "options", + "options": [ + { + "label": "gpt-4", + "name": "gpt-4" + }, + { + "label": "gpt-4-0314", + "name": "gpt-4-0314" + }, + { + "label": "gpt-4-32k-0314", + "name": "gpt-4-32k-0314" + }, + { + "label": "gpt-3.5-turbo", + "name": "gpt-3.5-turbo" + }, + { + "label": "gpt-3.5-turbo-0301", + "name": "gpt-3.5-turbo-0301" + } + ], + "default": "gpt-3.5-turbo", + "optional": true, + "id": "chatOpenAI_1-input-modelName-options" + }, + { + "label": "Temperature", + "name": "temperature", + "type": "number", + "default": 0.9, + "optional": true, + "id": "chatOpenAI_1-input-temperature-number" + }, + { + "label": "Max Tokens", + "name": "maxTokens", + "type": "number", + "optional": true, + "additionalParams": true, + 
"id": "chatOpenAI_1-input-maxTokens-number" + }, + { + "label": "Top Probability", + "name": "topP", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_1-input-topP-number" + }, + { + "label": "Frequency Penalty", + "name": "frequencyPenalty", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_1-input-frequencyPenalty-number" + }, + { + "label": "Presence Penalty", + "name": "presencePenalty", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_1-input-presencePenalty-number" + }, + { + "label": "Timeout", + "name": "timeout", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_1-input-timeout-number" + }, + { + "label": "BasePath", + "name": "basepath", + "type": "string", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_1-input-basepath-string" + } + ], + "inputAnchors": [], + "inputs": { + "modelName": "gpt-3.5-turbo", + "temperature": "0", + "maxTokens": "", + "topP": "", + "frequencyPenalty": "", + "presencePenalty": "", + "timeout": "" + }, + "outputAnchors": [ + { + "id": "chatOpenAI_1-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain", + "name": "chatOpenAI", + "label": "ChatOpenAI", + "type": "ChatOpenAI | BaseChatModel | BaseLanguageModel | BaseLangChain" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 168.57515834535457, + "y": -90.74139976987627 + }, + "dragging": false + }, + { + "width": 300, + "height": 279, + "id": "writeFile_1", + "position": { + "x": 546.3440710182241, + "y": 55.28691941459434 + }, + "type": "customNode", + "data": { + "id": "writeFile_1", + "label": "Write File", + "name": "writeFile", + "type": "WriteFile", + "baseClasses": ["WriteFile", "Tool", "StructuredTool", "BaseLangChain"], + "category": "Tools", + "description": "Write file to disk", + "inputParams": [ + { + "label": "Base Path", 
+ "name": "basePath", + "placeholder": "C:\\Users\\User\\Desktop", + "type": "string", + "optional": true, + "id": "writeFile_1-input-basePath-string" + } + ], + "inputAnchors": [], + "inputs": { + "basePath": "" + }, + "outputAnchors": [ + { + "id": "writeFile_1-output-writeFile-WriteFile|Tool|StructuredTool|BaseLangChain", + "name": "writeFile", + "label": "WriteFile", + "type": "WriteFile | Tool | StructuredTool | BaseLangChain" + } + ], + "outputs": {}, + "selected": false + }, + "positionAbsolute": { + "x": 546.3440710182241, + "y": 55.28691941459434 + }, + "selected": false, + "dragging": false + }, + { + "width": 300, + "height": 279, + "id": "readFile_0", + "position": { + "x": 881.2568465391292, + "y": -112.9631005153393 + }, + "type": "customNode", + "data": { + "id": "readFile_0", + "label": "Read File", + "name": "readFile", + "type": "ReadFile", + "baseClasses": ["ReadFile", "Tool", "StructuredTool", "BaseLangChain"], + "category": "Tools", + "description": "Read file from disk", + "inputParams": [ + { + "label": "Base Path", + "name": "basePath", + "placeholder": "C:\\Users\\User\\Desktop", + "type": "string", + "optional": true, + "id": "readFile_0-input-basePath-string" + } + ], + "inputAnchors": [], + "inputs": { + "basePath": "" + }, + "outputAnchors": [ + { + "id": "readFile_0-output-readFile-ReadFile|Tool|StructuredTool|BaseLangChain", + "name": "readFile", + "label": "ReadFile", + "type": "ReadFile | Tool | StructuredTool | BaseLangChain" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 881.2568465391292, + "y": -112.9631005153393 + }, + "dragging": false + }, + { + "width": 300, + "height": 279, + "id": "serpAPI_0", + "position": { + "x": 1244.740380161344, + "y": -193.9135818023827 + }, + "type": "customNode", + "data": { + "id": "serpAPI_0", + "label": "Serp API", + "name": "serpAPI", + "type": "SerpAPI", + "baseClasses": ["SerpAPI", "Tool", "StructuredTool", "BaseLangChain"], + 
"category": "Tools", + "description": "Wrapper around SerpAPI - a real-time API to access Google search results", + "inputParams": [ + { + "label": "Serp Api Key", + "name": "apiKey", + "type": "password", + "id": "serpAPI_0-input-apiKey-password" + } + ], + "inputAnchors": [], + "inputs": {}, + "outputAnchors": [ + { + "id": "serpAPI_0-output-serpAPI-SerpAPI|Tool|StructuredTool|BaseLangChain", + "name": "serpAPI", + "label": "SerpAPI", + "type": "SerpAPI | Tool | StructuredTool | BaseLangChain" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 1244.740380161344, + "y": -193.9135818023827 + }, + "dragging": false + }, + { + "width": 300, + "height": 331, + "id": "openAIEmbeddings_0", + "position": { + "x": 530.4714276286077, + "y": 487.0228196121594 + }, + "type": "customNode", + "data": { + "id": "openAIEmbeddings_0", + "label": "OpenAI Embeddings", + "name": "openAIEmbeddings", + "type": "OpenAIEmbeddings", + "baseClasses": ["OpenAIEmbeddings", "Embeddings"], + "category": "Embeddings", + "description": "OpenAI API to generate embeddings for a given text", + "inputParams": [ + { + "label": "OpenAI Api Key", + "name": "openAIApiKey", + "type": "password", + "id": "openAIEmbeddings_0-input-openAIApiKey-password" + }, + { + "label": "Strip New Lines", + "name": "stripNewLines", + "type": "boolean", + "optional": true, + "additionalParams": true, + "id": "openAIEmbeddings_0-input-stripNewLines-boolean" + }, + { + "label": "Batch Size", + "name": "batchSize", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAIEmbeddings_0-input-batchSize-number" + }, + { + "label": "Timeout", + "name": "timeout", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAIEmbeddings_0-input-timeout-number" + }, + { + "label": "BasePath", + "name": "basepath", + "type": "string", + "optional": true, + "additionalParams": true, + "id": "openAIEmbeddings_0-input-basepath-string" + 
} + ], + "inputAnchors": [], + "inputs": { + "stripNewLines": "", + "batchSize": "", + "timeout": "" + }, + "outputAnchors": [ + { + "id": "openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings", + "name": "openAIEmbeddings", + "label": "OpenAIEmbeddings", + "type": "OpenAIEmbeddings | Embeddings" + } + ], + "outputs": {}, + "selected": false + }, + "positionAbsolute": { + "x": 530.4714276286077, + "y": 487.0228196121594 + }, + "selected": false, + "dragging": false + }, + { + "width": 300, + "height": 652, + "id": "pineconeExistingIndex_1", + "position": { + "x": 943.1601557586332, + "y": 404.9622062733608 + }, + "type": "customNode", + "data": { + "id": "pineconeExistingIndex_1", + "label": "Pinecone Load Existing Index", + "name": "pineconeExistingIndex", + "type": "Pinecone", + "baseClasses": ["Pinecone", "VectorStoreRetriever", "BaseRetriever"], + "category": "Vector Stores", + "description": "Load existing index from Pinecone (i.e: Document has been upserted)", + "inputParams": [ + { + "label": "Pinecone Api Key", + "name": "pineconeApiKey", + "type": "password", + "id": "pineconeExistingIndex_1-input-pineconeApiKey-password" + }, + { + "label": "Pinecone Environment", + "name": "pineconeEnv", + "type": "string", + "id": "pineconeExistingIndex_1-input-pineconeEnv-string" + }, + { + "label": "Pinecone Index", + "name": "pineconeIndex", + "type": "string", + "id": "pineconeExistingIndex_1-input-pineconeIndex-string" + }, + { + "label": "Pinecone Namespace", + "name": "pineconeNamespace", + "type": "string", + "placeholder": "my-first-namespace", + "optional": true, + "additionalParams": true, + "id": "pineconeExistingIndex_1-input-pineconeNamespace-string" + }, + { + "label": "Pinecone Metadata Filter", + "name": "pineconeMetadataFilter", + "type": "json", + "optional": true, + "additionalParams": true, + "id": "pineconeExistingIndex_1-input-pineconeMetadataFilter-json" + }, + { + "label": "Top K", + "name": "topK", + "description": "Number of 
top results to fetch. Default to 4", + "placeholder": "4", + "type": "number", + "additionalParams": true, + "optional": true, + "id": "pineconeExistingIndex_1-input-topK-number" + } + ], + "inputAnchors": [ + { + "label": "Embeddings", + "name": "embeddings", + "type": "Embeddings", + "id": "pineconeExistingIndex_1-input-embeddings-Embeddings" + } + ], + "inputs": { + "embeddings": "{{openAIEmbeddings_0.data.instance}}", + "pineconeEnv": "us-west4-gcp", + "pineconeIndex": "", + "pineconeNamespace": "" + }, + "outputAnchors": [ + { + "name": "output", + "label": "Output", + "type": "options", + "options": [ + { + "id": "pineconeExistingIndex_1-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever", + "name": "retriever", + "label": "Pinecone Retriever", + "type": "Pinecone | VectorStoreRetriever | BaseRetriever" + }, + { + "id": "pineconeExistingIndex_1-output-vectorStore-Pinecone|VectorStore", + "name": "vectorStore", + "label": "Pinecone Vector Store", + "type": "Pinecone | VectorStore" + } + ], + "default": "retriever" + } + ], + "outputs": { + "output": "retriever" + }, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 943.1601557586332, + "y": 404.9622062733608 + }, + "dragging": false + } + ], + "edges": [ + { + "source": "pineconeExistingIndex_1", + "sourceHandle": "pineconeExistingIndex_1-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever", + "target": "autoGPT_0", + "targetHandle": "autoGPT_0-input-vectorStoreRetriever-BaseRetriever", + "type": "buttonedge", + "id": "pineconeExistingIndex_1-pineconeExistingIndex_1-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever-autoGPT_0-autoGPT_0-input-vectorStoreRetriever-BaseRetriever", + "data": { + "label": "" + } + }, + { + "source": "openAIEmbeddings_0", + "sourceHandle": "openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings", + "target": "pineconeExistingIndex_1", + "targetHandle": "pineconeExistingIndex_1-input-embeddings-Embeddings", + 
"type": "buttonedge", + "id": "openAIEmbeddings_0-openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings-pineconeExistingIndex_1-pineconeExistingIndex_1-input-embeddings-Embeddings", + "data": { + "label": "" + } + }, + { + "source": "chatOpenAI_1", + "sourceHandle": "chatOpenAI_1-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain", + "target": "autoGPT_0", + "targetHandle": "autoGPT_0-input-model-BaseChatModel", + "type": "buttonedge", + "id": "chatOpenAI_1-chatOpenAI_1-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain-autoGPT_0-autoGPT_0-input-model-BaseChatModel", + "data": { + "label": "" + } + }, + { + "source": "writeFile_1", + "sourceHandle": "writeFile_1-output-writeFile-WriteFile|Tool|StructuredTool|BaseLangChain", + "target": "autoGPT_0", + "targetHandle": "autoGPT_0-input-tools-Tool", + "type": "buttonedge", + "id": "writeFile_1-writeFile_1-output-writeFile-WriteFile|Tool|StructuredTool|BaseLangChain-autoGPT_0-autoGPT_0-input-tools-Tool", + "data": { + "label": "" + } + }, + { + "source": "readFile_0", + "sourceHandle": "readFile_0-output-readFile-ReadFile|Tool|StructuredTool|BaseLangChain", + "target": "autoGPT_0", + "targetHandle": "autoGPT_0-input-tools-Tool", + "type": "buttonedge", + "id": "readFile_0-readFile_0-output-readFile-ReadFile|Tool|StructuredTool|BaseLangChain-autoGPT_0-autoGPT_0-input-tools-Tool", + "data": { + "label": "" + } + }, + { + "source": "serpAPI_0", + "sourceHandle": "serpAPI_0-output-serpAPI-SerpAPI|Tool|StructuredTool|BaseLangChain", + "target": "autoGPT_0", + "targetHandle": "autoGPT_0-input-tools-Tool", + "type": "buttonedge", + "id": "serpAPI_0-serpAPI_0-output-serpAPI-SerpAPI|Tool|StructuredTool|BaseLangChain-autoGPT_0-autoGPT_0-input-tools-Tool", + "data": { + "label": "" + } + } + ] +} diff --git a/packages/server/marketplaces/BabyAGI.json b/packages/server/marketplaces/BabyAGI.json new file mode 100644 index 
0000000000000000000000000000000000000000..797b574fb21bbc6004a2820fb983d187cc169494 --- /dev/null +++ b/packages/server/marketplaces/BabyAGI.json @@ -0,0 +1,435 @@ +{ + "description": "Use BabyAGI to create tasks and reprioritize for a given objective", + "nodes": [ + { + "width": 300, + "height": 331, + "id": "openAIEmbeddings_1", + "position": { + "x": -84.60344342694289, + "y": -189.6930708050951 + }, + "type": "customNode", + "data": { + "id": "openAIEmbeddings_1", + "label": "OpenAI Embeddings", + "name": "openAIEmbeddings", + "type": "OpenAIEmbeddings", + "baseClasses": ["OpenAIEmbeddings", "Embeddings"], + "category": "Embeddings", + "description": "OpenAI API to generate embeddings for a given text", + "inputParams": [ + { + "label": "OpenAI Api Key", + "name": "openAIApiKey", + "type": "password", + "id": "openAIEmbeddings_1-input-openAIApiKey-password" + }, + { + "label": "Strip New Lines", + "name": "stripNewLines", + "type": "boolean", + "optional": true, + "additionalParams": true, + "id": "openAIEmbeddings_1-input-stripNewLines-boolean" + }, + { + "label": "Batch Size", + "name": "batchSize", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAIEmbeddings_1-input-batchSize-number" + }, + { + "label": "Timeout", + "name": "timeout", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAIEmbeddings_1-input-timeout-number" + }, + { + "label": "BasePath", + "name": "basepath", + "type": "string", + "optional": true, + "additionalParams": true, + "id": "openAIEmbeddings_1-input-basepath-string" + } + ], + "inputAnchors": [], + "inputs": { + "stripNewLines": "", + "batchSize": "", + "timeout": "" + }, + "outputAnchors": [ + { + "id": "openAIEmbeddings_1-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings", + "name": "openAIEmbeddings", + "label": "OpenAIEmbeddings", + "type": "OpenAIEmbeddings | Embeddings" + } + ], + "outputs": {}, + "selected": false + }, + "positionAbsolute": { + "x": 
-84.60344342694289, + "y": -189.6930708050951 + }, + "selected": false, + "dragging": false + }, + { + "width": 300, + "height": 652, + "id": "pineconeExistingIndex_1", + "position": { + "x": 264.729293346415, + "y": -190.36689763560724 + }, + "type": "customNode", + "data": { + "id": "pineconeExistingIndex_1", + "label": "Pinecone Load Existing Index", + "name": "pineconeExistingIndex", + "type": "Pinecone", + "baseClasses": ["Pinecone", "VectorStoreRetriever", "BaseRetriever"], + "category": "Vector Stores", + "description": "Load existing index from Pinecone (i.e: Document has been upserted)", + "inputParams": [ + { + "label": "Pinecone Api Key", + "name": "pineconeApiKey", + "type": "password", + "id": "pineconeExistingIndex_1-input-pineconeApiKey-password" + }, + { + "label": "Pinecone Environment", + "name": "pineconeEnv", + "type": "string", + "id": "pineconeExistingIndex_1-input-pineconeEnv-string" + }, + { + "label": "Pinecone Index", + "name": "pineconeIndex", + "type": "string", + "id": "pineconeExistingIndex_1-input-pineconeIndex-string" + }, + { + "label": "Pinecone Namespace", + "name": "pineconeNamespace", + "type": "string", + "placeholder": "my-first-namespace", + "optional": true, + "additionalParams": true, + "id": "pineconeExistingIndex_1-input-pineconeNamespace-string" + }, + { + "label": "Pinecone Metadata Filter", + "name": "pineconeMetadataFilter", + "type": "json", + "optional": true, + "additionalParams": true, + "id": "pineconeExistingIndex_1-input-pineconeMetadataFilter-json" + }, + { + "label": "Top K", + "name": "topK", + "description": "Number of top results to fetch. 
Default to 4", + "placeholder": "4", + "type": "number", + "additionalParams": true, + "optional": true, + "id": "pineconeExistingIndex_1-input-topK-number" + } + ], + "inputAnchors": [ + { + "label": "Embeddings", + "name": "embeddings", + "type": "Embeddings", + "id": "pineconeExistingIndex_1-input-embeddings-Embeddings" + } + ], + "inputs": { + "embeddings": "{{openAIEmbeddings_1.data.instance}}", + "pineconeEnv": "us-west4-gcp", + "pineconeIndex": "", + "pineconeNamespace": "" + }, + "outputAnchors": [ + { + "name": "output", + "label": "Output", + "type": "options", + "options": [ + { + "id": "pineconeExistingIndex_1-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever", + "name": "retriever", + "label": "Pinecone Retriever", + "type": "Pinecone | VectorStoreRetriever | BaseRetriever" + }, + { + "id": "pineconeExistingIndex_1-output-vectorStore-Pinecone|VectorStore", + "name": "vectorStore", + "label": "Pinecone Vector Store", + "type": "Pinecone | VectorStore" + } + ], + "default": "retriever" + } + ], + "outputs": { + "output": "vectorStore" + }, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 264.729293346415, + "y": -190.36689763560724 + }, + "dragging": false + }, + { + "width": 300, + "height": 526, + "id": "chatOpenAI_1", + "position": { + "x": 590.3367401418911, + "y": -374.0329977259934 + }, + "type": "customNode", + "data": { + "id": "chatOpenAI_1", + "label": "ChatOpenAI", + "name": "chatOpenAI", + "type": "ChatOpenAI", + "baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel", "BaseLangChain"], + "category": "Chat Models", + "description": "Wrapper around OpenAI large language models that use the Chat endpoint", + "inputParams": [ + { + "label": "OpenAI Api Key", + "name": "openAIApiKey", + "type": "password", + "id": "chatOpenAI_1-input-openAIApiKey-password" + }, + { + "label": "Model Name", + "name": "modelName", + "type": "options", + "options": [ + { + "label": "gpt-4", + "name": "gpt-4" + }, + { 
+ "label": "gpt-4-0314", + "name": "gpt-4-0314" + }, + { + "label": "gpt-4-32k-0314", + "name": "gpt-4-32k-0314" + }, + { + "label": "gpt-3.5-turbo", + "name": "gpt-3.5-turbo" + }, + { + "label": "gpt-3.5-turbo-0301", + "name": "gpt-3.5-turbo-0301" + } + ], + "default": "gpt-3.5-turbo", + "optional": true, + "id": "chatOpenAI_1-input-modelName-options" + }, + { + "label": "Temperature", + "name": "temperature", + "type": "number", + "default": 0.9, + "optional": true, + "id": "chatOpenAI_1-input-temperature-number" + }, + { + "label": "Max Tokens", + "name": "maxTokens", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_1-input-maxTokens-number" + }, + { + "label": "Top Probability", + "name": "topP", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_1-input-topP-number" + }, + { + "label": "Frequency Penalty", + "name": "frequencyPenalty", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_1-input-frequencyPenalty-number" + }, + { + "label": "Presence Penalty", + "name": "presencePenalty", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_1-input-presencePenalty-number" + }, + { + "label": "Timeout", + "name": "timeout", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_1-input-timeout-number" + }, + { + "label": "BasePath", + "name": "basepath", + "type": "string", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_1-input-basepath-string" + } + ], + "inputAnchors": [], + "inputs": { + "modelName": "gpt-3.5-turbo", + "temperature": "0", + "maxTokens": "", + "topP": "", + "frequencyPenalty": "", + "presencePenalty": "", + "timeout": "" + }, + "outputAnchors": [ + { + "id": "chatOpenAI_1-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain", + "name": "chatOpenAI", + "label": "ChatOpenAI", + "type": "ChatOpenAI | BaseChatModel | 
BaseLanguageModel | BaseLangChain" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "dragging": false, + "positionAbsolute": { + "x": 590.3367401418911, + "y": -374.0329977259934 + } + }, + { + "width": 300, + "height": 380, + "id": "babyAGI_1", + "position": { + "x": 950.8042093214954, + "y": 66.00028106865324 + }, + "type": "customNode", + "data": { + "id": "babyAGI_1", + "label": "BabyAGI", + "name": "babyAGI", + "type": "BabyAGI", + "baseClasses": ["BabyAGI"], + "category": "Agents", + "description": "Task Driven Autonomous Agent which creates new task and reprioritizes task list based on objective", + "inputParams": [ + { + "label": "Task Loop", + "name": "taskLoop", + "type": "number", + "default": 3, + "id": "babyAGI_1-input-taskLoop-number" + } + ], + "inputAnchors": [ + { + "label": "Chat Model", + "name": "model", + "type": "BaseChatModel", + "id": "babyAGI_1-input-model-BaseChatModel" + }, + { + "label": "Vector Store", + "name": "vectorStore", + "type": "VectorStore", + "id": "babyAGI_1-input-vectorStore-VectorStore" + } + ], + "inputs": { + "model": "{{chatOpenAI_1.data.instance}}", + "vectorStore": "{{pineconeExistingIndex_1.data.instance}}", + "taskLoop": 3 + }, + "outputAnchors": [ + { + "id": "babyAGI_1-output-babyAGI-BabyAGI", + "name": "babyAGI", + "label": "BabyAGI", + "type": "BabyAGI" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "dragging": false, + "positionAbsolute": { + "x": 950.8042093214954, + "y": 66.00028106865324 + } + } + ], + "edges": [ + { + "source": "openAIEmbeddings_1", + "sourceHandle": "openAIEmbeddings_1-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings", + "target": "pineconeExistingIndex_1", + "targetHandle": "pineconeExistingIndex_1-input-embeddings-Embeddings", + "type": "buttonedge", + "id": "openAIEmbeddings_1-openAIEmbeddings_1-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings-pineconeExistingIndex_1-pineconeExistingIndex_1-input-embeddings-Embeddings", + "data": 
{ + "label": "" + } + }, + { + "source": "chatOpenAI_1", + "sourceHandle": "chatOpenAI_1-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain", + "target": "babyAGI_1", + "targetHandle": "babyAGI_1-input-model-BaseChatModel", + "type": "buttonedge", + "id": "chatOpenAI_1-chatOpenAI_1-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain-babyAGI_1-babyAGI_1-input-model-BaseChatModel", + "data": { + "label": "" + } + }, + { + "source": "pineconeExistingIndex_1", + "sourceHandle": "pineconeExistingIndex_1-output-vectorStore-Pinecone|VectorStore", + "target": "babyAGI_1", + "targetHandle": "babyAGI_1-input-vectorStore-VectorStore", + "type": "buttonedge", + "id": "pineconeExistingIndex_1-pineconeExistingIndex_1-output-vectorStore-Pinecone|VectorStore-babyAGI_1-babyAGI_1-input-vectorStore-VectorStore", + "data": { + "label": "" + } + } + ] +} diff --git a/packages/server/marketplaces/ChatGPTPlugin.json b/packages/server/marketplaces/ChatGPTPlugin.json new file mode 100644 index 0000000000000000000000000000000000000000..648c94b7b0c5aae9856d7030a45d6523c189fd95 --- /dev/null +++ b/packages/server/marketplaces/ChatGPTPlugin.json @@ -0,0 +1,443 @@ +{ + "description": "Use ChatGPT Plugins within LangChain abstractions with GET and POST Tools", + "nodes": [ + { + "width": 300, + "height": 278, + "id": "aiPlugin_0", + "position": { + "x": 1086.2925487205378, + "y": 84.92168014974317 + }, + "type": "customNode", + "data": { + "id": "aiPlugin_0", + "label": "AI Plugin", + "name": "aiPlugin", + "type": "AIPlugin", + "baseClasses": ["AIPlugin", "Tool"], + "category": "Tools", + "description": "Execute actions using ChatGPT Plugin Url", + "inputParams": [ + { + "label": "Plugin Url", + "name": "pluginUrl", + "type": "string", + "placeholder": "https://www.klarna.com/.well-known/ai-plugin.json" + } + ], + "inputAnchors": [], + "inputs": { + "pluginUrl": "https://www.klarna.com/.well-known/ai-plugin.json" + }, + "outputAnchors": [ + { + 
"id": "aiPlugin_0-output-aiPlugin-AIPlugin|Tool", + "name": "aiPlugin", + "label": "AIPlugin", + "type": "AIPlugin | Tool" + } + ], + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 1086.2925487205378, + "y": 84.92168014974317 + }, + "dragging": false + }, + { + "width": 300, + "height": 251, + "id": "requestsGet_0", + "position": { + "x": 761.713884489628, + "y": 170.84830553778124 + }, + "type": "customNode", + "data": { + "id": "requestsGet_0", + "label": "Requests Get", + "name": "requestsGet", + "type": "RequestsGet", + "baseClasses": ["RequestsGet", "Tool", "StructuredTool", "BaseLangChain"], + "category": "Tools", + "description": "Execute HTTP GET requests", + "inputParams": [ + { + "label": "URL", + "name": "url", + "type": "string", + "description": "Agent will make call to this exact URL. If not specified, agent will try to figure out itself from AIPlugin if provided", + "additionalParams": true, + "optional": true, + "id": "requestsGet_0-input-url-string" + }, + { + "label": "Description", + "name": "description", + "type": "string", + "rows": 4, + "default": "A portal to the internet. Use this when you need to get specific content from a website. \nInput should be a url (i.e. https://www.google.com). The output will be the text response of the GET request.", + "description": "Acts like a prompt to tell agent when it should use this tool", + "additionalParams": true, + "optional": true, + "id": "requestsGet_0-input-description-string" + }, + { + "label": "Headers", + "name": "headers", + "type": "json", + "additionalParams": true, + "optional": true, + "id": "requestsGet_0-input-headers-json" + } + ], + "inputAnchors": [], + "inputs": { + "url": "", + "description": "A portal to the internet. Use this when you need to get specific content from a website. \nInput should be a url (i.e. https://www.google.com). 
The output will be the text response of the GET request.", + "headers": "" + }, + "outputAnchors": [ + { + "id": "requestsGet_0-output-requestsGet-RequestsGet|Tool|StructuredTool|BaseLangChain", + "name": "requestsGet", + "label": "RequestsGet", + "type": "RequestsGet | Tool | StructuredTool | BaseLangChain" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 761.713884489628, + "y": 170.84830553778124 + }, + "dragging": false + }, + { + "width": 300, + "height": 251, + "id": "requestsPost_0", + "position": { + "x": 436.4125209312256, + "y": 306.87715502984184 + }, + "type": "customNode", + "data": { + "id": "requestsPost_0", + "label": "Requests Post", + "name": "requestsPost", + "type": "RequestsPost", + "baseClasses": ["RequestsPost", "Tool", "StructuredTool", "BaseLangChain"], + "category": "Tools", + "description": "Execute HTTP POST requests", + "inputParams": [ + { + "label": "URL", + "name": "url", + "type": "string", + "description": "Agent will make call to this exact URL. If not specified, agent will try to figure out itself from AIPlugin if provided", + "additionalParams": true, + "optional": true, + "id": "requestsPost_0-input-url-string" + }, + { + "label": "Body", + "name": "body", + "type": "json", + "description": "JSON body for the POST request. 
If not specified, agent will try to figure out itself from AIPlugin if provided", + "additionalParams": true, + "optional": true, + "id": "requestsPost_0-input-body-json" + }, + { + "label": "Description", + "name": "description", + "type": "string", + "rows": 4, + "default": "Use this when you want to POST to a website.\nInput should be a json string with two keys: \"url\" and \"data\".\nThe value of \"url\" should be a string, and the value of \"data\" should be a dictionary of \nkey-value pairs you want to POST to the url as a JSON body.\nBe careful to always use double quotes for strings in the json string\nThe output will be the text response of the POST request.", + "description": "Acts like a prompt to tell agent when it should use this tool", + "additionalParams": true, + "optional": true, + "id": "requestsPost_0-input-description-string" + }, + { + "label": "Headers", + "name": "headers", + "type": "json", + "additionalParams": true, + "optional": true, + "id": "requestsPost_0-input-headers-json" + } + ], + "inputAnchors": [], + "inputs": { + "url": "", + "body": "", + "description": "Use this when you want to POST to a website.\nInput should be a json string with two keys: \"url\" and \"data\".\nThe value of \"url\" should be a string, and the value of \"data\" should be a dictionary of \nkey-value pairs you want to POST to the url as a JSON body.\nBe careful to always use double quotes for strings in the json string\nThe output will be the text response of the POST request.", + "headers": "" + }, + "outputAnchors": [ + { + "id": "requestsPost_0-output-requestsPost-RequestsPost|Tool|StructuredTool|BaseLangChain", + "name": "requestsPost", + "label": "RequestsPost", + "type": "RequestsPost | Tool | StructuredTool | BaseLangChain" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 436.4125209312256, + "y": 306.87715502984184 + }, + "dragging": false + }, + { + "width": 300, + "height": 280, + "id": 
"mrklAgentChat_0", + "position": { + "x": 1416.2054860029416, + "y": 451.43299014109715 + }, + "type": "customNode", + "data": { + "id": "mrklAgentChat_0", + "label": "MRKL Agent for Chat Models", + "name": "mrklAgentChat", + "type": "AgentExecutor", + "baseClasses": ["AgentExecutor", "BaseChain", "BaseLangChain"], + "category": "Agents", + "description": "Agent that uses the ReAct Framework to decide what action to take, optimized to be used with Chat Models", + "inputParams": [], + "inputAnchors": [ + { + "label": "Allowed Tools", + "name": "tools", + "type": "Tool", + "list": true, + "id": "mrklAgentChat_0-input-tools-Tool" + }, + { + "label": "Language Model", + "name": "model", + "type": "BaseLanguageModel", + "id": "mrklAgentChat_0-input-model-BaseLanguageModel" + } + ], + "inputs": { + "tools": ["{{requestsGet_0.data.instance}}", "{{requestsPost_0.data.instance}}", "{{aiPlugin_0.data.instance}}"], + "model": "{{chatOpenAI_0.data.instance}}" + }, + "outputAnchors": [ + { + "id": "mrklAgentChat_0-output-mrklAgentChat-AgentExecutor|BaseChain|BaseLangChain", + "name": "mrklAgentChat", + "label": "AgentExecutor", + "type": "AgentExecutor | BaseChain | BaseLangChain" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 1416.2054860029416, + "y": 451.43299014109715 + }, + "dragging": false + }, + { + "width": 300, + "height": 524, + "id": "chatOpenAI_0", + "position": { + "x": 797.0574814814245, + "y": 578.7641992971934 + }, + "type": "customNode", + "data": { + "id": "chatOpenAI_0", + "label": "ChatOpenAI", + "name": "chatOpenAI", + "type": "ChatOpenAI", + "baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel", "BaseLangChain"], + "category": "Chat Models", + "description": "Wrapper around OpenAI large language models that use the Chat endpoint", + "inputParams": [ + { + "label": "OpenAI Api Key", + "name": "openAIApiKey", + "type": "password", + "id": "chatOpenAI_0-input-openAIApiKey-password" + }, + { 
+ "label": "Model Name", + "name": "modelName", + "type": "options", + "options": [ + { + "label": "gpt-4", + "name": "gpt-4" + }, + { + "label": "gpt-4-0314", + "name": "gpt-4-0314" + }, + { + "label": "gpt-4-32k-0314", + "name": "gpt-4-32k-0314" + }, + { + "label": "gpt-3.5-turbo", + "name": "gpt-3.5-turbo" + }, + { + "label": "gpt-3.5-turbo-0301", + "name": "gpt-3.5-turbo-0301" + } + ], + "default": "gpt-3.5-turbo", + "optional": true, + "id": "chatOpenAI_0-input-modelName-options" + }, + { + "label": "Temperature", + "name": "temperature", + "type": "number", + "default": 0.9, + "optional": true, + "id": "chatOpenAI_0-input-temperature-number" + }, + { + "label": "Max Tokens", + "name": "maxTokens", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-maxTokens-number" + }, + { + "label": "Top Probability", + "name": "topP", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-topP-number" + }, + { + "label": "Frequency Penalty", + "name": "frequencyPenalty", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-frequencyPenalty-number" + }, + { + "label": "Presence Penalty", + "name": "presencePenalty", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-presencePenalty-number" + }, + { + "label": "Timeout", + "name": "timeout", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-timeout-number" + }, + { + "label": "BasePath", + "name": "basepath", + "type": "string", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-basepath-string" + } + ], + "inputAnchors": [], + "inputs": { + "modelName": "gpt-3.5-turbo", + "temperature": 0.9, + "maxTokens": "", + "topP": "", + "frequencyPenalty": "", + "presencePenalty": "", + "timeout": "" + }, + "outputAnchors": [ + { + "id": 
"chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain", + "name": "chatOpenAI", + "label": "ChatOpenAI", + "type": "ChatOpenAI | BaseChatModel | BaseLanguageModel | BaseLangChain" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 797.0574814814245, + "y": 578.7641992971934 + }, + "dragging": false + } + ], + "edges": [ + { + "source": "requestsGet_0", + "sourceHandle": "requestsGet_0-output-requestsGet-RequestsGet|Tool|StructuredTool|BaseLangChain", + "target": "mrklAgentChat_0", + "targetHandle": "mrklAgentChat_0-input-tools-Tool", + "type": "buttonedge", + "id": "requestsGet_0-requestsGet_0-output-requestsGet-RequestsGet|Tool|StructuredTool|BaseLangChain-mrklAgentChat_0-mrklAgentChat_0-input-tools-Tool", + "data": { + "label": "" + } + }, + { + "source": "aiPlugin_0", + "sourceHandle": "aiPlugin_0-output-aiPlugin-AIPlugin|Tool", + "target": "mrklAgentChat_0", + "targetHandle": "mrklAgentChat_0-input-tools-Tool", + "type": "buttonedge", + "id": "aiPlugin_0-aiPlugin_0-output-aiPlugin-AIPlugin|Tool-mrklAgentChat_0-mrklAgentChat_0-input-tools-Tool", + "data": { + "label": "" + } + }, + { + "source": "requestsPost_0", + "sourceHandle": "requestsPost_0-output-requestsPost-RequestsPost|Tool|StructuredTool|BaseLangChain", + "target": "mrklAgentChat_0", + "targetHandle": "mrklAgentChat_0-input-tools-Tool", + "type": "buttonedge", + "id": "requestsPost_0-requestsPost_0-output-requestsPost-RequestsPost|Tool|StructuredTool|BaseLangChain-mrklAgentChat_0-mrklAgentChat_0-input-tools-Tool", + "data": { + "label": "" + } + }, + { + "source": "chatOpenAI_0", + "sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain", + "target": "mrklAgentChat_0", + "targetHandle": "mrklAgentChat_0-input-model-BaseLanguageModel", + "type": "buttonedge", + "id": 
"chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain-mrklAgentChat_0-mrklAgentChat_0-input-model-BaseLanguageModel", + "data": { + "label": "" + } + } + ] +} diff --git a/packages/server/marketplaces/Conversational Agent.json b/packages/server/marketplaces/Conversational Agent.json new file mode 100644 index 0000000000000000000000000000000000000000..635455ce794a04ac7d9e0d59c3fa60e7871a0f13 --- /dev/null +++ b/packages/server/marketplaces/Conversational Agent.json @@ -0,0 +1,412 @@ +{ + "description": "A conversational agent for a chat model which utilize chat specific prompts", + "nodes": [ + { + "width": 300, + "height": 524, + "id": "chatOpenAI_1", + "position": { + "x": 56.646518061018355, + "y": 71.07043412525425 + }, + "type": "customNode", + "data": { + "id": "chatOpenAI_1", + "label": "ChatOpenAI", + "name": "chatOpenAI", + "type": "ChatOpenAI", + "baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel", "BaseLangChain"], + "category": "Chat Models", + "description": "Wrapper around OpenAI large language models that use the Chat endpoint", + "inputParams": [ + { + "label": "OpenAI Api Key", + "name": "openAIApiKey", + "type": "password", + "id": "chatOpenAI_1-input-openAIApiKey-password" + }, + { + "label": "Model Name", + "name": "modelName", + "type": "options", + "options": [ + { + "label": "gpt-4", + "name": "gpt-4" + }, + { + "label": "gpt-4-0314", + "name": "gpt-4-0314" + }, + { + "label": "gpt-4-32k-0314", + "name": "gpt-4-32k-0314" + }, + { + "label": "gpt-3.5-turbo", + "name": "gpt-3.5-turbo" + }, + { + "label": "gpt-3.5-turbo-0301", + "name": "gpt-3.5-turbo-0301" + } + ], + "default": "gpt-3.5-turbo", + "optional": true, + "id": "chatOpenAI_1-input-modelName-options" + }, + { + "label": "Temperature", + "name": "temperature", + "type": "number", + "default": 0.9, + "optional": true, + "id": "chatOpenAI_1-input-temperature-number" + }, + { + "label": "Max Tokens", + "name": "maxTokens", + 
"type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_1-input-maxTokens-number" + }, + { + "label": "Top Probability", + "name": "topP", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_1-input-topP-number" + }, + { + "label": "Frequency Penalty", + "name": "frequencyPenalty", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_1-input-frequencyPenalty-number" + }, + { + "label": "Presence Penalty", + "name": "presencePenalty", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_1-input-presencePenalty-number" + }, + { + "label": "Timeout", + "name": "timeout", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_1-input-timeout-number" + }, + { + "label": "BasePath", + "name": "basepath", + "type": "string", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_1-input-basepath-string" + } + ], + "inputAnchors": [], + "inputs": { + "modelName": "gpt-3.5-turbo", + "temperature": "0", + "maxTokens": "", + "topP": "", + "frequencyPenalty": "", + "presencePenalty": "", + "timeout": "" + }, + "outputAnchors": [ + { + "id": "chatOpenAI_1-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain", + "name": "chatOpenAI", + "label": "ChatOpenAI", + "type": "ChatOpenAI | BaseChatModel | BaseLanguageModel | BaseLangChain" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 56.646518061018355, + "y": 71.07043412525425 + }, + "dragging": false + }, + { + "width": 300, + "height": 278, + "id": "serpAPI_1", + "position": { + "x": 436.94138168947336, + "y": 39.517825311262044 + }, + "type": "customNode", + "data": { + "id": "serpAPI_1", + "label": "Serp API", + "name": "serpAPI", + "type": "SerpAPI", + "baseClasses": ["SerpAPI", "Tool", "StructuredTool", "BaseLangChain"], + "category": "Tools", + "description": "Wrapper 
around SerpAPI - a real-time API to access Google search results", + "inputParams": [ + { + "label": "Serp Api Key", + "name": "apiKey", + "type": "password", + "id": "serpAPI_1-input-apiKey-password" + } + ], + "inputAnchors": [], + "inputs": {}, + "outputAnchors": [ + { + "id": "serpAPI_1-output-serpAPI-SerpAPI|Tool|StructuredTool|BaseLangChain", + "name": "serpAPI", + "label": "SerpAPI", + "type": "SerpAPI | Tool | StructuredTool | BaseLangChain" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 436.94138168947336, + "y": 39.517825311262044 + }, + "dragging": false + }, + { + "width": 300, + "height": 143, + "id": "calculator_1", + "position": { + "x": 800.5125025564965, + "y": 72.40592063242738 + }, + "type": "customNode", + "data": { + "id": "calculator_1", + "label": "Calculator", + "name": "calculator", + "type": "Calculator", + "baseClasses": ["Calculator", "Tool", "StructuredTool", "BaseLangChain"], + "category": "Tools", + "description": "Perform calculations on response", + "inputParams": [], + "inputAnchors": [], + "inputs": {}, + "outputAnchors": [ + { + "id": "calculator_1-output-calculator-Calculator|Tool|StructuredTool|BaseLangChain", + "name": "calculator", + "label": "Calculator", + "type": "Calculator | Tool | StructuredTool | BaseLangChain" + } + ], + "outputs": {}, + "selected": false + }, + "positionAbsolute": { + "x": 800.5125025564965, + "y": 72.40592063242738 + }, + "selected": false, + "dragging": false + }, + { + "width": 300, + "height": 376, + "id": "bufferMemory_1", + "position": { + "x": 573.479796337051, + "y": 575.8843338367278 + }, + "type": "customNode", + "data": { + "id": "bufferMemory_1", + "label": "Buffer Memory", + "name": "bufferMemory", + "type": "BufferMemory", + "baseClasses": ["BufferMemory", "BaseChatMemory", "BaseMemory"], + "category": "Memory", + "description": "Remembers previous conversational back and forths directly", + "inputParams": [ + { + "label": "Memory 
Key", + "name": "memoryKey", + "type": "string", + "default": "chat_history", + "id": "bufferMemory_1-input-memoryKey-string" + }, + { + "label": "Input Key", + "name": "inputKey", + "type": "string", + "default": "input", + "id": "bufferMemory_1-input-inputKey-string" + } + ], + "inputAnchors": [], + "inputs": { + "memoryKey": "chat_history", + "inputKey": "input" + }, + "outputAnchors": [ + { + "id": "bufferMemory_1-output-bufferMemory-BufferMemory|BaseChatMemory|BaseMemory", + "name": "bufferMemory", + "label": "BufferMemory", + "type": "BufferMemory | BaseChatMemory | BaseMemory" + } + ], + "outputs": {}, + "selected": false + }, + "positionAbsolute": { + "x": 573.479796337051, + "y": 575.8843338367278 + }, + "selected": false, + "dragging": false + }, + { + "width": 300, + "height": 383, + "id": "conversationalAgent_0", + "position": { + "x": 1206.1996037716035, + "y": 227.39579577603587 + }, + "type": "customNode", + "data": { + "id": "conversationalAgent_0", + "label": "Conversational Agent", + "name": "conversationalAgent", + "type": "AgentExecutor", + "baseClasses": ["AgentExecutor", "BaseChain", "BaseLangChain"], + "category": "Agents", + "description": "Conversational agent for a chat model. 
It will utilize chat specific prompts", + "inputParams": [ + { + "label": "System Message", + "name": "systemMessage", + "type": "string", + "rows": 4, + "optional": true, + "additionalParams": true, + "id": "conversationalAgent_0-input-systemMessage-string" + }, + { + "label": "Human Message", + "name": "humanMessage", + "type": "string", + "rows": 4, + "optional": true, + "additionalParams": true, + "id": "conversationalAgent_0-input-humanMessage-string" + } + ], + "inputAnchors": [ + { + "label": "Allowed Tools", + "name": "tools", + "type": "Tool", + "list": true, + "id": "conversationalAgent_0-input-tools-Tool" + }, + { + "label": "Language Model", + "name": "model", + "type": "BaseLanguageModel", + "id": "conversationalAgent_0-input-model-BaseLanguageModel" + }, + { + "label": "Memory", + "name": "memory", + "type": "BaseChatMemory", + "id": "conversationalAgent_0-input-memory-BaseChatMemory" + } + ], + "inputs": { + "tools": ["{{calculator_1.data.instance}}", "{{serpAPI_1.data.instance}}"], + "model": "{{chatOpenAI_1.data.instance}}", + "memory": "{{bufferMemory_1.data.instance}}", + "systemMessage": "", + "humanMessage": "" + }, + "outputAnchors": [ + { + "id": "conversationalAgent_0-output-conversationalAgent-AgentExecutor|BaseChain|BaseLangChain", + "name": "conversationalAgent", + "label": "AgentExecutor", + "type": "AgentExecutor | BaseChain | BaseLangChain" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 1206.1996037716035, + "y": 227.39579577603587 + }, + "dragging": false + } + ], + "edges": [ + { + "source": "calculator_1", + "sourceHandle": "calculator_1-output-calculator-Calculator|Tool|StructuredTool|BaseLangChain", + "target": "conversationalAgent_0", + "targetHandle": "conversationalAgent_0-input-tools-Tool", + "type": "buttonedge", + "id": "calculator_1-calculator_1-output-calculator-Calculator|Tool|StructuredTool|BaseLangChain-conversationalAgent_0-conversationalAgent_0-input-tools-Tool", 
+ "data": { + "label": "" + } + }, + { + "source": "serpAPI_1", + "sourceHandle": "serpAPI_1-output-serpAPI-SerpAPI|Tool|StructuredTool|BaseLangChain", + "target": "conversationalAgent_0", + "targetHandle": "conversationalAgent_0-input-tools-Tool", + "type": "buttonedge", + "id": "serpAPI_1-serpAPI_1-output-serpAPI-SerpAPI|Tool|StructuredTool|BaseLangChain-conversationalAgent_0-conversationalAgent_0-input-tools-Tool", + "data": { + "label": "" + } + }, + { + "source": "chatOpenAI_1", + "sourceHandle": "chatOpenAI_1-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain", + "target": "conversationalAgent_0", + "targetHandle": "conversationalAgent_0-input-model-BaseLanguageModel", + "type": "buttonedge", + "id": "chatOpenAI_1-chatOpenAI_1-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain-conversationalAgent_0-conversationalAgent_0-input-model-BaseLanguageModel", + "data": { + "label": "" + } + }, + { + "source": "bufferMemory_1", + "sourceHandle": "bufferMemory_1-output-bufferMemory-BufferMemory|BaseChatMemory|BaseMemory", + "target": "conversationalAgent_0", + "targetHandle": "conversationalAgent_0-input-memory-BaseChatMemory", + "type": "buttonedge", + "id": "bufferMemory_1-bufferMemory_1-output-bufferMemory-BufferMemory|BaseChatMemory|BaseMemory-conversationalAgent_0-conversationalAgent_0-input-memory-BaseChatMemory", + "data": { + "label": "" + } + } + ] +} diff --git a/packages/server/marketplaces/Conversational Retrieval QA Chain.json b/packages/server/marketplaces/Conversational Retrieval QA Chain.json new file mode 100644 index 0000000000000000000000000000000000000000..4d470ab252651135369937c7d975738d96f73182 --- /dev/null +++ b/packages/server/marketplaces/Conversational Retrieval QA Chain.json @@ -0,0 +1,612 @@ +{ + "description": "Text file QnA using conversational retrieval QA chain", + "nodes": [ + { + "width": 300, + "height": 376, + "id": "recursiveCharacterTextSplitter_1", + "position": { + "x": 
422.81091375202413, + "y": 122.99825010325736 + }, + "type": "customNode", + "data": { + "id": "recursiveCharacterTextSplitter_1", + "label": "Recursive Character Text Splitter", + "name": "recursiveCharacterTextSplitter", + "type": "RecursiveCharacterTextSplitter", + "baseClasses": ["RecursiveCharacterTextSplitter", "TextSplitter"], + "category": "Text Splitters", + "description": "Split documents recursively by different characters - starting with \"\n\n\", then \"\n\", then \" \"", + "inputParams": [ + { + "label": "Chunk Size", + "name": "chunkSize", + "type": "number", + "default": 1000, + "optional": true, + "id": "recursiveCharacterTextSplitter_1-input-chunkSize-number" + }, + { + "label": "Chunk Overlap", + "name": "chunkOverlap", + "type": "number", + "optional": true, + "id": "recursiveCharacterTextSplitter_1-input-chunkOverlap-number" + } + ], + "inputAnchors": [], + "inputs": { + "chunkSize": 1000, + "chunkOverlap": "" + }, + "outputAnchors": [ + { + "id": "recursiveCharacterTextSplitter_1-output-recursiveCharacterTextSplitter-RecursiveCharacterTextSplitter|TextSplitter", + "name": "recursiveCharacterTextSplitter", + "label": "RecursiveCharacterTextSplitter", + "type": "RecursiveCharacterTextSplitter | TextSplitter" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 422.81091375202413, + "y": 122.99825010325736 + }, + "dragging": false + }, + { + "width": 300, + "height": 392, + "id": "textFile_1", + "position": { + "x": 810.6456923854021, + "y": 61.45989039390216 + }, + "type": "customNode", + "data": { + "id": "textFile_1", + "label": "Text File", + "name": "textFile", + "type": "Document", + "baseClasses": ["Document"], + "category": "Document Loaders", + "description": "Load data from text files", + "inputParams": [ + { + "label": "Txt File", + "name": "txtFile", + "type": "file", + "fileType": ".txt", + "id": "textFile_1-input-txtFile-file" + }, + { + "label": "Metadata", + "name": "metadata", + 
"type": "json", + "optional": true, + "additionalParams": true, + "id": "textFile_1-input-metadata-json" + } + ], + "inputAnchors": [ + { + "label": "Text Splitter", + "name": "textSplitter", + "type": "TextSplitter", + "optional": true, + "id": "textFile_1-input-textSplitter-TextSplitter" + } + ], + "inputs": { + "textSplitter": "{{recursiveCharacterTextSplitter_1.data.instance}}" + }, + "outputAnchors": [ + { + "id": "textFile_1-output-textFile-Document", + "name": "textFile", + "label": "Document", + "type": "Document" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 810.6456923854021, + "y": 61.45989039390216 + }, + "dragging": false + }, + { + "width": 300, + "height": 330, + "id": "openAIEmbeddings_1", + "position": { + "x": 817.2208258595176, + "y": 586.8095386455508 + }, + "type": "customNode", + "data": { + "id": "openAIEmbeddings_1", + "label": "OpenAI Embeddings", + "name": "openAIEmbeddings", + "type": "OpenAIEmbeddings", + "baseClasses": ["OpenAIEmbeddings", "Embeddings"], + "category": "Embeddings", + "description": "OpenAI API to generate embeddings for a given text", + "inputParams": [ + { + "label": "OpenAI Api Key", + "name": "openAIApiKey", + "type": "password", + "id": "openAIEmbeddings_1-input-openAIApiKey-password" + }, + { + "label": "Strip New Lines", + "name": "stripNewLines", + "type": "boolean", + "optional": true, + "additionalParams": true, + "id": "openAIEmbeddings_1-input-stripNewLines-boolean" + }, + { + "label": "Batch Size", + "name": "batchSize", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAIEmbeddings_1-input-batchSize-number" + }, + { + "label": "Timeout", + "name": "timeout", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAIEmbeddings_1-input-timeout-number" + }, + { + "label": "BasePath", + "name": "basepath", + "type": "string", + "optional": true, + "additionalParams": true, + "id": 
"openAIEmbeddings_1-input-basepath-string" + } + ], + "inputAnchors": [], + "inputs": { + "stripNewLines": "", + "batchSize": "", + "timeout": "" + }, + "outputAnchors": [ + { + "id": "openAIEmbeddings_1-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings", + "name": "openAIEmbeddings", + "label": "OpenAIEmbeddings", + "type": "OpenAIEmbeddings | Embeddings" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 817.2208258595176, + "y": 586.8095386455508 + }, + "dragging": false + }, + { + "width": 300, + "height": 702, + "id": "pineconeUpsert_1", + "position": { + "x": 1201.3427203075867, + "y": 545.1800202023215 + }, + "type": "customNode", + "data": { + "id": "pineconeUpsert_1", + "label": "Pinecone Upsert Document", + "name": "pineconeUpsert", + "type": "Pinecone", + "baseClasses": ["Pinecone", "VectorStoreRetriever", "BaseRetriever"], + "category": "Vector Stores", + "description": "Upsert documents to Pinecone", + "inputParams": [ + { + "label": "Pinecone Api Key", + "name": "pineconeApiKey", + "type": "password", + "id": "pineconeUpsert_1-input-pineconeApiKey-password" + }, + { + "label": "Pinecone Environment", + "name": "pineconeEnv", + "type": "string", + "id": "pineconeUpsert_1-input-pineconeEnv-string" + }, + { + "label": "Pinecone Index", + "name": "pineconeIndex", + "type": "string", + "id": "pineconeUpsert_1-input-pineconeIndex-string" + }, + { + "label": "Pinecone Namespace", + "name": "pineconeNamespace", + "type": "string", + "placeholder": "my-first-namespace", + "optional": true, + "additionalParams": true, + "id": "pineconeUpsert_1-input-pineconeNamespace-string" + }, + { + "label": "Top K", + "name": "topK", + "description": "Number of top results to fetch. 
Default to 4", + "placeholder": "4", + "type": "number", + "additionalParams": true, + "optional": true, + "id": "pineconeUpsert_1-input-topK-number" + } + ], + "inputAnchors": [ + { + "label": "Document", + "name": "document", + "type": "Document", + "list": true, + "id": "pineconeUpsert_1-input-document-Document" + }, + { + "label": "Embeddings", + "name": "embeddings", + "type": "Embeddings", + "id": "pineconeUpsert_1-input-embeddings-Embeddings" + } + ], + "inputs": { + "document": ["{{textFile_1.data.instance}}"], + "embeddings": "{{openAIEmbeddings_1.data.instance}}", + "pineconeEnv": "us-west4-gcp", + "pineconeIndex": "myindex", + "pineconeNamespace": "mynamespace" + }, + "outputAnchors": [ + { + "name": "output", + "label": "Output", + "type": "options", + "options": [ + { + "id": "pineconeUpsert_1-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever", + "name": "retriever", + "label": "Pinecone Retriever", + "type": "Pinecone | VectorStoreRetriever | BaseRetriever" + }, + { + "id": "pineconeUpsert_1-output-vectorStore-Pinecone|VectorStore", + "name": "vectorStore", + "label": "Pinecone Vector Store", + "type": "Pinecone | VectorStore" + } + ], + "default": "retriever" + } + ], + "outputs": { + "output": "retriever" + }, + "selected": false + }, + "selected": false, + "dragging": false, + "positionAbsolute": { + "x": 1201.3427203075867, + "y": 545.1800202023215 + } + }, + { + "width": 300, + "height": 524, + "id": "chatOpenAI_0", + "position": { + "x": 1200.565568471151, + "y": -33.648143275380406 + }, + "type": "customNode", + "data": { + "id": "chatOpenAI_0", + "label": "ChatOpenAI", + "name": "chatOpenAI", + "type": "ChatOpenAI", + "baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel", "BaseLangChain"], + "category": "Chat Models", + "description": "Wrapper around OpenAI large language models that use the Chat endpoint", + "inputParams": [ + { + "label": "OpenAI Api Key", + "name": "openAIApiKey", + "type": "password", + "id": 
"chatOpenAI_0-input-openAIApiKey-password" + }, + { + "label": "Model Name", + "name": "modelName", + "type": "options", + "options": [ + { + "label": "gpt-4", + "name": "gpt-4" + }, + { + "label": "gpt-4-0314", + "name": "gpt-4-0314" + }, + { + "label": "gpt-4-32k-0314", + "name": "gpt-4-32k-0314" + }, + { + "label": "gpt-3.5-turbo", + "name": "gpt-3.5-turbo" + }, + { + "label": "gpt-3.5-turbo-0301", + "name": "gpt-3.5-turbo-0301" + } + ], + "default": "gpt-3.5-turbo", + "optional": true, + "id": "chatOpenAI_0-input-modelName-options" + }, + { + "label": "Temperature", + "name": "temperature", + "type": "number", + "default": 0.9, + "optional": true, + "id": "chatOpenAI_0-input-temperature-number" + }, + { + "label": "Max Tokens", + "name": "maxTokens", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-maxTokens-number" + }, + { + "label": "Top Probability", + "name": "topP", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-topP-number" + }, + { + "label": "Frequency Penalty", + "name": "frequencyPenalty", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-frequencyPenalty-number" + }, + { + "label": "Presence Penalty", + "name": "presencePenalty", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-presencePenalty-number" + }, + { + "label": "Timeout", + "name": "timeout", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-timeout-number" + }, + { + "label": "BasePath", + "name": "basepath", + "type": "string", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-basepath-string" + } + ], + "inputAnchors": [], + "inputs": { + "modelName": "gpt-3.5-turbo", + "temperature": "0.5", + "maxTokens": "", + "topP": "", + "frequencyPenalty": "", + "presencePenalty": "", + "timeout": "" + }, + "outputAnchors": [ + { + "id": 
"chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain", + "name": "chatOpenAI", + "label": "ChatOpenAI", + "type": "ChatOpenAI | BaseChatModel | BaseLanguageModel | BaseLangChain" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 1200.565568471151, + "y": -33.648143275380406 + }, + "dragging": false + }, + { + "width": 300, + "height": 280, + "id": "conversationalRetrievalQAChain_0", + "position": { + "x": 1627.1855024401737, + "y": 394.42287890442145 + }, + "type": "customNode", + "data": { + "id": "conversationalRetrievalQAChain_0", + "label": "Conversational Retrieval QA Chain", + "name": "conversationalRetrievalQAChain", + "type": "ConversationalRetrievalQAChain", + "baseClasses": ["ConversationalRetrievalQAChain", "BaseChain", "BaseLangChain"], + "category": "Chains", + "description": "Document QA - built on RetrievalQAChain to provide a chat history component", + "inputParams": [ + { + "label": "Return Source Documents", + "name": "returnSourceDocuments", + "type": "boolean", + "optional": true, + "id": "conversationalRetrievalQAChain_0-input-returnSourceDocuments-boolean" + }, + { + "label": "System Message", + "name": "systemMessagePrompt", + "type": "string", + "rows": 4, + "additionalParams": true, + "optional": true, + "placeholder": "I want you to act as a document that I am having a conversation with. Your name is \"AI Assistant\". You will provide me with answers from the given info. If the answer is not included, say exactly \"Hmm, I am not sure.\" and stop after that. Refuse to answer any question not about the info. 
Never break character.", + "id": "conversationalRetrievalQAChain_0-input-systemMessagePrompt-string" + }, + { + "label": "Chain Option", + "name": "chainOption", + "type": "options", + "options": [ + { + "label": "MapReduceDocumentsChain", + "name": "map_reduce", + "description": "Suitable for QA tasks over larger documents and can run the preprocessing step in parallel, reducing the running time" + }, + { + "label": "RefineDocumentsChain", + "name": "refine", + "description": "Suitable for QA tasks over a large number of documents." + }, + { + "label": "StuffDocumentsChain", + "name": "stuff", + "description": "Suitable for QA tasks over a small number of documents." + } + ], + "additionalParams": true, + "optional": true, + "id": "conversationalRetrievalQAChain_0-input-chainOption-options" + } + ], + "inputAnchors": [ + { + "label": "Language Model", + "name": "model", + "type": "BaseLanguageModel", + "id": "conversationalRetrievalQAChain_0-input-model-BaseLanguageModel" + }, + { + "label": "Vector Store Retriever", + "name": "vectorStoreRetriever", + "type": "BaseRetriever", + "id": "conversationalRetrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever" + } + ], + "inputs": { + "model": "{{chatOpenAI_0.data.instance}}", + "vectorStoreRetriever": "{{pineconeUpsert_1.data.instance}}" + }, + "outputAnchors": [ + { + "id": "conversationalRetrievalQAChain_0-output-conversationalRetrievalQAChain-ConversationalRetrievalQAChain|BaseChain|BaseLangChain", + "name": "conversationalRetrievalQAChain", + "label": "ConversationalRetrievalQAChain", + "type": "ConversationalRetrievalQAChain | BaseChain | BaseLangChain" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 1627.1855024401737, + "y": 394.42287890442145 + }, + "dragging": false + } + ], + "edges": [ + { + "source": "openAIEmbeddings_1", + "sourceHandle": "openAIEmbeddings_1-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings", + "target": "pineconeUpsert_1", + 
"targetHandle": "pineconeUpsert_1-input-embeddings-Embeddings", + "type": "buttonedge", + "id": "openAIEmbeddings_1-openAIEmbeddings_1-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings-pineconeUpsert_1-pineconeUpsert_1-input-embeddings-Embeddings", + "data": { + "label": "" + } + }, + { + "source": "textFile_1", + "sourceHandle": "textFile_1-output-textFile-Document", + "target": "pineconeUpsert_1", + "targetHandle": "pineconeUpsert_1-input-document-Document", + "type": "buttonedge", + "id": "textFile_1-textFile_1-output-textFile-Document-pineconeUpsert_1-pineconeUpsert_1-input-document-Document", + "data": { + "label": "" + } + }, + { + "source": "recursiveCharacterTextSplitter_1", + "sourceHandle": "recursiveCharacterTextSplitter_1-output-recursiveCharacterTextSplitter-RecursiveCharacterTextSplitter|TextSplitter", + "target": "textFile_1", + "targetHandle": "textFile_1-input-textSplitter-TextSplitter", + "type": "buttonedge", + "id": "recursiveCharacterTextSplitter_1-recursiveCharacterTextSplitter_1-output-recursiveCharacterTextSplitter-RecursiveCharacterTextSplitter|TextSplitter-textFile_1-textFile_1-input-textSplitter-TextSplitter", + "data": { + "label": "" + } + }, + { + "source": "chatOpenAI_0", + "sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain", + "target": "conversationalRetrievalQAChain_0", + "targetHandle": "conversationalRetrievalQAChain_0-input-model-BaseLanguageModel", + "type": "buttonedge", + "id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain-conversationalRetrievalQAChain_0-conversationalRetrievalQAChain_0-input-model-BaseLanguageModel", + "data": { + "label": "" + } + }, + { + "source": "pineconeUpsert_1", + "sourceHandle": "pineconeUpsert_1-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever", + "target": "conversationalRetrievalQAChain_0", + "targetHandle": 
"conversationalRetrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever", + "type": "buttonedge", + "id": "pineconeUpsert_1-pineconeUpsert_1-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever-conversationalRetrievalQAChain_0-conversationalRetrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever", + "data": { + "label": "" + } + } + ] +} diff --git a/packages/server/marketplaces/Github Repo QnA.json b/packages/server/marketplaces/Github Repo QnA.json new file mode 100644 index 0000000000000000000000000000000000000000..a9294eca185cefe8cf1d9f81169703a4c3af0927 --- /dev/null +++ b/packages/server/marketplaces/Github Repo QnA.json @@ -0,0 +1,629 @@ +{ + "description": "Github repo QnA using conversational retrieval QA chain", + "nodes": [ + { + "width": 300, + "height": 376, + "id": "recursiveCharacterTextSplitter_1", + "position": { + "x": 447.1038086695898, + "y": 126.52301921543597 + }, + "type": "customNode", + "data": { + "id": "recursiveCharacterTextSplitter_1", + "label": "Recursive Character Text Splitter", + "name": "recursiveCharacterTextSplitter", + "type": "RecursiveCharacterTextSplitter", + "baseClasses": ["RecursiveCharacterTextSplitter", "TextSplitter"], + "category": "Text Splitters", + "description": "Split documents recursively by different characters - starting with \"\n\n\", then \"\n\", then \" \"", + "inputParams": [ + { + "label": "Chunk Size", + "name": "chunkSize", + "type": "number", + "default": 1000, + "optional": true, + "id": "recursiveCharacterTextSplitter_1-input-chunkSize-number" + }, + { + "label": "Chunk Overlap", + "name": "chunkOverlap", + "type": "number", + "optional": true, + "id": "recursiveCharacterTextSplitter_1-input-chunkOverlap-number" + } + ], + "inputAnchors": [], + "inputs": { + "chunkSize": 1000, + "chunkOverlap": "" + }, + "outputAnchors": [ + { + "id": "recursiveCharacterTextSplitter_1-output-recursiveCharacterTextSplitter-RecursiveCharacterTextSplitter|TextSplitter", + "name": 
"recursiveCharacterTextSplitter", + "label": "RecursiveCharacterTextSplitter", + "type": "RecursiveCharacterTextSplitter | TextSplitter" + } + ], + "outputs": {}, + "selected": false + }, + "positionAbsolute": { + "x": 447.1038086695898, + "y": 126.52301921543597 + }, + "selected": false, + "dragging": false + }, + { + "width": 300, + "height": 578, + "id": "github_1", + "position": { + "x": 836.9660489009947, + "y": -44.04171088580361 + }, + "type": "customNode", + "data": { + "id": "github_1", + "label": "Github", + "name": "github", + "type": "Document", + "baseClasses": ["Document"], + "category": "Document Loaders", + "description": "Load data from a GitHub repository", + "inputParams": [ + { + "label": "Repo Link", + "name": "repoLink", + "type": "string", + "placeholder": "https://github.com/FlowiseAI/Flowise", + "id": "github_1-input-repoLink-string" + }, + { + "label": "Branch", + "name": "branch", + "type": "string", + "default": "main", + "id": "github_1-input-branch-string" + }, + { + "label": "Access Token", + "name": "accessToken", + "type": "password", + "placeholder": "", + "optional": true, + "id": "github_1-input-accessToken-password" + }, + { + "label": "Metadata", + "name": "metadata", + "type": "json", + "optional": true, + "additionalParams": true, + "id": "github_1-input-metadata-json" + } + ], + "inputAnchors": [ + { + "label": "Text Splitter", + "name": "textSplitter", + "type": "TextSplitter", + "optional": true, + "id": "github_1-input-textSplitter-TextSplitter" + } + ], + "inputs": { + "repoLink": "", + "branch": "main", + "textSplitter": "{{recursiveCharacterTextSplitter_1.data.instance}}" + }, + "outputAnchors": [ + { + "id": "github_1-output-github-Document", + "name": "github", + "label": "Document", + "type": "Document" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 836.9660489009947, + "y": -44.04171088580361 + }, + "dragging": false + }, + { + "width": 300, + "height": 330, + 
"id": "openAIEmbeddings_1", + "position": { + "x": 833.4085562012468, + "y": 541.7875676090047 + }, + "type": "customNode", + "data": { + "id": "openAIEmbeddings_1", + "label": "OpenAI Embeddings", + "name": "openAIEmbeddings", + "type": "OpenAIEmbeddings", + "baseClasses": ["OpenAIEmbeddings", "Embeddings"], + "category": "Embeddings", + "description": "OpenAI API to generate embeddings for a given text", + "inputParams": [ + { + "label": "OpenAI Api Key", + "name": "openAIApiKey", + "type": "password", + "id": "openAIEmbeddings_1-input-openAIApiKey-password" + }, + { + "label": "Strip New Lines", + "name": "stripNewLines", + "type": "boolean", + "optional": true, + "additionalParams": true, + "id": "openAIEmbeddings_1-input-stripNewLines-boolean" + }, + { + "label": "Batch Size", + "name": "batchSize", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAIEmbeddings_1-input-batchSize-number" + }, + { + "label": "Timeout", + "name": "timeout", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAIEmbeddings_1-input-timeout-number" + }, + { + "label": "BasePath", + "name": "basepath", + "type": "string", + "optional": true, + "additionalParams": true, + "id": "openAIEmbeddings_1-input-basepath-string" + } + ], + "inputAnchors": [], + "inputs": { + "stripNewLines": "", + "batchSize": "", + "timeout": "" + }, + "outputAnchors": [ + { + "id": "openAIEmbeddings_1-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings", + "name": "openAIEmbeddings", + "label": "OpenAIEmbeddings", + "type": "OpenAIEmbeddings | Embeddings" + } + ], + "outputs": {}, + "selected": false + }, + "positionAbsolute": { + "x": 833.4085562012468, + "y": 541.7875676090047 + }, + "selected": false, + "dragging": false + }, + { + "width": 300, + "height": 702, + "id": "pineconeUpsert_1", + "position": { + "x": 1268.7946529279823, + "y": 382.77997896801634 + }, + "type": "customNode", + "data": { + "id": "pineconeUpsert_1", + "label": "Pinecone 
Upsert Document", + "name": "pineconeUpsert", + "type": "Pinecone", + "baseClasses": ["Pinecone", "VectorStoreRetriever", "BaseRetriever"], + "category": "Vector Stores", + "description": "Upsert documents to Pinecone", + "inputParams": [ + { + "label": "Pinecone Api Key", + "name": "pineconeApiKey", + "type": "password", + "id": "pineconeUpsert_1-input-pineconeApiKey-password" + }, + { + "label": "Pinecone Environment", + "name": "pineconeEnv", + "type": "string", + "id": "pineconeUpsert_1-input-pineconeEnv-string" + }, + { + "label": "Pinecone Index", + "name": "pineconeIndex", + "type": "string", + "id": "pineconeUpsert_1-input-pineconeIndex-string" + }, + { + "label": "Pinecone Namespace", + "name": "pineconeNamespace", + "type": "string", + "placeholder": "my-first-namespace", + "optional": true, + "additionalParams": true, + "id": "pineconeUpsert_1-input-pineconeNamespace-string" + }, + { + "label": "Top K", + "name": "topK", + "description": "Number of top results to fetch. Default to 4", + "placeholder": "4", + "type": "number", + "additionalParams": true, + "optional": true, + "id": "pineconeUpsert_1-input-topK-number" + } + ], + "inputAnchors": [ + { + "label": "Document", + "name": "document", + "type": "Document", + "list": true, + "id": "pineconeUpsert_1-input-document-Document" + }, + { + "label": "Embeddings", + "name": "embeddings", + "type": "Embeddings", + "id": "pineconeUpsert_1-input-embeddings-Embeddings" + } + ], + "inputs": { + "document": ["{{github_1.data.instance}}"], + "embeddings": "{{openAIEmbeddings_1.data.instance}}", + "pineconeEnv": "us-west4-gcp", + "pineconeIndex": "myindex", + "pineconeNamespace": "mynamespace" + }, + "outputAnchors": [ + { + "name": "output", + "label": "Output", + "type": "options", + "options": [ + { + "id": "pineconeUpsert_1-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever", + "name": "retriever", + "label": "Pinecone Retriever", + "type": "Pinecone | VectorStoreRetriever | BaseRetriever" + }, + { 
+ "id": "pineconeUpsert_1-output-vectorStore-Pinecone|VectorStore", + "name": "vectorStore", + "label": "Pinecone Vector Store", + "type": "Pinecone | VectorStore" + } + ], + "default": "retriever" + } + ], + "outputs": { + "output": "retriever" + }, + "selected": false + }, + "selected": false, + "dragging": false, + "positionAbsolute": { + "x": 1268.7946529279823, + "y": 382.77997896801634 + } + }, + { + "width": 300, + "height": 524, + "id": "chatOpenAI_0", + "position": { + "x": 1271.1300438358664, + "y": -169.75707425097968 + }, + "type": "customNode", + "data": { + "id": "chatOpenAI_0", + "label": "ChatOpenAI", + "name": "chatOpenAI", + "type": "ChatOpenAI", + "baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel", "BaseLangChain"], + "category": "Chat Models", + "description": "Wrapper around OpenAI large language models that use the Chat endpoint", + "inputParams": [ + { + "label": "OpenAI Api Key", + "name": "openAIApiKey", + "type": "password", + "id": "chatOpenAI_0-input-openAIApiKey-password" + }, + { + "label": "Model Name", + "name": "modelName", + "type": "options", + "options": [ + { + "label": "gpt-4", + "name": "gpt-4" + }, + { + "label": "gpt-4-0314", + "name": "gpt-4-0314" + }, + { + "label": "gpt-4-32k-0314", + "name": "gpt-4-32k-0314" + }, + { + "label": "gpt-3.5-turbo", + "name": "gpt-3.5-turbo" + }, + { + "label": "gpt-3.5-turbo-0301", + "name": "gpt-3.5-turbo-0301" + } + ], + "default": "gpt-3.5-turbo", + "optional": true, + "id": "chatOpenAI_0-input-modelName-options" + }, + { + "label": "Temperature", + "name": "temperature", + "type": "number", + "default": 0.9, + "optional": true, + "id": "chatOpenAI_0-input-temperature-number" + }, + { + "label": "Max Tokens", + "name": "maxTokens", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-maxTokens-number" + }, + { + "label": "Top Probability", + "name": "topP", + "type": "number", + "optional": true, + "additionalParams": true, + 
"id": "chatOpenAI_0-input-topP-number" + }, + { + "label": "Frequency Penalty", + "name": "frequencyPenalty", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-frequencyPenalty-number" + }, + { + "label": "Presence Penalty", + "name": "presencePenalty", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-presencePenalty-number" + }, + { + "label": "Timeout", + "name": "timeout", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-timeout-number" + }, + { + "label": "BasePath", + "name": "basepath", + "type": "string", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-basepath-string" + } + ], + "inputAnchors": [], + "inputs": { + "modelName": "gpt-3.5-turbo", + "temperature": "0.5", + "maxTokens": "", + "topP": "", + "frequencyPenalty": "", + "presencePenalty": "", + "timeout": "" + }, + "outputAnchors": [ + { + "id": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain", + "name": "chatOpenAI", + "label": "ChatOpenAI", + "type": "ChatOpenAI | BaseChatModel | BaseLanguageModel | BaseLangChain" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 1271.1300438358664, + "y": -169.75707425097968 + }, + "dragging": false + }, + { + "width": 300, + "height": 280, + "id": "conversationalRetrievalQAChain_0", + "position": { + "x": 1653.6177539108153, + "y": 266.4856653480158 + }, + "type": "customNode", + "data": { + "id": "conversationalRetrievalQAChain_0", + "label": "Conversational Retrieval QA Chain", + "name": "conversationalRetrievalQAChain", + "type": "ConversationalRetrievalQAChain", + "baseClasses": ["ConversationalRetrievalQAChain", "BaseChain", "BaseLangChain"], + "category": "Chains", + "description": "Document QA - built on RetrievalQAChain to provide a chat history component", + "inputParams": [ + { + "label": "Return 
Source Documents", + "name": "returnSourceDocuments", + "type": "boolean", + "optional": true, + "id": "conversationalRetrievalQAChain_0-input-returnSourceDocuments-boolean" + }, + { + "label": "System Message", + "name": "systemMessagePrompt", + "type": "string", + "rows": 4, + "additionalParams": true, + "optional": true, + "placeholder": "I want you to act as a document that I am having a conversation with. Your name is \"AI Assistant\". You will provide me with answers from the given info. If the answer is not included, say exactly \"Hmm, I am not sure.\" and stop after that. Refuse to answer any question not about the info. Never break character.", + "id": "conversationalRetrievalQAChain_0-input-systemMessagePrompt-string" + }, + { + "label": "Chain Option", + "name": "chainOption", + "type": "options", + "options": [ + { + "label": "MapReduceDocumentsChain", + "name": "map_reduce", + "description": "Suitable for QA tasks over larger documents and can run the preprocessing step in parallel, reducing the running time" + }, + { + "label": "RefineDocumentsChain", + "name": "refine", + "description": "Suitable for QA tasks over a large number of documents." + }, + { + "label": "StuffDocumentsChain", + "name": "stuff", + "description": "Suitable for QA tasks over a small number of documents." 
+ } + ], + "additionalParams": true, + "optional": true, + "id": "conversationalRetrievalQAChain_0-input-chainOption-options" + } + ], + "inputAnchors": [ + { + "label": "Language Model", + "name": "model", + "type": "BaseLanguageModel", + "id": "conversationalRetrievalQAChain_0-input-model-BaseLanguageModel" + }, + { + "label": "Vector Store Retriever", + "name": "vectorStoreRetriever", + "type": "BaseRetriever", + "id": "conversationalRetrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever" + } + ], + "inputs": { + "model": "{{chatOpenAI_0.data.instance}}", + "vectorStoreRetriever": "{{pineconeUpsert_1.data.instance}}" + }, + "outputAnchors": [ + { + "id": "conversationalRetrievalQAChain_0-output-conversationalRetrievalQAChain-ConversationalRetrievalQAChain|BaseChain|BaseLangChain", + "name": "conversationalRetrievalQAChain", + "label": "ConversationalRetrievalQAChain", + "type": "ConversationalRetrievalQAChain | BaseChain | BaseLangChain" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 1653.6177539108153, + "y": 266.4856653480158 + }, + "dragging": false + } + ], + "edges": [ + { + "source": "github_1", + "sourceHandle": "github_1-output-github-Document", + "target": "pineconeUpsert_1", + "targetHandle": "pineconeUpsert_1-input-document-Document", + "type": "buttonedge", + "id": "github_1-github_1-output-github-Document-pineconeUpsert_1-pineconeUpsert_1-input-document-Document", + "data": { + "label": "" + } + }, + { + "source": "openAIEmbeddings_1", + "sourceHandle": "openAIEmbeddings_1-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings", + "target": "pineconeUpsert_1", + "targetHandle": "pineconeUpsert_1-input-embeddings-Embeddings", + "type": "buttonedge", + "id": "openAIEmbeddings_1-openAIEmbeddings_1-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings-pineconeUpsert_1-pineconeUpsert_1-input-embeddings-Embeddings", + "data": { + "label": "" + } + }, + { + "source": 
"recursiveCharacterTextSplitter_1", + "sourceHandle": "recursiveCharacterTextSplitter_1-output-recursiveCharacterTextSplitter-RecursiveCharacterTextSplitter|TextSplitter", + "target": "github_1", + "targetHandle": "github_1-input-textSplitter-TextSplitter", + "type": "buttonedge", + "id": "recursiveCharacterTextSplitter_1-recursiveCharacterTextSplitter_1-output-recursiveCharacterTextSplitter-RecursiveCharacterTextSplitter|TextSplitter-github_1-github_1-input-textSplitter-TextSplitter", + "data": { + "label": "" + } + }, + { + "source": "chatOpenAI_0", + "sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain", + "target": "conversationalRetrievalQAChain_0", + "targetHandle": "conversationalRetrievalQAChain_0-input-model-BaseLanguageModel", + "type": "buttonedge", + "id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain-conversationalRetrievalQAChain_0-conversationalRetrievalQAChain_0-input-model-BaseLanguageModel", + "data": { + "label": "" + } + }, + { + "source": "pineconeUpsert_1", + "sourceHandle": "pineconeUpsert_1-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever", + "target": "conversationalRetrievalQAChain_0", + "targetHandle": "conversationalRetrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever", + "type": "buttonedge", + "id": "pineconeUpsert_1-pineconeUpsert_1-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever-conversationalRetrievalQAChain_0-conversationalRetrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever", + "data": { + "label": "" + } + } + ] +} diff --git a/packages/server/marketplaces/Local QnA.json b/packages/server/marketplaces/Local QnA.json new file mode 100644 index 0000000000000000000000000000000000000000..9cfba9549916f99e20ea766676e0e71ca0617119 --- /dev/null +++ b/packages/server/marketplaces/Local QnA.json @@ -0,0 +1,521 @@ +{ + "description": "QnA chain using local LLM, Embedding models, and Faiss local 
vector store", + "nodes": [ + { + "width": 300, + "height": 376, + "id": "recursiveCharacterTextSplitter_1", + "position": { + "x": 422.81091375202413, + "y": 122.99825010325736 + }, + "type": "customNode", + "data": { + "id": "recursiveCharacterTextSplitter_1", + "label": "Recursive Character Text Splitter", + "name": "recursiveCharacterTextSplitter", + "type": "RecursiveCharacterTextSplitter", + "baseClasses": ["RecursiveCharacterTextSplitter", "TextSplitter"], + "category": "Text Splitters", + "description": "Split documents recursively by different characters - starting with \"\n\n\", then \"\n\", then \" \"", + "inputParams": [ + { + "label": "Chunk Size", + "name": "chunkSize", + "type": "number", + "default": 1000, + "optional": true, + "id": "recursiveCharacterTextSplitter_1-input-chunkSize-number" + }, + { + "label": "Chunk Overlap", + "name": "chunkOverlap", + "type": "number", + "optional": true, + "id": "recursiveCharacterTextSplitter_1-input-chunkOverlap-number" + } + ], + "inputAnchors": [], + "inputs": { + "chunkSize": 1000, + "chunkOverlap": "" + }, + "outputAnchors": [ + { + "id": "recursiveCharacterTextSplitter_1-output-recursiveCharacterTextSplitter-RecursiveCharacterTextSplitter|TextSplitter", + "name": "recursiveCharacterTextSplitter", + "label": "RecursiveCharacterTextSplitter", + "type": "RecursiveCharacterTextSplitter | TextSplitter" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 422.81091375202413, + "y": 122.99825010325736 + }, + "dragging": false + }, + { + "width": 300, + "height": 428, + "id": "conversationalRetrievalQAChain_0", + "position": { + "x": 1634.455879160561, + "y": 428.77742668929807 + }, + "type": "customNode", + "data": { + "id": "conversationalRetrievalQAChain_0", + "label": "Conversational Retrieval QA Chain", + "name": "conversationalRetrievalQAChain", + "type": "ConversationalRetrievalQAChain", + "baseClasses": ["ConversationalRetrievalQAChain", "BaseChain", 
"BaseLangChain"], + "category": "Chains", + "description": "Document QA - built on RetrievalQAChain to provide a chat history component", + "inputParams": [ + { + "label": "Return Source Documents", + "name": "returnSourceDocuments", + "type": "boolean", + "optional": true, + "id": "conversationalRetrievalQAChain_0-input-returnSourceDocuments-boolean" + }, + { + "label": "System Message", + "name": "systemMessagePrompt", + "type": "string", + "rows": 4, + "additionalParams": true, + "optional": true, + "placeholder": "I want you to act as a document that I am having a conversation with. Your name is \"AI Assistant\". You will provide me with answers from the given info. If the answer is not included, say exactly \"Hmm, I am not sure.\" and stop after that. Refuse to answer any question not about the info. Never break character.", + "id": "conversationalRetrievalQAChain_0-input-systemMessagePrompt-string" + }, + { + "label": "Chain Option", + "name": "chainOption", + "type": "options", + "options": [ + { + "label": "MapReduceDocumentsChain", + "name": "map_reduce", + "description": "Suitable for QA tasks over larger documents and can run the preprocessing step in parallel, reducing the running time" + }, + { + "label": "RefineDocumentsChain", + "name": "refine", + "description": "Suitable for QA tasks over a large number of documents." + }, + { + "label": "StuffDocumentsChain", + "name": "stuff", + "description": "Suitable for QA tasks over a small number of documents." 
+ } + ], + "additionalParams": true, + "optional": true, + "id": "conversationalRetrievalQAChain_0-input-chainOption-options" + } + ], + "inputAnchors": [ + { + "label": "Language Model", + "name": "model", + "type": "BaseLanguageModel", + "id": "conversationalRetrievalQAChain_0-input-model-BaseLanguageModel" + }, + { + "label": "Vector Store Retriever", + "name": "vectorStoreRetriever", + "type": "BaseRetriever", + "id": "conversationalRetrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever" + } + ], + "inputs": { + "model": "{{chatLocalAI_0.data.instance}}", + "vectorStoreRetriever": "{{faissUpsert_0.data.instance}}" + }, + "outputAnchors": [ + { + "id": "conversationalRetrievalQAChain_0-output-conversationalRetrievalQAChain-ConversationalRetrievalQAChain|BaseChain|BaseLangChain", + "name": "conversationalRetrievalQAChain", + "label": "ConversationalRetrievalQAChain", + "type": "ConversationalRetrievalQAChain | BaseChain | BaseLangChain" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 1634.455879160561, + "y": 428.77742668929807 + }, + "dragging": false + }, + { + "width": 300, + "height": 456, + "id": "faissUpsert_0", + "position": { + "x": 1204.6898035516715, + "y": 521.0933926644659 + }, + "type": "customNode", + "data": { + "id": "faissUpsert_0", + "label": "Faiss Upsert Document", + "name": "faissUpsert", + "type": "Faiss", + "baseClasses": ["Faiss", "VectorStoreRetriever", "BaseRetriever"], + "category": "Vector Stores", + "description": "Upsert documents to Faiss", + "inputParams": [ + { + "label": "Base Path to store", + "name": "basePath", + "description": "Path to store faiss.index file", + "placeholder": "C:\\Users\\User\\Desktop", + "type": "string", + "id": "faissUpsert_0-input-basePath-string" + }, + { + "label": "Top K", + "name": "topK", + "description": "Number of top results to fetch. 
Default to 4", + "placeholder": "4", + "type": "number", + "additionalParams": true, + "optional": true, + "id": "faissUpsert_0-input-topK-number" + } + ], + "inputAnchors": [ + { + "label": "Document", + "name": "document", + "type": "Document", + "list": true, + "id": "faissUpsert_0-input-document-Document" + }, + { + "label": "Embeddings", + "name": "embeddings", + "type": "Embeddings", + "id": "faissUpsert_0-input-embeddings-Embeddings" + } + ], + "inputs": { + "document": ["{{textFile_0.data.instance}}"], + "embeddings": "{{localAIEmbeddings_0.data.instance}}", + "basePath": "C:\\Users\\your-folder", + "topK": "" + }, + "outputAnchors": [ + { + "name": "output", + "label": "Output", + "type": "options", + "options": [ + { + "id": "faissUpsert_0-output-retriever-Faiss|VectorStoreRetriever|BaseRetriever", + "name": "retriever", + "label": "Faiss Retriever", + "type": "Faiss | VectorStoreRetriever | BaseRetriever" + }, + { + "id": "faissUpsert_0-output-vectorStore-Faiss|SaveableVectorStore|VectorStore", + "name": "vectorStore", + "label": "Faiss Vector Store", + "type": "Faiss | SaveableVectorStore | VectorStore" + } + ], + "default": "retriever" + } + ], + "outputs": { + "output": "retriever" + }, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 1204.6898035516715, + "y": 521.0933926644659 + }, + "dragging": false + }, + { + "width": 300, + "height": 526, + "id": "chatLocalAI_0", + "position": { + "x": 1191.9512064167336, + "y": -44.05401001663306 + }, + "type": "customNode", + "data": { + "id": "chatLocalAI_0", + "label": "ChatLocalAI", + "name": "chatLocalAI", + "type": "ChatLocalAI", + "baseClasses": ["ChatLocalAI", "BaseChatModel", "LLM", "BaseLLM", "BaseLanguageModel", "BaseLangChain"], + "category": "Chat Models", + "description": "Use local LLMs like llama.cpp, gpt4all using LocalAI", + "inputParams": [ + { + "label": "Base Path", + "name": "basePath", + "type": "string", + "placeholder": "http://localhost:8080/v1", + "id": 
"chatLocalAI_0-input-basePath-string" + }, + { + "label": "Model Name", + "name": "modelName", + "type": "string", + "placeholder": "gpt4all-lora-quantized.bin", + "id": "chatLocalAI_0-input-modelName-string" + }, + { + "label": "Temperature", + "name": "temperature", + "type": "number", + "default": 0.9, + "optional": true, + "id": "chatLocalAI_0-input-temperature-number" + }, + { + "label": "Max Tokens", + "name": "maxTokens", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatLocalAI_0-input-maxTokens-number" + }, + { + "label": "Top Probability", + "name": "topP", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatLocalAI_0-input-topP-number" + }, + { + "label": "Timeout", + "name": "timeout", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatLocalAI_0-input-timeout-number" + } + ], + "inputAnchors": [], + "inputs": { + "basePath": "http://localhost:8080/v1", + "modelName": "ggml-gpt4all-j.bin", + "temperature": 0.9, + "maxTokens": "", + "topP": "", + "timeout": "" + }, + "outputAnchors": [ + { + "id": "chatLocalAI_0-output-chatLocalAI-ChatLocalAI|BaseChatModel|LLM|BaseLLM|BaseLanguageModel|BaseLangChain", + "name": "chatLocalAI", + "label": "ChatLocalAI", + "type": "ChatLocalAI | BaseChatModel | LLM | BaseLLM | BaseLanguageModel | BaseLangChain" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 1191.9512064167336, + "y": -44.05401001663306 + }, + "dragging": false + }, + { + "width": 300, + "height": 410, + "id": "textFile_0", + "position": { + "x": 809.5432731751458, + "y": 55.85095796777051 + }, + "type": "customNode", + "data": { + "id": "textFile_0", + "label": "Text File", + "name": "textFile", + "type": "Document", + "baseClasses": ["Document"], + "category": "Document Loaders", + "description": "Load data from text files", + "inputParams": [ + { + "label": "Txt File", + "name": "txtFile", + "type": "file", + 
"fileType": ".txt", + "id": "textFile_0-input-txtFile-file" + }, + { + "label": "Metadata", + "name": "metadata", + "type": "json", + "optional": true, + "additionalParams": true, + "id": "textFile_0-input-metadata-json" + } + ], + "inputAnchors": [ + { + "label": "Text Splitter", + "name": "textSplitter", + "type": "TextSplitter", + "optional": true, + "id": "textFile_0-input-textSplitter-TextSplitter" + } + ], + "inputs": { + "textSplitter": "{{recursiveCharacterTextSplitter_1.data.instance}}", + "metadata": "" + }, + "outputAnchors": [ + { + "id": "textFile_0-output-textFile-Document", + "name": "textFile", + "label": "Document", + "type": "Document" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 809.5432731751458, + "y": 55.85095796777051 + }, + "dragging": false + }, + { + "width": 300, + "height": 376, + "id": "localAIEmbeddings_0", + "position": { + "x": 809.5432731751458, + "y": 507.4586304746849 + }, + "type": "customNode", + "data": { + "id": "localAIEmbeddings_0", + "label": "LocalAI Embeddings", + "name": "localAIEmbeddings", + "type": "LocalAI Embeddings", + "baseClasses": ["LocalAI Embeddings", "Embeddings"], + "category": "Embeddings", + "description": "Use local embeddings models like llama.cpp", + "inputParams": [ + { + "label": "Base Path", + "name": "basePath", + "type": "string", + "placeholder": "http://localhost:8080/v1", + "id": "localAIEmbeddings_0-input-basePath-string" + }, + { + "label": "Model Name", + "name": "modelName", + "type": "string", + "placeholder": "text-embedding-ada-002", + "id": "localAIEmbeddings_0-input-modelName-string" + } + ], + "inputAnchors": [], + "inputs": { + "basePath": "http://localhost:8080/v1", + "modelName": "text-embedding-ada-002" + }, + "outputAnchors": [ + { + "id": "localAIEmbeddings_0-output-localAIEmbeddings-LocalAI Embeddings|Embeddings", + "name": "localAIEmbeddings", + "label": "LocalAI Embeddings", + "type": "LocalAI Embeddings | Embeddings" + 
} + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 809.5432731751458, + "y": 507.4586304746849 + }, + "dragging": false + } + ], + "edges": [ + { + "source": "faissUpsert_0", + "sourceHandle": "faissUpsert_0-output-retriever-Faiss|VectorStoreRetriever|BaseRetriever", + "target": "conversationalRetrievalQAChain_0", + "targetHandle": "conversationalRetrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever", + "type": "buttonedge", + "id": "faissUpsert_0-faissUpsert_0-output-retriever-Faiss|VectorStoreRetriever|BaseRetriever-conversationalRetrievalQAChain_0-conversationalRetrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever", + "data": { + "label": "" + } + }, + { + "source": "chatLocalAI_0", + "sourceHandle": "chatLocalAI_0-output-chatLocalAI-ChatLocalAI|BaseChatModel|LLM|BaseLLM|BaseLanguageModel|BaseLangChain", + "target": "conversationalRetrievalQAChain_0", + "targetHandle": "conversationalRetrievalQAChain_0-input-model-BaseLanguageModel", + "type": "buttonedge", + "id": "chatLocalAI_0-chatLocalAI_0-output-chatLocalAI-ChatLocalAI|BaseChatModel|LLM|BaseLLM|BaseLanguageModel|BaseLangChain-conversationalRetrievalQAChain_0-conversationalRetrievalQAChain_0-input-model-BaseLanguageModel", + "data": { + "label": "" + } + }, + { + "source": "recursiveCharacterTextSplitter_1", + "sourceHandle": "recursiveCharacterTextSplitter_1-output-recursiveCharacterTextSplitter-RecursiveCharacterTextSplitter|TextSplitter", + "target": "textFile_0", + "targetHandle": "textFile_0-input-textSplitter-TextSplitter", + "type": "buttonedge", + "id": "recursiveCharacterTextSplitter_1-recursiveCharacterTextSplitter_1-output-recursiveCharacterTextSplitter-RecursiveCharacterTextSplitter|TextSplitter-textFile_0-textFile_0-input-textSplitter-TextSplitter", + "data": { + "label": "" + } + }, + { + "source": "textFile_0", + "sourceHandle": "textFile_0-output-textFile-Document", + "target": "faissUpsert_0", + "targetHandle": 
"faissUpsert_0-input-document-Document", + "type": "buttonedge", + "id": "textFile_0-textFile_0-output-textFile-Document-faissUpsert_0-faissUpsert_0-input-document-Document", + "data": { + "label": "" + } + }, + { + "source": "localAIEmbeddings_0", + "sourceHandle": "localAIEmbeddings_0-output-localAIEmbeddings-LocalAI Embeddings|Embeddings", + "target": "faissUpsert_0", + "targetHandle": "faissUpsert_0-input-embeddings-Embeddings", + "type": "buttonedge", + "id": "localAIEmbeddings_0-localAIEmbeddings_0-output-localAIEmbeddings-LocalAI Embeddings|Embeddings-faissUpsert_0-faissUpsert_0-input-embeddings-Embeddings", + "data": { + "label": "" + } + } + ] +} diff --git a/packages/server/marketplaces/MRKLAgent.json b/packages/server/marketplaces/MRKLAgent.json new file mode 100644 index 0000000000000000000000000000000000000000..257123e025f351b74d2ecbee72e926ac8709bb42 --- /dev/null +++ b/packages/server/marketplaces/MRKLAgent.json @@ -0,0 +1,331 @@ +{ + "description": "An agent that uses the React Framework to decide what action to take", + "nodes": [ + { + "width": 300, + "height": 278, + "id": "serpAPI_1", + "position": { + "x": 312.0655985817535, + "y": 112.09909989842703 + }, + "type": "customNode", + "data": { + "id": "serpAPI_1", + "label": "Serp API", + "name": "serpAPI", + "type": "SerpAPI", + "baseClasses": ["SerpAPI", "Tool", "StructuredTool", "BaseLangChain"], + "category": "Tools", + "description": "Wrapper around SerpAPI - a real-time API to access Google search results", + "inputParams": [ + { + "label": "Serp Api Key", + "name": "apiKey", + "type": "password", + "id": "serpAPI_1-input-apiKey-password" + } + ], + "inputAnchors": [], + "inputs": {}, + "outputAnchors": [ + { + "id": "serpAPI_1-output-serpAPI-SerpAPI|Tool|StructuredTool|BaseLangChain", + "name": "serpAPI", + "label": "SerpAPI", + "type": "SerpAPI | Tool | StructuredTool | BaseLangChain" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 
312.0655985817535, + "y": 112.09909989842703 + }, + "dragging": false + }, + { + "width": 300, + "height": 143, + "id": "calculator_1", + "position": { + "x": 664.1366474718458, + "y": 123.16419000640141 + }, + "type": "customNode", + "data": { + "id": "calculator_1", + "label": "Calculator", + "name": "calculator", + "type": "Calculator", + "baseClasses": ["Calculator", "Tool", "StructuredTool", "BaseLangChain"], + "category": "Tools", + "description": "Perform calculations on response", + "inputParams": [], + "inputAnchors": [], + "inputs": {}, + "outputAnchors": [ + { + "id": "calculator_1-output-calculator-Calculator|Tool|StructuredTool|BaseLangChain", + "name": "calculator", + "label": "Calculator", + "type": "Calculator | Tool | StructuredTool | BaseLangChain" + } + ], + "outputs": {}, + "selected": false + }, + "positionAbsolute": { + "x": 664.1366474718458, + "y": 123.16419000640141 + }, + "selected": false, + "dragging": false + }, + { + "width": 300, + "height": 524, + "id": "openAI_1", + "position": { + "x": 663.1307301893027, + "y": 394.7618562930441 + }, + "type": "customNode", + "data": { + "id": "openAI_1", + "label": "OpenAI", + "name": "openAI", + "type": "OpenAI", + "baseClasses": ["OpenAI", "BaseLLM", "BaseLanguageModel", "BaseLangChain"], + "category": "LLMs", + "description": "Wrapper around OpenAI large language models", + "inputParams": [ + { + "label": "OpenAI Api Key", + "name": "openAIApiKey", + "type": "password", + "id": "openAI_1-input-openAIApiKey-password" + }, + { + "label": "Model Name", + "name": "modelName", + "type": "options", + "options": [ + { + "label": "text-davinci-003", + "name": "text-davinci-003" + }, + { + "label": "text-davinci-002", + "name": "text-davinci-002" + }, + { + "label": "text-curie-001", + "name": "text-curie-001" + }, + { + "label": "text-babbage-001", + "name": "text-babbage-001" + } + ], + "default": "text-davinci-003", + "optional": true, + "id": "openAI_1-input-modelName-options" + }, + { + "label": 
"Temperature", + "name": "temperature", + "type": "number", + "default": 0.7, + "optional": true, + "id": "openAI_1-input-temperature-number" + }, + { + "label": "Max Tokens", + "name": "maxTokens", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_1-input-maxTokens-number" + }, + { + "label": "Top Probability", + "name": "topP", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_1-input-topP-number" + }, + { + "label": "Best Of", + "name": "bestOf", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_1-input-bestOf-number" + }, + { + "label": "Frequency Penalty", + "name": "frequencyPenalty", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_1-input-frequencyPenalty-number" + }, + { + "label": "Presence Penalty", + "name": "presencePenalty", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_1-input-presencePenalty-number" + }, + { + "label": "Batch Size", + "name": "batchSize", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_1-input-batchSize-number" + }, + { + "label": "Timeout", + "name": "timeout", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_1-input-timeout-number" + }, + { + "label": "BasePath", + "name": "basepath", + "type": "string", + "optional": true, + "additionalParams": true, + "id": "openAI_1-input-basepath-string" + } + ], + "inputAnchors": [], + "inputs": { + "modelName": "text-davinci-003", + "temperature": 0.7, + "maxTokens": "", + "topP": "", + "bestOf": "", + "frequencyPenalty": "", + "presencePenalty": "", + "batchSize": "", + "timeout": "" + }, + "outputAnchors": [ + { + "id": "openAI_1-output-openAI-OpenAI|BaseLLM|BaseLanguageModel|BaseLangChain", + "name": "openAI", + "label": "OpenAI", + "type": "OpenAI | BaseLLM | BaseLanguageModel | BaseLangChain" + } + ], + "outputs": {}, + "selected": false + }, + 
"selected": false, + "positionAbsolute": { + "x": 663.1307301893027, + "y": 394.7618562930441 + }, + "dragging": false + }, + { + "width": 300, + "height": 280, + "id": "mrklAgentLLM_0", + "position": { + "x": 1055.3271135179489, + "y": 245.36098016819074 + }, + "type": "customNode", + "data": { + "id": "mrklAgentLLM_0", + "label": "MRKL Agent for LLMs", + "name": "mrklAgentLLM", + "type": "AgentExecutor", + "baseClasses": ["AgentExecutor", "BaseChain", "BaseLangChain"], + "category": "Agents", + "description": "Agent that uses the ReAct Framework to decide what action to take, optimized to be used with LLMs", + "inputParams": [], + "inputAnchors": [ + { + "label": "Allowed Tools", + "name": "tools", + "type": "Tool", + "list": true, + "id": "mrklAgentLLM_0-input-tools-Tool" + }, + { + "label": "Language Model", + "name": "model", + "type": "BaseLanguageModel", + "id": "mrklAgentLLM_0-input-model-BaseLanguageModel" + } + ], + "inputs": { + "tools": ["{{calculator_1.data.instance}}", "{{serpAPI_1.data.instance}}"], + "model": "{{openAI_1.data.instance}}" + }, + "outputAnchors": [ + { + "id": "mrklAgentLLM_0-output-mrklAgentLLM-AgentExecutor|BaseChain|BaseLangChain", + "name": "mrklAgentLLM", + "label": "AgentExecutor", + "type": "AgentExecutor | BaseChain | BaseLangChain" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 1055.3271135179489, + "y": 245.36098016819074 + }, + "dragging": false + } + ], + "edges": [ + { + "source": "calculator_1", + "sourceHandle": "calculator_1-output-calculator-Calculator|Tool|StructuredTool|BaseLangChain", + "target": "mrklAgentLLM_0", + "targetHandle": "mrklAgentLLM_0-input-tools-Tool", + "type": "buttonedge", + "id": "calculator_1-calculator_1-output-calculator-Calculator|Tool|StructuredTool|BaseLangChain-mrklAgentLLM_0-mrklAgentLLM_0-input-tools-Tool", + "data": { + "label": "" + } + }, + { + "source": "serpAPI_1", + "sourceHandle": 
"serpAPI_1-output-serpAPI-SerpAPI|Tool|StructuredTool|BaseLangChain", + "target": "mrklAgentLLM_0", + "targetHandle": "mrklAgentLLM_0-input-tools-Tool", + "type": "buttonedge", + "id": "serpAPI_1-serpAPI_1-output-serpAPI-SerpAPI|Tool|StructuredTool|BaseLangChain-mrklAgentLLM_0-mrklAgentLLM_0-input-tools-Tool", + "data": { + "label": "" + } + }, + { + "source": "openAI_1", + "sourceHandle": "openAI_1-output-openAI-OpenAI|BaseLLM|BaseLanguageModel|BaseLangChain", + "target": "mrklAgentLLM_0", + "targetHandle": "mrklAgentLLM_0-input-model-BaseLanguageModel", + "type": "buttonedge", + "id": "openAI_1-openAI_1-output-openAI-OpenAI|BaseLLM|BaseLanguageModel|BaseLangChain-mrklAgentLLM_0-mrklAgentLLM_0-input-model-BaseLanguageModel", + "data": { + "label": "" + } + } + ] +} diff --git a/packages/server/marketplaces/Metadata Filter Load.json b/packages/server/marketplaces/Metadata Filter Load.json new file mode 100644 index 0000000000000000000000000000000000000000..dfc6d6fb11d54755cca00a94dc1e472beed2fd0e --- /dev/null +++ b/packages/server/marketplaces/Metadata Filter Load.json @@ -0,0 +1,484 @@ +{ + "description": "Load existing index with metadata filters and feed into conversational retrieval QA chain", + "nodes": [ + { + "width": 300, + "height": 524, + "id": "openAI_1", + "position": { + "x": 1195.6182217299724, + "y": -12.958591115085468 + }, + "type": "customNode", + "data": { + "id": "openAI_1", + "label": "OpenAI", + "name": "openAI", + "type": "OpenAI", + "baseClasses": ["OpenAI", "BaseLLM", "BaseLanguageModel", "BaseLangChain"], + "category": "LLMs", + "description": "Wrapper around OpenAI large language models", + "inputParams": [ + { + "label": "OpenAI Api Key", + "name": "openAIApiKey", + "type": "password", + "id": "openAI_1-input-openAIApiKey-password" + }, + { + "label": "Model Name", + "name": "modelName", + "type": "options", + "options": [ + { + "label": "text-davinci-003", + "name": "text-davinci-003" + }, + { + "label": "text-davinci-002", + "name": 
"text-davinci-002" + }, + { + "label": "text-curie-001", + "name": "text-curie-001" + }, + { + "label": "text-babbage-001", + "name": "text-babbage-001" + } + ], + "default": "text-davinci-003", + "optional": true, + "id": "openAI_1-input-modelName-options" + }, + { + "label": "Temperature", + "name": "temperature", + "type": "number", + "default": 0.7, + "optional": true, + "id": "openAI_1-input-temperature-number" + }, + { + "label": "Max Tokens", + "name": "maxTokens", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_1-input-maxTokens-number" + }, + { + "label": "Top Probability", + "name": "topP", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_1-input-topP-number" + }, + { + "label": "Best Of", + "name": "bestOf", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_1-input-bestOf-number" + }, + { + "label": "Frequency Penalty", + "name": "frequencyPenalty", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_1-input-frequencyPenalty-number" + }, + { + "label": "Presence Penalty", + "name": "presencePenalty", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_1-input-presencePenalty-number" + }, + { + "label": "Batch Size", + "name": "batchSize", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_1-input-batchSize-number" + }, + { + "label": "Timeout", + "name": "timeout", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_1-input-timeout-number" + }, + { + "label": "BasePath", + "name": "basepath", + "type": "string", + "optional": true, + "additionalParams": true, + "id": "openAI_1-input-basepath-string" + } + ], + "inputAnchors": [], + "inputs": { + "modelName": "text-davinci-003", + "temperature": "0", + "maxTokens": "", + "topP": "", + "bestOf": "", + "frequencyPenalty": "", + "presencePenalty": "", + "batchSize": "", + 
"timeout": "" + }, + "outputAnchors": [ + { + "id": "openAI_1-output-openAI-OpenAI|BaseLLM|BaseLanguageModel|BaseLangChain", + "name": "openAI", + "label": "OpenAI", + "type": "OpenAI | BaseLLM | BaseLanguageModel | BaseLangChain" + } + ], + "outputs": {}, + "selected": false + }, + "positionAbsolute": { + "x": 1195.6182217299724, + "y": -12.958591115085468 + }, + "selected": false, + "dragging": false + }, + { + "width": 300, + "height": 330, + "id": "openAIEmbeddings_1", + "position": { + "x": 777.5098693425334, + "y": 308.4221448953297 + }, + "type": "customNode", + "data": { + "id": "openAIEmbeddings_1", + "label": "OpenAI Embeddings", + "name": "openAIEmbeddings", + "type": "OpenAIEmbeddings", + "baseClasses": ["OpenAIEmbeddings", "Embeddings"], + "category": "Embeddings", + "description": "OpenAI API to generate embeddings for a given text", + "inputParams": [ + { + "label": "OpenAI Api Key", + "name": "openAIApiKey", + "type": "password", + "id": "openAIEmbeddings_1-input-openAIApiKey-password" + }, + { + "label": "Strip New Lines", + "name": "stripNewLines", + "type": "boolean", + "optional": true, + "additionalParams": true, + "id": "openAIEmbeddings_1-input-stripNewLines-boolean" + }, + { + "label": "Batch Size", + "name": "batchSize", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAIEmbeddings_1-input-batchSize-number" + }, + { + "label": "Timeout", + "name": "timeout", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAIEmbeddings_1-input-timeout-number" + }, + { + "label": "BasePath", + "name": "basepath", + "type": "string", + "optional": true, + "additionalParams": true, + "id": "openAIEmbeddings_1-input-basepath-string" + } + ], + "inputAnchors": [], + "inputs": { + "stripNewLines": "", + "batchSize": "", + "timeout": "" + }, + "outputAnchors": [ + { + "id": "openAIEmbeddings_1-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings", + "name": "openAIEmbeddings", + "label": 
"OpenAIEmbeddings", + "type": "OpenAIEmbeddings | Embeddings" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 777.5098693425334, + "y": 308.4221448953297 + }, + "dragging": false + }, + { + "width": 300, + "height": 703, + "id": "pineconeExistingIndex_0", + "position": { + "x": 1187.519066203033, + "y": 542.6635399602128 + }, + "type": "customNode", + "data": { + "id": "pineconeExistingIndex_0", + "label": "Pinecone Load Existing Index", + "name": "pineconeExistingIndex", + "type": "Pinecone", + "baseClasses": ["Pinecone", "VectorStoreRetriever", "BaseRetriever"], + "category": "Vector Stores", + "description": "Load existing index from Pinecone (i.e: Document has been upserted)", + "inputParams": [ + { + "label": "Pinecone Api Key", + "name": "pineconeApiKey", + "type": "password", + "id": "pineconeExistingIndex_0-input-pineconeApiKey-password" + }, + { + "label": "Pinecone Environment", + "name": "pineconeEnv", + "type": "string", + "id": "pineconeExistingIndex_0-input-pineconeEnv-string" + }, + { + "label": "Pinecone Index", + "name": "pineconeIndex", + "type": "string", + "id": "pineconeExistingIndex_0-input-pineconeIndex-string" + }, + { + "label": "Pinecone Namespace", + "name": "pineconeNamespace", + "type": "string", + "placeholder": "my-first-namespace", + "optional": true, + "additionalParams": true, + "id": "pineconeExistingIndex_0-input-pineconeNamespace-string" + }, + { + "label": "Pinecone Metadata Filter", + "name": "pineconeMetadataFilter", + "type": "json", + "optional": true, + "additionalParams": true, + "id": "pineconeExistingIndex_0-input-pineconeMetadataFilter-json" + }, + { + "label": "Top K", + "name": "topK", + "description": "Number of top results to fetch. 
Default to 4", + "placeholder": "4", + "type": "number", + "additionalParams": true, + "optional": true, + "id": "pineconeExistingIndex_0-input-topK-number" + } + ], + "inputAnchors": [ + { + "label": "Embeddings", + "name": "embeddings", + "type": "Embeddings", + "id": "pineconeExistingIndex_0-input-embeddings-Embeddings" + } + ], + "inputs": { + "embeddings": "{{openAIEmbeddings_1.data.instance}}", + "pineconeEnv": "northamerica-northeast1-gcp", + "pineconeIndex": "myindex", + "pineconeNamespace": "my-namespace", + "pineconeMetadataFilter": "{\"id\":\"doc1\"}" + }, + "outputAnchors": [ + { + "name": "output", + "label": "Output", + "type": "options", + "options": [ + { + "id": "pineconeExistingIndex_0-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever", + "name": "retriever", + "label": "Pinecone Retriever", + "type": "Pinecone | VectorStoreRetriever | BaseRetriever" + }, + { + "id": "pineconeExistingIndex_0-output-vectorStore-Pinecone|VectorStore", + "name": "vectorStore", + "label": "Pinecone Vector Store", + "type": "Pinecone | VectorStore" + } + ], + "default": "retriever" + } + ], + "outputs": { + "output": "retriever" + }, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 1187.519066203033, + "y": 542.6635399602128 + }, + "dragging": false + }, + { + "width": 300, + "height": 280, + "id": "conversationalRetrievalQAChain_0", + "position": { + "x": 1585.900129303412, + "y": 405.9784391258126 + }, + "type": "customNode", + "data": { + "id": "conversationalRetrievalQAChain_0", + "label": "Conversational Retrieval QA Chain", + "name": "conversationalRetrievalQAChain", + "type": "ConversationalRetrievalQAChain", + "baseClasses": ["ConversationalRetrievalQAChain", "BaseChain", "BaseLangChain"], + "category": "Chains", + "description": "Document QA - built on RetrievalQAChain to provide a chat history component", + "inputParams": [ + { + "label": "Return Source Documents", + "name": "returnSourceDocuments", + "type": "boolean", 
+ "optional": true, + "id": "conversationalRetrievalQAChain_0-input-returnSourceDocuments-boolean" + }, + { + "label": "System Message", + "name": "systemMessagePrompt", + "type": "string", + "rows": 4, + "additionalParams": true, + "optional": true, + "placeholder": "I want you to act as a document that I am having a conversation with. Your name is \"AI Assistant\". You will provide me with answers from the given info. If the answer is not included, say exactly \"Hmm, I am not sure.\" and stop after that. Refuse to answer any question not about the info. Never break character.", + "id": "conversationalRetrievalQAChain_0-input-systemMessagePrompt-string" + }, + { + "label": "Chain Option", + "name": "chainOption", + "type": "options", + "options": [ + { + "label": "MapReduceDocumentsChain", + "name": "map_reduce", + "description": "Suitable for QA tasks over larger documents and can run the preprocessing step in parallel, reducing the running time" + }, + { + "label": "RefineDocumentsChain", + "name": "refine", + "description": "Suitable for QA tasks over a large number of documents." + }, + { + "label": "StuffDocumentsChain", + "name": "stuff", + "description": "Suitable for QA tasks over a small number of documents." 
+ } + ], + "additionalParams": true, + "optional": true, + "id": "conversationalRetrievalQAChain_0-input-chainOption-options" + } + ], + "inputAnchors": [ + { + "label": "Language Model", + "name": "model", + "type": "BaseLanguageModel", + "id": "conversationalRetrievalQAChain_0-input-model-BaseLanguageModel" + }, + { + "label": "Vector Store Retriever", + "name": "vectorStoreRetriever", + "type": "BaseRetriever", + "id": "conversationalRetrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever" + } + ], + "inputs": { + "model": "{{openAI_1.data.instance}}", + "vectorStoreRetriever": "{{pineconeExistingIndex_0.data.instance}}" + }, + "outputAnchors": [ + { + "id": "conversationalRetrievalQAChain_0-output-conversationalRetrievalQAChain-ConversationalRetrievalQAChain|BaseChain|BaseLangChain", + "name": "conversationalRetrievalQAChain", + "label": "ConversationalRetrievalQAChain", + "type": "ConversationalRetrievalQAChain | BaseChain | BaseLangChain" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 1585.900129303412, + "y": 405.9784391258126 + }, + "dragging": false + } + ], + "edges": [ + { + "source": "openAIEmbeddings_1", + "sourceHandle": "openAIEmbeddings_1-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings", + "target": "pineconeExistingIndex_0", + "targetHandle": "pineconeExistingIndex_0-input-embeddings-Embeddings", + "type": "buttonedge", + "id": "openAIEmbeddings_1-openAIEmbeddings_1-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings-pineconeExistingIndex_0-pineconeExistingIndex_0-input-embeddings-Embeddings", + "data": { + "label": "" + } + }, + { + "source": "openAI_1", + "sourceHandle": "openAI_1-output-openAI-OpenAI|BaseLLM|BaseLanguageModel|BaseLangChain", + "target": "conversationalRetrievalQAChain_0", + "targetHandle": "conversationalRetrievalQAChain_0-input-model-BaseLanguageModel", + "type": "buttonedge", + "id": 
"openAI_1-openAI_1-output-openAI-OpenAI|BaseLLM|BaseLanguageModel|BaseLangChain-conversationalRetrievalQAChain_0-conversationalRetrievalQAChain_0-input-model-BaseLanguageModel", + "data": { + "label": "" + } + }, + { + "source": "pineconeExistingIndex_0", + "sourceHandle": "pineconeExistingIndex_0-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever", + "target": "conversationalRetrievalQAChain_0", + "targetHandle": "conversationalRetrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever", + "type": "buttonedge", + "id": "pineconeExistingIndex_0-pineconeExistingIndex_0-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever-conversationalRetrievalQAChain_0-conversationalRetrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever", + "data": { + "label": "" + } + } + ] +} diff --git a/packages/server/marketplaces/Metadata Filter Upsert.json b/packages/server/marketplaces/Metadata Filter Upsert.json new file mode 100644 index 0000000000000000000000000000000000000000..8733665420cbf2b4072991f77597fa065718c0e8 --- /dev/null +++ b/packages/server/marketplaces/Metadata Filter Upsert.json @@ -0,0 +1,732 @@ +{ + "description": "Upsert multiple files with metadata filters and feed into conversational retrieval QA chain", + "nodes": [ + { + "width": 300, + "height": 376, + "id": "recursiveCharacterTextSplitter_1", + "position": { + "x": 347.5233039646277, + "y": 129.29305204134062 + }, + "type": "customNode", + "data": { + "id": "recursiveCharacterTextSplitter_1", + "label": "Recursive Character Text Splitter", + "name": "recursiveCharacterTextSplitter", + "type": "RecursiveCharacterTextSplitter", + "baseClasses": ["RecursiveCharacterTextSplitter", "TextSplitter"], + "category": "Text Splitters", + "description": "Split documents recursively by different characters - starting with \"\n\n\", then \"\n\", then \" \"", + "inputParams": [ + { + "label": "Chunk Size", + "name": "chunkSize", + "type": "number", + "default": 1000, + "optional": true, + "id": 
"recursiveCharacterTextSplitter_1-input-chunkSize-number" + }, + { + "label": "Chunk Overlap", + "name": "chunkOverlap", + "type": "number", + "optional": true, + "id": "recursiveCharacterTextSplitter_1-input-chunkOverlap-number" + } + ], + "inputAnchors": [], + "inputs": { + "chunkSize": 1000, + "chunkOverlap": "" + }, + "outputAnchors": [ + { + "id": "recursiveCharacterTextSplitter_1-output-recursiveCharacterTextSplitter-RecursiveCharacterTextSplitter|TextSplitter", + "name": "recursiveCharacterTextSplitter", + "label": "RecursiveCharacterTextSplitter", + "type": "RecursiveCharacterTextSplitter | TextSplitter" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 347.5233039646277, + "y": 129.29305204134062 + }, + "dragging": false + }, + { + "width": 300, + "height": 524, + "id": "openAI_1", + "position": { + "x": 1159.184721109528, + "y": -38.76565405456694 + }, + "type": "customNode", + "data": { + "id": "openAI_1", + "label": "OpenAI", + "name": "openAI", + "type": "OpenAI", + "baseClasses": ["OpenAI", "BaseLLM", "BaseLanguageModel", "BaseLangChain"], + "category": "LLMs", + "description": "Wrapper around OpenAI large language models", + "inputParams": [ + { + "label": "OpenAI Api Key", + "name": "openAIApiKey", + "type": "password", + "id": "openAI_1-input-openAIApiKey-password" + }, + { + "label": "Model Name", + "name": "modelName", + "type": "options", + "options": [ + { + "label": "text-davinci-003", + "name": "text-davinci-003" + }, + { + "label": "text-davinci-002", + "name": "text-davinci-002" + }, + { + "label": "text-curie-001", + "name": "text-curie-001" + }, + { + "label": "text-babbage-001", + "name": "text-babbage-001" + } + ], + "default": "text-davinci-003", + "optional": true, + "id": "openAI_1-input-modelName-options" + }, + { + "label": "Temperature", + "name": "temperature", + "type": "number", + "default": 0.7, + "optional": true, + "id": "openAI_1-input-temperature-number" + }, + { + 
"label": "Max Tokens", + "name": "maxTokens", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_1-input-maxTokens-number" + }, + { + "label": "Top Probability", + "name": "topP", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_1-input-topP-number" + }, + { + "label": "Best Of", + "name": "bestOf", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_1-input-bestOf-number" + }, + { + "label": "Frequency Penalty", + "name": "frequencyPenalty", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_1-input-frequencyPenalty-number" + }, + { + "label": "Presence Penalty", + "name": "presencePenalty", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_1-input-presencePenalty-number" + }, + { + "label": "Batch Size", + "name": "batchSize", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_1-input-batchSize-number" + }, + { + "label": "Timeout", + "name": "timeout", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_1-input-timeout-number" + }, + { + "label": "BasePath", + "name": "basepath", + "type": "string", + "optional": true, + "additionalParams": true, + "id": "openAI_1-input-basepath-string" + } + ], + "inputAnchors": [], + "inputs": { + "modelName": "text-davinci-003", + "temperature": "0", + "maxTokens": "", + "topP": "", + "bestOf": "", + "frequencyPenalty": "", + "presencePenalty": "", + "batchSize": "", + "timeout": "" + }, + "outputAnchors": [ + { + "id": "openAI_1-output-openAI-OpenAI|BaseLLM|BaseLanguageModel|BaseLangChain", + "name": "openAI", + "label": "OpenAI", + "type": "OpenAI | BaseLLM | BaseLanguageModel | BaseLangChain" + } + ], + "outputs": {}, + "selected": false + }, + "positionAbsolute": { + "x": 1159.184721109528, + "y": -38.76565405456694 + }, + "selected": false, + "dragging": false + }, + { + "width": 300, + 
"height": 330, + "id": "openAIEmbeddings_1", + "position": { + "x": 749.4044250705479, + "y": 858.4858399327618 + }, + "type": "customNode", + "data": { + "id": "openAIEmbeddings_1", + "label": "OpenAI Embeddings", + "name": "openAIEmbeddings", + "type": "OpenAIEmbeddings", + "baseClasses": ["OpenAIEmbeddings", "Embeddings"], + "category": "Embeddings", + "description": "OpenAI API to generate embeddings for a given text", + "inputParams": [ + { + "label": "OpenAI Api Key", + "name": "openAIApiKey", + "type": "password", + "id": "openAIEmbeddings_1-input-openAIApiKey-password" + }, + { + "label": "Strip New Lines", + "name": "stripNewLines", + "type": "boolean", + "optional": true, + "additionalParams": true, + "id": "openAIEmbeddings_1-input-stripNewLines-boolean" + }, + { + "label": "Batch Size", + "name": "batchSize", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAIEmbeddings_1-input-batchSize-number" + }, + { + "label": "Timeout", + "name": "timeout", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAIEmbeddings_1-input-timeout-number" + }, + { + "label": "BasePath", + "name": "basepath", + "type": "string", + "optional": true, + "additionalParams": true, + "id": "openAIEmbeddings_1-input-basepath-string" + } + ], + "inputAnchors": [], + "inputs": { + "stripNewLines": "", + "batchSize": "", + "timeout": "" + }, + "outputAnchors": [ + { + "id": "openAIEmbeddings_1-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings", + "name": "openAIEmbeddings", + "label": "OpenAIEmbeddings", + "type": "OpenAIEmbeddings | Embeddings" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 749.4044250705479, + "y": 858.4858399327618 + }, + "dragging": false + }, + { + "width": 300, + "height": 392, + "id": "textFile_0", + "position": { + "x": 756.5586098635717, + "y": -121.81747478707992 + }, + "type": "customNode", + "data": { + "id": "textFile_0", + "label": "Text 
File", + "name": "textFile", + "type": "Document", + "baseClasses": ["Document"], + "category": "Document Loaders", + "description": "Load data from text files", + "inputParams": [ + { + "label": "Txt File", + "name": "txtFile", + "type": "file", + "fileType": ".txt", + "id": "textFile_0-input-txtFile-file" + }, + { + "label": "Metadata", + "name": "metadata", + "type": "json", + "optional": true, + "additionalParams": true, + "id": "textFile_0-input-metadata-json" + } + ], + "inputAnchors": [ + { + "label": "Text Splitter", + "name": "textSplitter", + "type": "TextSplitter", + "optional": true, + "id": "textFile_0-input-textSplitter-TextSplitter" + } + ], + "inputs": { + "textSplitter": "{{recursiveCharacterTextSplitter_1.data.instance}}", + "metadata": "{\"id\":\"doc1\"}" + }, + "outputAnchors": [ + { + "id": "textFile_0-output-textFile-Document", + "name": "textFile", + "label": "Document", + "type": "Document" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 756.5586098635717, + "y": -121.81747478707992 + }, + "dragging": false + }, + { + "width": 300, + "height": 488, + "id": "pdfFile_0", + "position": { + "x": 752.0044222860163, + "y": 318.11704520478617 + }, + "type": "customNode", + "data": { + "id": "pdfFile_0", + "label": "Pdf File", + "name": "pdfFile", + "type": "Document", + "baseClasses": ["Document"], + "category": "Document Loaders", + "description": "Load data from PDF files", + "inputParams": [ + { + "label": "Pdf File", + "name": "pdfFile", + "type": "file", + "fileType": ".pdf", + "id": "pdfFile_0-input-pdfFile-file" + }, + { + "label": "Usage", + "name": "usage", + "type": "options", + "options": [ + { + "label": "One document per page", + "name": "perPage" + }, + { + "label": "One document per file", + "name": "perFile" + } + ], + "default": "perPage", + "id": "pdfFile_0-input-usage-options" + }, + { + "label": "Metadata", + "name": "metadata", + "type": "json", + "optional": true, + 
"additionalParams": true, + "id": "pdfFile_0-input-metadata-json" + } + ], + "inputAnchors": [ + { + "label": "Text Splitter", + "name": "textSplitter", + "type": "TextSplitter", + "optional": true, + "id": "pdfFile_0-input-textSplitter-TextSplitter" + } + ], + "inputs": { + "textSplitter": "{{recursiveCharacterTextSplitter_1.data.instance}}", + "usage": "perPage", + "metadata": "{\"id\":\"doc2\"}" + }, + "outputAnchors": [ + { + "id": "pdfFile_0-output-pdfFile-Document", + "name": "pdfFile", + "label": "Document", + "type": "Document" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 752.0044222860163, + "y": 318.11704520478617 + }, + "dragging": false + }, + { + "width": 300, + "height": 702, + "id": "pineconeUpsert_0", + "position": { + "x": 1161.8813042660154, + "y": 537.0216614326227 + }, + "type": "customNode", + "data": { + "id": "pineconeUpsert_0", + "label": "Pinecone Upsert Document", + "name": "pineconeUpsert", + "type": "Pinecone", + "baseClasses": ["Pinecone", "VectorStoreRetriever", "BaseRetriever"], + "category": "Vector Stores", + "description": "Upsert documents to Pinecone", + "inputParams": [ + { + "label": "Pinecone Api Key", + "name": "pineconeApiKey", + "type": "password", + "id": "pineconeUpsert_0-input-pineconeApiKey-password" + }, + { + "label": "Pinecone Environment", + "name": "pineconeEnv", + "type": "string", + "id": "pineconeUpsert_0-input-pineconeEnv-string" + }, + { + "label": "Pinecone Index", + "name": "pineconeIndex", + "type": "string", + "id": "pineconeUpsert_0-input-pineconeIndex-string" + }, + { + "label": "Pinecone Namespace", + "name": "pineconeNamespace", + "type": "string", + "placeholder": "my-first-namespace", + "optional": true, + "additionalParams": true, + "id": "pineconeUpsert_0-input-pineconeNamespace-string" + }, + { + "label": "Top K", + "name": "topK", + "description": "Number of top results to fetch. 
Default to 4", + "placeholder": "4", + "type": "number", + "additionalParams": true, + "optional": true, + "id": "pineconeUpsert_0-input-topK-number" + } + ], + "inputAnchors": [ + { + "label": "Document", + "name": "document", + "type": "Document", + "list": true, + "id": "pineconeUpsert_0-input-document-Document" + }, + { + "label": "Embeddings", + "name": "embeddings", + "type": "Embeddings", + "id": "pineconeUpsert_0-input-embeddings-Embeddings" + } + ], + "inputs": { + "document": ["{{pdfFile_0.data.instance}}", "{{textFile_0.data.instance}}"], + "embeddings": "{{openAIEmbeddings_1.data.instance}}", + "pineconeEnv": "northamerica-northeast1-gcp", + "pineconeIndex": "myindex", + "pineconeNamespace": "my-namespace" + }, + "outputAnchors": [ + { + "name": "output", + "label": "Output", + "type": "options", + "options": [ + { + "id": "pineconeUpsert_0-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever", + "name": "retriever", + "label": "Pinecone Retriever", + "type": "Pinecone | VectorStoreRetriever | BaseRetriever" + }, + { + "id": "pineconeUpsert_0-output-vectorStore-Pinecone|VectorStore", + "name": "vectorStore", + "label": "Pinecone Vector Store", + "type": "Pinecone | VectorStore" + } + ], + "default": "retriever" + } + ], + "outputs": { + "output": "retriever" + }, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 1161.8813042660154, + "y": 537.0216614326227 + }, + "dragging": false + }, + { + "width": 300, + "height": 280, + "id": "conversationalRetrievalQAChain_0", + "position": { + "x": 1570.3859788160953, + "y": 423.6687850109136 + }, + "type": "customNode", + "data": { + "id": "conversationalRetrievalQAChain_0", + "label": "Conversational Retrieval QA Chain", + "name": "conversationalRetrievalQAChain", + "type": "ConversationalRetrievalQAChain", + "baseClasses": ["ConversationalRetrievalQAChain", "BaseChain", "BaseLangChain"], + "category": "Chains", + "description": "Document QA - built on RetrievalQAChain to 
provide a chat history component", + "inputParams": [ + { + "label": "Return Source Documents", + "name": "returnSourceDocuments", + "type": "boolean", + "optional": true, + "id": "conversationalRetrievalQAChain_0-input-returnSourceDocuments-boolean" + }, + { + "label": "System Message", + "name": "systemMessagePrompt", + "type": "string", + "rows": 4, + "additionalParams": true, + "optional": true, + "placeholder": "I want you to act as a document that I am having a conversation with. Your name is \"AI Assistant\". You will provide me with answers from the given info. If the answer is not included, say exactly \"Hmm, I am not sure.\" and stop after that. Refuse to answer any question not about the info. Never break character.", + "id": "conversationalRetrievalQAChain_0-input-systemMessagePrompt-string" + }, + { + "label": "Chain Option", + "name": "chainOption", + "type": "options", + "options": [ + { + "label": "MapReduceDocumentsChain", + "name": "map_reduce", + "description": "Suitable for QA tasks over larger documents and can run the preprocessing step in parallel, reducing the running time" + }, + { + "label": "RefineDocumentsChain", + "name": "refine", + "description": "Suitable for QA tasks over a large number of documents." + }, + { + "label": "StuffDocumentsChain", + "name": "stuff", + "description": "Suitable for QA tasks over a small number of documents." 
+ } + ], + "additionalParams": true, + "optional": true, + "id": "conversationalRetrievalQAChain_0-input-chainOption-options" + } + ], + "inputAnchors": [ + { + "label": "Language Model", + "name": "model", + "type": "BaseLanguageModel", + "id": "conversationalRetrievalQAChain_0-input-model-BaseLanguageModel" + }, + { + "label": "Vector Store Retriever", + "name": "vectorStoreRetriever", + "type": "BaseRetriever", + "id": "conversationalRetrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever" + } + ], + "inputs": { + "model": "{{openAI_1.data.instance}}", + "vectorStoreRetriever": "{{pineconeUpsert_0.data.instance}}" + }, + "outputAnchors": [ + { + "id": "conversationalRetrievalQAChain_0-output-conversationalRetrievalQAChain-ConversationalRetrievalQAChain|BaseChain|BaseLangChain", + "name": "conversationalRetrievalQAChain", + "label": "ConversationalRetrievalQAChain", + "type": "ConversationalRetrievalQAChain | BaseChain | BaseLangChain" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 1570.3859788160953, + "y": 423.6687850109136 + }, + "dragging": false + } + ], + "edges": [ + { + "source": "recursiveCharacterTextSplitter_1", + "sourceHandle": "recursiveCharacterTextSplitter_1-output-recursiveCharacterTextSplitter-RecursiveCharacterTextSplitter|TextSplitter", + "target": "textFile_0", + "targetHandle": "textFile_0-input-textSplitter-TextSplitter", + "type": "buttonedge", + "id": "recursiveCharacterTextSplitter_1-recursiveCharacterTextSplitter_1-output-recursiveCharacterTextSplitter-RecursiveCharacterTextSplitter|TextSplitter-textFile_0-textFile_0-input-textSplitter-TextSplitter", + "data": { + "label": "" + } + }, + { + "source": "recursiveCharacterTextSplitter_1", + "sourceHandle": "recursiveCharacterTextSplitter_1-output-recursiveCharacterTextSplitter-RecursiveCharacterTextSplitter|TextSplitter", + "target": "pdfFile_0", + "targetHandle": "pdfFile_0-input-textSplitter-TextSplitter", + "type": 
"buttonedge", + "id": "recursiveCharacterTextSplitter_1-recursiveCharacterTextSplitter_1-output-recursiveCharacterTextSplitter-RecursiveCharacterTextSplitter|TextSplitter-pdfFile_0-pdfFile_0-input-textSplitter-TextSplitter", + "data": { + "label": "" + } + }, + { + "source": "openAIEmbeddings_1", + "sourceHandle": "openAIEmbeddings_1-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings", + "target": "pineconeUpsert_0", + "targetHandle": "pineconeUpsert_0-input-embeddings-Embeddings", + "type": "buttonedge", + "id": "openAIEmbeddings_1-openAIEmbeddings_1-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings-pineconeUpsert_0-pineconeUpsert_0-input-embeddings-Embeddings", + "data": { + "label": "" + } + }, + { + "source": "pdfFile_0", + "sourceHandle": "pdfFile_0-output-pdfFile-Document", + "target": "pineconeUpsert_0", + "targetHandle": "pineconeUpsert_0-input-document-Document", + "type": "buttonedge", + "id": "pdfFile_0-pdfFile_0-output-pdfFile-Document-pineconeUpsert_0-pineconeUpsert_0-input-document-Document", + "data": { + "label": "" + } + }, + { + "source": "textFile_0", + "sourceHandle": "textFile_0-output-textFile-Document", + "target": "pineconeUpsert_0", + "targetHandle": "pineconeUpsert_0-input-document-Document", + "type": "buttonedge", + "id": "textFile_0-textFile_0-output-textFile-Document-pineconeUpsert_0-pineconeUpsert_0-input-document-Document", + "data": { + "label": "" + } + }, + { + "source": "openAI_1", + "sourceHandle": "openAI_1-output-openAI-OpenAI|BaseLLM|BaseLanguageModel|BaseLangChain", + "target": "conversationalRetrievalQAChain_0", + "targetHandle": "conversationalRetrievalQAChain_0-input-model-BaseLanguageModel", + "type": "buttonedge", + "id": "openAI_1-openAI_1-output-openAI-OpenAI|BaseLLM|BaseLanguageModel|BaseLangChain-conversationalRetrievalQAChain_0-conversationalRetrievalQAChain_0-input-model-BaseLanguageModel", + "data": { + "label": "" + } + }, + { + "source": "pineconeUpsert_0", + "sourceHandle": 
"pineconeUpsert_0-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever", + "target": "conversationalRetrievalQAChain_0", + "targetHandle": "conversationalRetrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever", + "type": "buttonedge", + "id": "pineconeUpsert_0-pineconeUpsert_0-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever-conversationalRetrievalQAChain_0-conversationalRetrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever", + "data": { + "label": "" + } + } + ] +} diff --git a/packages/server/marketplaces/Multi Prompt Chain.json b/packages/server/marketplaces/Multi Prompt Chain.json new file mode 100644 index 0000000000000000000000000000000000000000..a9c41a76b9edb11cc4c3c0fe656049eed2ad54f1 --- /dev/null +++ b/packages/server/marketplaces/Multi Prompt Chain.json @@ -0,0 +1,450 @@ +{ + "description": "A chain that automatically picks an appropriate prompt from multiple prompts", + "nodes": [ + { + "width": 300, + "height": 632, + "id": "promptRetriever_0", + "position": { + "x": 197.46642699727397, + "y": 25.945621297410923 + }, + "type": "customNode", + "data": { + "id": "promptRetriever_0", + "label": "Prompt Retriever", + "name": "promptRetriever", + "type": "PromptRetriever", + "baseClasses": ["PromptRetriever"], + "category": "Retrievers", + "description": "Store prompt template with name & description to be later queried by MultiPromptChain", + "inputParams": [ + { + "label": "Prompt Name", + "name": "name", + "type": "string", + "placeholder": "physics-qa", + "id": "promptRetriever_0-input-name-string" + }, + { + "label": "Prompt Description", + "name": "description", + "type": "string", + "rows": 3, + "description": "Description of what the prompt does and when it should be used", + "placeholder": "Good for answering questions about physics", + "id": "promptRetriever_0-input-description-string" + }, + { + "label": "Prompt System Message", + "name": "systemMessage", + "type": "string", + "rows": 4, + "placeholder": "You are a 
very smart physics professor. You are great at answering questions about physics in a concise and easy to understand manner. When you don't know the answer to a question you admit that you don't know.", + "id": "promptRetriever_0-input-systemMessage-string" + } + ], + "inputAnchors": [], + "inputs": { + "name": "physics", + "description": "Good for answering questions about physics", + "systemMessage": "You are a very smart physics professor. You are great at answering questions about physics in a concise and easy to understand manner. When you don't know the answer to a question you admit that you don't know." + }, + "outputAnchors": [ + { + "id": "promptRetriever_0-output-promptRetriever-PromptRetriever", + "name": "promptRetriever", + "label": "PromptRetriever", + "type": "PromptRetriever" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 197.46642699727397, + "y": 25.945621297410923 + }, + "dragging": false + }, + { + "width": 300, + "height": 280, + "id": "multiPromptChain_0", + "position": { + "x": 1619.1305522575494, + "y": 210.28103293821243 + }, + "type": "customNode", + "data": { + "id": "multiPromptChain_0", + "label": "Multi Prompt Chain", + "name": "multiPromptChain", + "type": "MultiPromptChain", + "baseClasses": ["MultiPromptChain", "MultiRouteChain", "BaseChain", "BaseLangChain"], + "category": "Chains", + "description": "Chain automatically picks an appropriate prompt from multiple prompt templates", + "inputParams": [], + "inputAnchors": [ + { + "label": "Language Model", + "name": "model", + "type": "BaseLanguageModel", + "id": "multiPromptChain_0-input-model-BaseLanguageModel" + }, + { + "label": "Prompt Retriever", + "name": "promptRetriever", + "type": "PromptRetriever", + "list": true, + "id": "multiPromptChain_0-input-promptRetriever-PromptRetriever" + } + ], + "inputs": { + "model": "{{chatOpenAI_0.data.instance}}", + "promptRetriever": [ + "{{promptRetriever_0.data.instance}}", + 
"{{promptRetriever_2.data.instance}}", + "{{promptRetriever_1.data.instance}}" + ] + }, + "outputAnchors": [ + { + "id": "multiPromptChain_0-output-multiPromptChain-MultiPromptChain|MultiRouteChain|BaseChain|BaseLangChain", + "name": "multiPromptChain", + "label": "MultiPromptChain", + "type": "MultiPromptChain | MultiRouteChain | BaseChain | BaseLangChain" + } + ], + "outputs": {}, + "selected": false + }, + "positionAbsolute": { + "x": 1619.1305522575494, + "y": 210.28103293821243 + }, + "selected": false, + "dragging": false + }, + { + "width": 300, + "height": 632, + "id": "promptRetriever_1", + "position": { + "x": 539.1322780233141, + "y": -250.72967142925938 + }, + "type": "customNode", + "data": { + "id": "promptRetriever_1", + "label": "Prompt Retriever", + "name": "promptRetriever", + "type": "PromptRetriever", + "baseClasses": ["PromptRetriever"], + "category": "Retrievers", + "description": "Store prompt template with name & description to be later queried by MultiPromptChain", + "inputParams": [ + { + "label": "Prompt Name", + "name": "name", + "type": "string", + "placeholder": "physics-qa", + "id": "promptRetriever_1-input-name-string" + }, + { + "label": "Prompt Description", + "name": "description", + "type": "string", + "rows": 3, + "description": "Description of what the prompt does and when it should be used", + "placeholder": "Good for answering questions about physics", + "id": "promptRetriever_1-input-description-string" + }, + { + "label": "Prompt System Message", + "name": "systemMessage", + "type": "string", + "rows": 4, + "placeholder": "You are a very smart physics professor. You are great at answering questions about physics in a concise and easy to understand manner. 
When you don't know the answer to a question you admit that you don't know.", + "id": "promptRetriever_1-input-systemMessage-string" + } + ], + "inputAnchors": [], + "inputs": { + "name": "math", + "description": "Good for answering math questions", + "systemMessage": "You are a very good mathematician. You are great at answering math questions. You are so good because you are able to break down hard problems into their component parts, answer the component parts, and then put them together to answer the broader question." + }, + "outputAnchors": [ + { + "id": "promptRetriever_1-output-promptRetriever-PromptRetriever", + "name": "promptRetriever", + "label": "PromptRetriever", + "type": "PromptRetriever" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 539.1322780233141, + "y": -250.72967142925938 + }, + "dragging": false + }, + { + "width": 300, + "height": 632, + "id": "promptRetriever_2", + "position": { + "x": 872.6184534864304, + "y": -366.9443140594265 + }, + "type": "customNode", + "data": { + "id": "promptRetriever_2", + "label": "Prompt Retriever", + "name": "promptRetriever", + "type": "PromptRetriever", + "baseClasses": ["PromptRetriever"], + "category": "Retrievers", + "description": "Store prompt template with name & description to be later queried by MultiPromptChain", + "inputParams": [ + { + "label": "Prompt Name", + "name": "name", + "type": "string", + "placeholder": "physics-qa", + "id": "promptRetriever_2-input-name-string" + }, + { + "label": "Prompt Description", + "name": "description", + "type": "string", + "rows": 3, + "description": "Description of what the prompt does and when it should be used", + "placeholder": "Good for answering questions about physics", + "id": "promptRetriever_2-input-description-string" + }, + { + "label": "Prompt System Message", + "name": "systemMessage", + "type": "string", + "rows": 4, + "placeholder": "You are a very smart physics professor. 
You are great at answering questions about physics in a concise and easy to understand manner. When you don't know the answer to a question you admit that you don't know.", + "id": "promptRetriever_2-input-systemMessage-string" + } + ], + "inputAnchors": [], + "inputs": { + "name": "history", + "description": "Good for answering questions about history", + "systemMessage": "You are a very smart history professor. You are great at answering questions about history in a concise and easy to understand manner. When you don't know the answer to a question you admit that you don't know." + }, + "outputAnchors": [ + { + "id": "promptRetriever_2-output-promptRetriever-PromptRetriever", + "name": "promptRetriever", + "label": "PromptRetriever", + "type": "PromptRetriever" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 872.6184534864304, + "y": -366.9443140594265 + }, + "dragging": false + }, + { + "width": 300, + "height": 524, + "id": "chatOpenAI_0", + "position": { + "x": 1230.07368145571, + "y": -296.44522826934826 + }, + "type": "customNode", + "data": { + "id": "chatOpenAI_0", + "label": "ChatOpenAI", + "name": "chatOpenAI", + "type": "ChatOpenAI", + "baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel", "BaseLangChain"], + "category": "Chat Models", + "description": "Wrapper around OpenAI large language models that use the Chat endpoint", + "inputParams": [ + { + "label": "OpenAI Api Key", + "name": "openAIApiKey", + "type": "password", + "id": "chatOpenAI_0-input-openAIApiKey-password" + }, + { + "label": "Model Name", + "name": "modelName", + "type": "options", + "options": [ + { + "label": "gpt-4", + "name": "gpt-4" + }, + { + "label": "gpt-4-0314", + "name": "gpt-4-0314" + }, + { + "label": "gpt-4-32k-0314", + "name": "gpt-4-32k-0314" + }, + { + "label": "gpt-3.5-turbo", + "name": "gpt-3.5-turbo" + }, + { + "label": "gpt-3.5-turbo-0301", + "name": "gpt-3.5-turbo-0301" + } + ], + "default": 
"gpt-3.5-turbo", + "optional": true, + "id": "chatOpenAI_0-input-modelName-options" + }, + { + "label": "Temperature", + "name": "temperature", + "type": "number", + "default": 0.9, + "optional": true, + "id": "chatOpenAI_0-input-temperature-number" + }, + { + "label": "Max Tokens", + "name": "maxTokens", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-maxTokens-number" + }, + { + "label": "Top Probability", + "name": "topP", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-topP-number" + }, + { + "label": "Frequency Penalty", + "name": "frequencyPenalty", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-frequencyPenalty-number" + }, + { + "label": "Presence Penalty", + "name": "presencePenalty", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-presencePenalty-number" + }, + { + "label": "Timeout", + "name": "timeout", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-timeout-number" + }, + { + "label": "BasePath", + "name": "basepath", + "type": "string", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-basepath-string" + } + ], + "inputAnchors": [], + "inputs": { + "modelName": "gpt-3.5-turbo", + "temperature": 0.9, + "maxTokens": "", + "topP": "", + "frequencyPenalty": "", + "presencePenalty": "", + "timeout": "" + }, + "outputAnchors": [ + { + "id": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain", + "name": "chatOpenAI", + "label": "ChatOpenAI", + "type": "ChatOpenAI | BaseChatModel | BaseLanguageModel | BaseLangChain" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 1230.07368145571, + "y": -296.44522826934826 + }, + "dragging": false + } + ], + "edges": [ + { + "source": "promptRetriever_0", + 
"sourceHandle": "promptRetriever_0-output-promptRetriever-PromptRetriever", + "target": "multiPromptChain_0", + "targetHandle": "multiPromptChain_0-input-promptRetriever-PromptRetriever", + "type": "buttonedge", + "id": "promptRetriever_0-promptRetriever_0-output-promptRetriever-PromptRetriever-multiPromptChain_0-multiPromptChain_0-input-promptRetriever-PromptRetriever", + "data": { + "label": "" + } + }, + { + "source": "promptRetriever_2", + "sourceHandle": "promptRetriever_2-output-promptRetriever-PromptRetriever", + "target": "multiPromptChain_0", + "targetHandle": "multiPromptChain_0-input-promptRetriever-PromptRetriever", + "type": "buttonedge", + "id": "promptRetriever_2-promptRetriever_2-output-promptRetriever-PromptRetriever-multiPromptChain_0-multiPromptChain_0-input-promptRetriever-PromptRetriever", + "data": { + "label": "" + } + }, + { + "source": "promptRetriever_1", + "sourceHandle": "promptRetriever_1-output-promptRetriever-PromptRetriever", + "target": "multiPromptChain_0", + "targetHandle": "multiPromptChain_0-input-promptRetriever-PromptRetriever", + "type": "buttonedge", + "id": "promptRetriever_1-promptRetriever_1-output-promptRetriever-PromptRetriever-multiPromptChain_0-multiPromptChain_0-input-promptRetriever-PromptRetriever", + "data": { + "label": "" + } + }, + { + "source": "chatOpenAI_0", + "sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain", + "target": "multiPromptChain_0", + "targetHandle": "multiPromptChain_0-input-model-BaseLanguageModel", + "type": "buttonedge", + "id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain-multiPromptChain_0-multiPromptChain_0-input-model-BaseLanguageModel", + "data": { + "label": "" + } + } + ] +} diff --git a/packages/server/marketplaces/Multi Retrieval QA Chain.json b/packages/server/marketplaces/Multi Retrieval QA Chain.json new file mode 100644 index 
0000000000000000000000000000000000000000..8f2ca89e65e890e73c7b3ef5327aeaa114375591 --- /dev/null +++ b/packages/server/marketplaces/Multi Retrieval QA Chain.json @@ -0,0 +1,906 @@ +{ + "description": "A chain that automatically picks an appropriate retriever from multiple different vector databases", + "nodes": [ + { + "width": 300, + "height": 504, + "id": "vectorStoreRetriever_0", + "position": { + "x": 712.9322670298264, + "y": 860.5462810572917 + }, + "type": "customNode", + "data": { + "id": "vectorStoreRetriever_0", + "label": "Vector Store Retriever", + "name": "vectorStoreRetriever", + "type": "VectorStoreRetriever", + "baseClasses": ["VectorStoreRetriever"], + "category": "Retrievers", + "description": "Store vector store as retriever. Used with MultiRetrievalQAChain", + "inputParams": [ + { + "label": "Retriever Name", + "name": "name", + "type": "string", + "placeholder": "netflix movies", + "id": "vectorStoreRetriever_0-input-name-string" + }, + { + "label": "Retriever Description", + "name": "description", + "type": "string", + "rows": 3, + "description": "Description of when to use the vector store retriever", + "placeholder": "Good for answering questions about netflix movies", + "id": "vectorStoreRetriever_0-input-description-string" + } + ], + "inputAnchors": [ + { + "label": "Vector Store", + "name": "vectorStore", + "type": "VectorStore", + "id": "vectorStoreRetriever_0-input-vectorStore-VectorStore" + } + ], + "inputs": { + "vectorStore": "{{supabaseExistingIndex_0.data.instance}}", + "name": "aqua teen", + "description": "Good for answering questions about Aqua Teen Hunger Force theme song" + }, + "outputAnchors": [ + { + "id": "vectorStoreRetriever_0-output-vectorStoreRetriever-VectorStoreRetriever", + "name": "vectorStoreRetriever", + "label": "VectorStoreRetriever", + "type": "VectorStoreRetriever" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 712.9322670298264, + "y": 
860.5462810572917 + }, + "dragging": false + }, + { + "width": 300, + "height": 279, + "id": "multiRetrievalQAChain_0", + "position": { + "x": 1563.0150452201099, + "y": 460.78375893303934 + }, + "type": "customNode", + "data": { + "id": "multiRetrievalQAChain_0", + "label": "Multi Retrieval QA Chain", + "name": "multiRetrievalQAChain", + "type": "MultiRetrievalQAChain", + "baseClasses": ["MultiRetrievalQAChain", "MultiRouteChain", "BaseChain", "BaseLangChain"], + "category": "Chains", + "description": "QA Chain that automatically picks an appropriate vector store from multiple retrievers", + "inputParams": [], + "inputAnchors": [ + { + "label": "Language Model", + "name": "model", + "type": "BaseLanguageModel", + "id": "multiRetrievalQAChain_0-input-model-BaseLanguageModel" + }, + { + "label": "Vector Store Retriever", + "name": "vectorStoreRetriever", + "type": "VectorStoreRetriever", + "list": true, + "id": "multiRetrievalQAChain_0-input-vectorStoreRetriever-VectorStoreRetriever" + } + ], + "inputs": { + "model": "{{chatOpenAI_0.data.instance}}", + "vectorStoreRetriever": [ + "{{vectorStoreRetriever_0.data.instance}}", + "{{vectorStoreRetriever_1.data.instance}}", + "{{vectorStoreRetriever_2.data.instance}}" + ] + }, + "outputAnchors": [ + { + "id": "multiRetrievalQAChain_0-output-multiRetrievalQAChain-MultiRetrievalQAChain|MultiRouteChain|BaseChain|BaseLangChain", + "name": "multiRetrievalQAChain", + "label": "MultiRetrievalQAChain", + "type": "MultiRetrievalQAChain | MultiRouteChain | BaseChain | BaseLangChain" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 1563.0150452201099, + "y": 460.78375893303934 + }, + "dragging": false + }, + { + "width": 300, + "height": 504, + "id": "vectorStoreRetriever_1", + "position": { + "x": 711.4902931206071, + "y": 315.2414600651632 + }, + "type": "customNode", + "data": { + "id": "vectorStoreRetriever_1", + "label": "Vector Store Retriever", + "name": 
"vectorStoreRetriever", + "type": "VectorStoreRetriever", + "baseClasses": ["VectorStoreRetriever"], + "category": "Retrievers", + "description": "Store vector store as retriever. Used with MultiRetrievalQAChain", + "inputParams": [ + { + "label": "Retriever Name", + "name": "name", + "type": "string", + "placeholder": "netflix movies", + "id": "vectorStoreRetriever_1-input-name-string" + }, + { + "label": "Retriever Description", + "name": "description", + "type": "string", + "rows": 3, + "description": "Description of when to use the vector store retriever", + "placeholder": "Good for answering questions about netflix movies", + "id": "vectorStoreRetriever_1-input-description-string" + } + ], + "inputAnchors": [ + { + "label": "Vector Store", + "name": "vectorStore", + "type": "VectorStore", + "id": "vectorStoreRetriever_1-input-vectorStore-VectorStore" + } + ], + "inputs": { + "vectorStore": "{{chromaExistingIndex_0.data.instance}}", + "name": "mst3k", + "description": "Good for answering questions about Mystery Science Theater 3000 theme song" + }, + "outputAnchors": [ + { + "id": "vectorStoreRetriever_1-output-vectorStoreRetriever-VectorStoreRetriever", + "name": "vectorStoreRetriever", + "label": "VectorStoreRetriever", + "type": "VectorStoreRetriever" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 711.4902931206071, + "y": 315.2414600651632 + }, + "dragging": false + }, + { + "width": 300, + "height": 504, + "id": "vectorStoreRetriever_2", + "position": { + "x": 706.0716220151372, + "y": -217.51566869136752 + }, + "type": "customNode", + "data": { + "id": "vectorStoreRetriever_2", + "label": "Vector Store Retriever", + "name": "vectorStoreRetriever", + "type": "VectorStoreRetriever", + "baseClasses": ["VectorStoreRetriever"], + "category": "Retrievers", + "description": "Store vector store as retriever. 
Used with MultiRetrievalQAChain", + "inputParams": [ + { + "label": "Retriever Name", + "name": "name", + "type": "string", + "placeholder": "netflix movies", + "id": "vectorStoreRetriever_2-input-name-string" + }, + { + "label": "Retriever Description", + "name": "description", + "type": "string", + "rows": 3, + "description": "Description of when to use the vector store retriever", + "placeholder": "Good for answering questions about netflix movies", + "id": "vectorStoreRetriever_2-input-description-string" + } + ], + "inputAnchors": [ + { + "label": "Vector Store", + "name": "vectorStore", + "type": "VectorStore", + "id": "vectorStoreRetriever_2-input-vectorStore-VectorStore" + } + ], + "inputs": { + "vectorStore": "{{pineconeExistingIndex_0.data.instance}}", + "name": "animaniacs", + "description": "Good for answering questions about Animaniacs theme song" + }, + "outputAnchors": [ + { + "id": "vectorStoreRetriever_2-output-vectorStoreRetriever-VectorStoreRetriever", + "name": "vectorStoreRetriever", + "label": "VectorStoreRetriever", + "type": "VectorStoreRetriever" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 706.0716220151372, + "y": -217.51566869136752 + }, + "dragging": false + }, + { + "width": 300, + "height": 523, + "id": "chatOpenAI_0", + "position": { + "x": 1206.027762600755, + "y": -212.35338654620222 + }, + "type": "customNode", + "data": { + "id": "chatOpenAI_0", + "label": "ChatOpenAI", + "name": "chatOpenAI", + "type": "ChatOpenAI", + "baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel", "BaseLangChain"], + "category": "Chat Models", + "description": "Wrapper around OpenAI large language models that use the Chat endpoint", + "inputParams": [ + { + "label": "OpenAI Api Key", + "name": "openAIApiKey", + "type": "password", + "id": "chatOpenAI_0-input-openAIApiKey-password" + }, + { + "label": "Model Name", + "name": "modelName", + "type": "options", + "options": [ + { + 
"label": "gpt-4", + "name": "gpt-4" + }, + { + "label": "gpt-4-0314", + "name": "gpt-4-0314" + }, + { + "label": "gpt-4-32k-0314", + "name": "gpt-4-32k-0314" + }, + { + "label": "gpt-3.5-turbo", + "name": "gpt-3.5-turbo" + }, + { + "label": "gpt-3.5-turbo-0301", + "name": "gpt-3.5-turbo-0301" + } + ], + "default": "gpt-3.5-turbo", + "optional": true, + "id": "chatOpenAI_0-input-modelName-options" + }, + { + "label": "Temperature", + "name": "temperature", + "type": "number", + "default": 0.9, + "optional": true, + "id": "chatOpenAI_0-input-temperature-number" + }, + { + "label": "Max Tokens", + "name": "maxTokens", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-maxTokens-number" + }, + { + "label": "Top Probability", + "name": "topP", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-topP-number" + }, + { + "label": "Frequency Penalty", + "name": "frequencyPenalty", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-frequencyPenalty-number" + }, + { + "label": "Presence Penalty", + "name": "presencePenalty", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-presencePenalty-number" + }, + { + "label": "Timeout", + "name": "timeout", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-timeout-number" + }, + { + "label": "BasePath", + "name": "basepath", + "type": "string", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-basepath-string" + } + ], + "inputAnchors": [], + "inputs": { + "modelName": "gpt-3.5-turbo", + "temperature": 0.9, + "maxTokens": "", + "topP": "", + "frequencyPenalty": "", + "presencePenalty": "", + "timeout": "" + }, + "outputAnchors": [ + { + "id": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain", + "name": "chatOpenAI", + "label": "ChatOpenAI", + 
"type": "ChatOpenAI | BaseChatModel | BaseLanguageModel | BaseLangChain" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 1206.027762600755, + "y": -212.35338654620222 + }, + "dragging": false + }, + { + "width": 300, + "height": 329, + "id": "openAIEmbeddings_0", + "position": { + "x": -254.88737984323413, + "y": 279.72801937636154 + }, + "type": "customNode", + "data": { + "id": "openAIEmbeddings_0", + "label": "OpenAI Embeddings", + "name": "openAIEmbeddings", + "type": "OpenAIEmbeddings", + "baseClasses": ["OpenAIEmbeddings", "Embeddings"], + "category": "Embeddings", + "description": "OpenAI API to generate embeddings for a given text", + "inputParams": [ + { + "label": "OpenAI Api Key", + "name": "openAIApiKey", + "type": "password", + "id": "openAIEmbeddings_0-input-openAIApiKey-password" + }, + { + "label": "Strip New Lines", + "name": "stripNewLines", + "type": "boolean", + "optional": true, + "additionalParams": true, + "id": "openAIEmbeddings_0-input-stripNewLines-boolean" + }, + { + "label": "Batch Size", + "name": "batchSize", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAIEmbeddings_0-input-batchSize-number" + }, + { + "label": "Timeout", + "name": "timeout", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAIEmbeddings_0-input-timeout-number" + }, + { + "label": "BasePath", + "name": "basepath", + "type": "string", + "optional": true, + "additionalParams": true, + "id": "openAIEmbeddings_0-input-basepath-string" + } + ], + "inputAnchors": [], + "inputs": { + "stripNewLines": "", + "batchSize": "", + "timeout": "" + }, + "outputAnchors": [ + { + "id": "openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings", + "name": "openAIEmbeddings", + "label": "OpenAIEmbeddings", + "type": "OpenAIEmbeddings | Embeddings" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 
-254.88737984323413, + "y": 279.72801937636154 + }, + "dragging": false + }, + { + "width": 300, + "height": 603, + "id": "pineconeExistingIndex_0", + "position": { + "x": 271.2513182410521, + "y": -410.32709109501735 + }, + "type": "customNode", + "data": { + "id": "pineconeExistingIndex_0", + "label": "Pinecone Load Existing Index", + "name": "pineconeExistingIndex", + "type": "Pinecone", + "baseClasses": ["Pinecone", "VectorStoreRetriever", "BaseRetriever"], + "category": "Vector Stores", + "description": "Load existing index from Pinecone (i.e: Document has been upserted)", + "inputParams": [ + { + "label": "Pinecone Api Key", + "name": "pineconeApiKey", + "type": "password", + "id": "pineconeExistingIndex_0-input-pineconeApiKey-password" + }, + { + "label": "Pinecone Environment", + "name": "pineconeEnv", + "type": "string", + "id": "pineconeExistingIndex_0-input-pineconeEnv-string" + }, + { + "label": "Pinecone Index", + "name": "pineconeIndex", + "type": "string", + "id": "pineconeExistingIndex_0-input-pineconeIndex-string" + }, + { + "label": "Pinecone Namespace", + "name": "pineconeNamespace", + "type": "string", + "placeholder": "my-first-namespace", + "optional": true, + "additionalParams": true, + "id": "pineconeExistingIndex_0-input-pineconeNamespace-string" + }, + { + "label": "Pinecone Metadata Filter", + "name": "pineconeMetadataFilter", + "type": "json", + "optional": true, + "additionalParams": true, + "id": "pineconeExistingIndex_0-input-pineconeMetadataFilter-json" + }, + { + "label": "Top K", + "name": "topK", + "description": "Number of top results to fetch. 
Default to 4", + "placeholder": "4", + "type": "number", + "additionalParams": true, + "optional": true, + "id": "pineconeExistingIndex_0-input-topK-number" + } + ], + "inputAnchors": [ + { + "label": "Embeddings", + "name": "embeddings", + "type": "Embeddings", + "id": "pineconeExistingIndex_0-input-embeddings-Embeddings" + } + ], + "inputs": { + "embeddings": "{{openAIEmbeddings_0.data.instance}}", + "pineconeEnv": "", + "pineconeIndex": "", + "pineconeNamespace": "", + "pineconeMetadataFilter": "" + }, + "outputAnchors": [ + { + "name": "output", + "label": "Output", + "type": "options", + "options": [ + { + "id": "pineconeExistingIndex_0-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever", + "name": "retriever", + "label": "Pinecone Retriever", + "type": "Pinecone | VectorStoreRetriever | BaseRetriever" + }, + { + "id": "pineconeExistingIndex_0-output-vectorStore-Pinecone|VectorStore", + "name": "vectorStore", + "label": "Pinecone Vector Store", + "type": "Pinecone | VectorStore" + } + ], + "default": "retriever" + } + ], + "outputs": { + "output": "vectorStore" + }, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 271.2513182410521, + "y": -410.32709109501735 + }, + "dragging": false + }, + { + "width": 300, + "height": 505, + "id": "chromaExistingIndex_0", + "position": { + "x": 269.2940530300552, + "y": 262.41814510537796 + }, + "type": "customNode", + "data": { + "id": "chromaExistingIndex_0", + "label": "Chroma Load Existing Index", + "name": "chromaExistingIndex", + "type": "Chroma", + "baseClasses": ["Chroma", "VectorStoreRetriever", "BaseRetriever"], + "category": "Vector Stores", + "description": "Load existing index from Chroma (i.e: Document has been upserted)", + "inputParams": [ + { + "label": "Collection Name", + "name": "collectionName", + "type": "string", + "id": "chromaExistingIndex_0-input-collectionName-string" + }, + { + "label": "Chroma URL", + "name": "chromaURL", + "type": "string", + "optional": 
true, + "id": "chromaExistingIndex_0-input-chromaURL-string" + }, + { + "label": "Top K", + "name": "topK", + "description": "Number of top results to fetch. Default to 4", + "placeholder": "4", + "type": "number", + "additionalParams": true, + "optional": true, + "id": "chromaExistingIndex_0-input-topK-number" + } + ], + "inputAnchors": [ + { + "label": "Embeddings", + "name": "embeddings", + "type": "Embeddings", + "id": "chromaExistingIndex_0-input-embeddings-Embeddings" + } + ], + "inputs": { + "embeddings": "{{openAIEmbeddings_0.data.instance}}", + "collectionName": "", + "chromaURL": "" + }, + "outputAnchors": [ + { + "name": "output", + "label": "Output", + "type": "options", + "options": [ + { + "id": "chromaExistingIndex_0-output-retriever-Chroma|VectorStoreRetriever|BaseRetriever", + "name": "retriever", + "label": "Chroma Retriever", + "type": "Chroma | VectorStoreRetriever | BaseRetriever" + }, + { + "id": "chromaExistingIndex_0-output-vectorStore-Chroma|VectorStore", + "name": "vectorStore", + "label": "Chroma Vector Store", + "type": "Chroma | VectorStore" + } + ], + "default": "retriever" + } + ], + "outputs": { + "output": "vectorStore" + }, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 269.2940530300552, + "y": 262.41814510537796 + }, + "dragging": false + }, + { + "width": 300, + "height": 702, + "id": "supabaseExistingIndex_0", + "position": { + "x": 273.7097153973373, + "y": 821.872758974335 + }, + "type": "customNode", + "data": { + "id": "supabaseExistingIndex_0", + "label": "Supabase Load Existing Index", + "name": "supabaseExistingIndex", + "type": "Supabase", + "baseClasses": ["Supabase", "VectorStoreRetriever", "BaseRetriever"], + "category": "Vector Stores", + "description": "Load existing index from Supabase (i.e: Document has been upserted)", + "inputParams": [ + { + "label": "Supabase API Key", + "name": "supabaseApiKey", + "type": "password", + "id": 
"supabaseExistingIndex_0-input-supabaseApiKey-password" + }, + { + "label": "Supabase Project URL", + "name": "supabaseProjUrl", + "type": "string", + "id": "supabaseExistingIndex_0-input-supabaseProjUrl-string" + }, + { + "label": "Table Name", + "name": "tableName", + "type": "string", + "id": "supabaseExistingIndex_0-input-tableName-string" + }, + { + "label": "Query Name", + "name": "queryName", + "type": "string", + "id": "supabaseExistingIndex_0-input-queryName-string" + }, + { + "label": "Supabase Metadata Filter", + "name": "supabaseMetadataFilter", + "type": "json", + "optional": true, + "additionalParams": true, + "id": "supabaseExistingIndex_0-input-supabaseMetadataFilter-json" + }, + { + "label": "Top K", + "name": "topK", + "description": "Number of top results to fetch. Default to 4", + "placeholder": "4", + "type": "number", + "additionalParams": true, + "optional": true, + "id": "supabaseExistingIndex_0-input-topK-number" + } + ], + "inputAnchors": [ + { + "label": "Embeddings", + "name": "embeddings", + "type": "Embeddings", + "id": "supabaseExistingIndex_0-input-embeddings-Embeddings" + } + ], + "inputs": { + "embeddings": "{{openAIEmbeddings_0.data.instance}}", + "supabaseProjUrl": "", + "tableName": "", + "queryName": "", + "supabaseMetadataFilter": "" + }, + "outputAnchors": [ + { + "name": "output", + "label": "Output", + "type": "options", + "options": [ + { + "id": "supabaseExistingIndex_0-output-retriever-Supabase|VectorStoreRetriever|BaseRetriever", + "name": "retriever", + "label": "Supabase Retriever", + "type": "Supabase | VectorStoreRetriever | BaseRetriever" + }, + { + "id": "supabaseExistingIndex_0-output-vectorStore-Supabase|VectorStore", + "name": "vectorStore", + "label": "Supabase Vector Store", + "type": "Supabase | VectorStore" + } + ], + "default": "retriever" + } + ], + "outputs": { + "output": "vectorStore" + }, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 273.7097153973373, + "y": 
821.872758974335 + }, + "dragging": false + } + ], + "edges": [ + { + "source": "vectorStoreRetriever_0", + "sourceHandle": "vectorStoreRetriever_0-output-vectorStoreRetriever-VectorStoreRetriever", + "target": "multiRetrievalQAChain_0", + "targetHandle": "multiRetrievalQAChain_0-input-vectorStoreRetriever-VectorStoreRetriever", + "type": "buttonedge", + "id": "vectorStoreRetriever_0-vectorStoreRetriever_0-output-vectorStoreRetriever-VectorStoreRetriever-multiRetrievalQAChain_0-multiRetrievalQAChain_0-input-vectorStoreRetriever-VectorStoreRetriever", + "data": { + "label": "" + } + }, + { + "source": "vectorStoreRetriever_1", + "sourceHandle": "vectorStoreRetriever_1-output-vectorStoreRetriever-VectorStoreRetriever", + "target": "multiRetrievalQAChain_0", + "targetHandle": "multiRetrievalQAChain_0-input-vectorStoreRetriever-VectorStoreRetriever", + "type": "buttonedge", + "id": "vectorStoreRetriever_1-vectorStoreRetriever_1-output-vectorStoreRetriever-VectorStoreRetriever-multiRetrievalQAChain_0-multiRetrievalQAChain_0-input-vectorStoreRetriever-VectorStoreRetriever", + "data": { + "label": "" + } + }, + { + "source": "vectorStoreRetriever_2", + "sourceHandle": "vectorStoreRetriever_2-output-vectorStoreRetriever-VectorStoreRetriever", + "target": "multiRetrievalQAChain_0", + "targetHandle": "multiRetrievalQAChain_0-input-vectorStoreRetriever-VectorStoreRetriever", + "type": "buttonedge", + "id": "vectorStoreRetriever_2-vectorStoreRetriever_2-output-vectorStoreRetriever-VectorStoreRetriever-multiRetrievalQAChain_0-multiRetrievalQAChain_0-input-vectorStoreRetriever-VectorStoreRetriever", + "data": { + "label": "" + } + }, + { + "source": "chatOpenAI_0", + "sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain", + "target": "multiRetrievalQAChain_0", + "targetHandle": "multiRetrievalQAChain_0-input-model-BaseLanguageModel", + "type": "buttonedge", + "id": 
"chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain-multiRetrievalQAChain_0-multiRetrievalQAChain_0-input-model-BaseLanguageModel", + "data": { + "label": "" + } + }, + { + "source": "pineconeExistingIndex_0", + "sourceHandle": "pineconeExistingIndex_0-output-vectorStore-Pinecone|VectorStore", + "target": "vectorStoreRetriever_2", + "targetHandle": "vectorStoreRetriever_2-input-vectorStore-VectorStore", + "type": "buttonedge", + "id": "pineconeExistingIndex_0-pineconeExistingIndex_0-output-vectorStore-Pinecone|VectorStore-vectorStoreRetriever_2-vectorStoreRetriever_2-input-vectorStore-VectorStore", + "data": { + "label": "" + } + }, + { + "source": "openAIEmbeddings_0", + "sourceHandle": "openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings", + "target": "pineconeExistingIndex_0", + "targetHandle": "pineconeExistingIndex_0-input-embeddings-Embeddings", + "type": "buttonedge", + "id": "openAIEmbeddings_0-openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings-pineconeExistingIndex_0-pineconeExistingIndex_0-input-embeddings-Embeddings", + "data": { + "label": "" + } + }, + { + "source": "chromaExistingIndex_0", + "sourceHandle": "chromaExistingIndex_0-output-vectorStore-Chroma|VectorStore", + "target": "vectorStoreRetriever_1", + "targetHandle": "vectorStoreRetriever_1-input-vectorStore-VectorStore", + "type": "buttonedge", + "id": "chromaExistingIndex_0-chromaExistingIndex_0-output-vectorStore-Chroma|VectorStore-vectorStoreRetriever_1-vectorStoreRetriever_1-input-vectorStore-VectorStore", + "data": { + "label": "" + } + }, + { + "source": "openAIEmbeddings_0", + "sourceHandle": "openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings", + "target": "chromaExistingIndex_0", + "targetHandle": "chromaExistingIndex_0-input-embeddings-Embeddings", + "type": "buttonedge", + "id": 
"openAIEmbeddings_0-openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings-chromaExistingIndex_0-chromaExistingIndex_0-input-embeddings-Embeddings", + "data": { + "label": "" + } + }, + { + "source": "openAIEmbeddings_0", + "sourceHandle": "openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings", + "target": "supabaseExistingIndex_0", + "targetHandle": "supabaseExistingIndex_0-input-embeddings-Embeddings", + "type": "buttonedge", + "id": "openAIEmbeddings_0-openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings-supabaseExistingIndex_0-supabaseExistingIndex_0-input-embeddings-Embeddings", + "data": { + "label": "" + } + }, + { + "source": "supabaseExistingIndex_0", + "sourceHandle": "supabaseExistingIndex_0-output-vectorStore-Supabase|VectorStore", + "target": "vectorStoreRetriever_0", + "targetHandle": "vectorStoreRetriever_0-input-vectorStore-VectorStore", + "type": "buttonedge", + "id": "supabaseExistingIndex_0-supabaseExistingIndex_0-output-vectorStore-Supabase|VectorStore-vectorStoreRetriever_0-vectorStoreRetriever_0-input-vectorStore-VectorStore", + "data": { + "label": "" + } + } + ] +} diff --git a/packages/server/marketplaces/Multiple VectorDB.json b/packages/server/marketplaces/Multiple VectorDB.json new file mode 100644 index 0000000000000000000000000000000000000000..05f7ca5e12a7de010244ad9e3bd3160dbc3316ab --- /dev/null +++ b/packages/server/marketplaces/Multiple VectorDB.json @@ -0,0 +1,1368 @@ +{ + "description": "Use the agent to choose between multiple different vector databases, with the ability to use other tools", + "nodes": [ + { + "width": 300, + "height": 329, + "id": "openAIEmbeddings_2", + "position": { + "x": 155.07832615625986, + "y": -778.383353751991 + }, + "type": "customNode", + "data": { + "id": "openAIEmbeddings_2", + "label": "OpenAI Embeddings", + "name": "openAIEmbeddings", + "type": "OpenAIEmbeddings", + "baseClasses": ["OpenAIEmbeddings", "Embeddings"], + "category": "Embeddings", + 
"description": "OpenAI API to generate embeddings for a given text", + "inputParams": [ + { + "label": "OpenAI Api Key", + "name": "openAIApiKey", + "type": "password", + "id": "openAIEmbeddings_2-input-openAIApiKey-password" + }, + { + "label": "Strip New Lines", + "name": "stripNewLines", + "type": "boolean", + "optional": true, + "additionalParams": true, + "id": "openAIEmbeddings_2-input-stripNewLines-boolean" + }, + { + "label": "Batch Size", + "name": "batchSize", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAIEmbeddings_2-input-batchSize-number" + }, + { + "label": "Timeout", + "name": "timeout", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAIEmbeddings_2-input-timeout-number" + }, + { + "label": "BasePath", + "name": "basepath", + "type": "string", + "optional": true, + "additionalParams": true, + "id": "openAIEmbeddings_2-input-basepath-string" + } + ], + "inputAnchors": [], + "inputs": { + "stripNewLines": "", + "batchSize": "", + "timeout": "" + }, + "outputAnchors": [ + { + "id": "openAIEmbeddings_2-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings", + "name": "openAIEmbeddings", + "label": "OpenAIEmbeddings", + "type": "OpenAIEmbeddings | Embeddings" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 155.07832615625986, + "y": -778.383353751991 + }, + "dragging": false + }, + { + "width": 300, + "height": 505, + "id": "chromaExistingIndex_1", + "position": { + "x": 522.8177328694987, + "y": -723.8834555183237 + }, + "type": "customNode", + "data": { + "id": "chromaExistingIndex_1", + "label": "Chroma Load Existing Index", + "name": "chromaExistingIndex", + "type": "Chroma", + "baseClasses": ["Chroma", "VectorStoreRetriever", "BaseRetriever"], + "category": "Vector Stores", + "description": "Load existing index from Chroma (i.e: Document has been upserted)", + "inputParams": [ + { + "label": "Collection Name", + "name": 
"collectionName", + "type": "string", + "id": "chromaExistingIndex_1-input-collectionName-string" + }, + { + "label": "Chroma URL", + "name": "chromaURL", + "type": "string", + "optional": true, + "id": "chromaExistingIndex_1-input-chromaURL-string" + }, + { + "label": "Top K", + "name": "topK", + "description": "Number of top results to fetch. Default to 4", + "placeholder": "4", + "type": "number", + "additionalParams": true, + "optional": true, + "id": "chromaExistingIndex_1-input-topK-number" + } + ], + "inputAnchors": [ + { + "label": "Embeddings", + "name": "embeddings", + "type": "Embeddings", + "id": "chromaExistingIndex_1-input-embeddings-Embeddings" + } + ], + "inputs": { + "embeddings": "{{openAIEmbeddings_2.data.instance}}", + "collectionName": "ai-paper" + }, + "outputAnchors": [ + { + "name": "output", + "label": "Output", + "type": "options", + "options": [ + { + "id": "chromaExistingIndex_1-output-retriever-Chroma|VectorStoreRetriever|BaseRetriever", + "name": "retriever", + "label": "Chroma Retriever", + "type": "Chroma | VectorStoreRetriever | BaseRetriever" + }, + { + "id": "chromaExistingIndex_1-output-vectorStore-Chroma|VectorStore", + "name": "vectorStore", + "label": "Chroma Vector Store", + "type": "Chroma | VectorStore" + } + ], + "default": "retriever" + } + ], + "outputs": { + "output": "retriever" + }, + "selected": false + }, + "positionAbsolute": { + "x": 522.8177328694987, + "y": -723.8834555183237 + }, + "selected": false, + "dragging": false + }, + { + "width": 300, + "height": 523, + "id": "openAI_3", + "position": { + "x": 527.7101375911075, + "y": -1290.6752949922043 + }, + "type": "customNode", + "data": { + "id": "openAI_3", + "label": "OpenAI", + "name": "openAI", + "type": "OpenAI", + "baseClasses": ["OpenAI", "BaseLLM", "BaseLanguageModel", "BaseLangChain"], + "category": "LLMs", + "description": "Wrapper around OpenAI large language models", + "inputParams": [ + { + "label": "OpenAI Api Key", + "name": "openAIApiKey", + 
"type": "password", + "id": "openAI_3-input-openAIApiKey-password" + }, + { + "label": "Model Name", + "name": "modelName", + "type": "options", + "options": [ + { + "label": "text-davinci-003", + "name": "text-davinci-003" + }, + { + "label": "text-davinci-002", + "name": "text-davinci-002" + }, + { + "label": "text-curie-001", + "name": "text-curie-001" + }, + { + "label": "text-babbage-001", + "name": "text-babbage-001" + } + ], + "default": "text-davinci-003", + "optional": true, + "id": "openAI_3-input-modelName-options" + }, + { + "label": "Temperature", + "name": "temperature", + "type": "number", + "default": 0.7, + "optional": true, + "id": "openAI_3-input-temperature-number" + }, + { + "label": "Max Tokens", + "name": "maxTokens", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_3-input-maxTokens-number" + }, + { + "label": "Top Probability", + "name": "topP", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_3-input-topP-number" + }, + { + "label": "Best Of", + "name": "bestOf", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_3-input-bestOf-number" + }, + { + "label": "Frequency Penalty", + "name": "frequencyPenalty", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_3-input-frequencyPenalty-number" + }, + { + "label": "Presence Penalty", + "name": "presencePenalty", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_3-input-presencePenalty-number" + }, + { + "label": "Batch Size", + "name": "batchSize", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_3-input-batchSize-number" + }, + { + "label": "Timeout", + "name": "timeout", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_3-input-timeout-number" + }, + { + "label": "BasePath", + "name": "basepath", + "type": "string", + "optional": true, + "additionalParams": 
true, + "id": "openAI_3-input-basepath-string" + } + ], + "inputAnchors": [], + "inputs": { + "modelName": "text-davinci-003", + "temperature": 0.7, + "maxTokens": "", + "topP": "", + "bestOf": "", + "frequencyPenalty": "", + "presencePenalty": "", + "batchSize": "", + "timeout": "" + }, + "outputAnchors": [ + { + "id": "openAI_3-output-openAI-OpenAI|BaseLLM|BaseLanguageModel|BaseLangChain", + "name": "openAI", + "label": "OpenAI", + "type": "OpenAI | BaseLLM | BaseLanguageModel | BaseLangChain" + } + ], + "outputs": {}, + "selected": false + }, + "positionAbsolute": { + "x": 527.7101375911075, + "y": -1290.6752949922043 + }, + "selected": false, + "dragging": false + }, + { + "width": 300, + "height": 601, + "id": "chainTool_2", + "position": { + "x": 1251.240972921597, + "y": -922.9180420195128 + }, + "type": "customNode", + "data": { + "id": "chainTool_2", + "label": "Chain Tool", + "name": "chainTool", + "type": "ChainTool", + "baseClasses": ["ChainTool", "DynamicTool", "Tool", "StructuredTool", "BaseLangChain"], + "category": "Tools", + "description": "Use a chain as allowed tool for agent", + "inputParams": [ + { + "label": "Chain Name", + "name": "name", + "type": "string", + "placeholder": "state-of-union-qa", + "id": "chainTool_2-input-name-string" + }, + { + "label": "Chain Description", + "name": "description", + "type": "string", + "rows": 3, + "placeholder": "State of the Union QA - useful for when you need to ask questions about the most recent state of the union address.", + "id": "chainTool_2-input-description-string" + }, + { + "label": "Return Direct", + "name": "returnDirect", + "type": "boolean", + "optional": true, + "id": "chainTool_2-input-returnDirect-boolean" + } + ], + "inputAnchors": [ + { + "label": "Base Chain", + "name": "baseChain", + "type": "BaseChain", + "id": "chainTool_2-input-baseChain-BaseChain" + } + ], + "inputs": { + "name": "ai-paper-qa", + "description": "AI Paper QA - useful for when you need to ask questions about the 
AI-Generated Content paper.", + "returnDirect": "", + "baseChain": "{{retrievalQAChain_0.data.instance}}" + }, + "outputAnchors": [ + { + "id": "chainTool_2-output-chainTool-ChainTool|DynamicTool|Tool|StructuredTool|BaseLangChain", + "name": "chainTool", + "label": "ChainTool", + "type": "ChainTool | DynamicTool | Tool | StructuredTool | BaseLangChain" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 1251.240972921597, + "y": -922.9180420195128 + }, + "dragging": false + }, + { + "width": 300, + "height": 142, + "id": "calculator_1", + "position": { + "x": 1649.5389102641816, + "y": -835.8729983638877 + }, + "type": "customNode", + "data": { + "id": "calculator_1", + "label": "Calculator", + "name": "calculator", + "type": "Calculator", + "baseClasses": ["Calculator", "Tool", "StructuredTool", "BaseLangChain"], + "category": "Tools", + "description": "Perform calculations on response", + "inputParams": [], + "inputAnchors": [], + "inputs": {}, + "outputAnchors": [ + { + "id": "calculator_1-output-calculator-Calculator|Tool|StructuredTool|BaseLangChain", + "name": "calculator", + "label": "Calculator", + "type": "Calculator | Tool | StructuredTool | BaseLangChain" + } + ], + "outputs": {}, + "selected": false + }, + "positionAbsolute": { + "x": 1649.5389102641816, + "y": -835.8729983638877 + }, + "selected": false, + "dragging": false + }, + { + "width": 300, + "height": 277, + "id": "serpAPI_0", + "position": { + "x": 1654.5273488033688, + "y": -622.1607096176143 + }, + "type": "customNode", + "data": { + "id": "serpAPI_0", + "label": "Serp API", + "name": "serpAPI", + "type": "SerpAPI", + "baseClasses": ["SerpAPI", "Tool", "StructuredTool", "BaseLangChain"], + "category": "Tools", + "description": "Wrapper around SerpAPI - a real-time API to access Google search results", + "inputParams": [ + { + "label": "Serp Api Key", + "name": "apiKey", + "type": "password", + "id": "serpAPI_0-input-apiKey-password" + } + 
], + "inputAnchors": [], + "inputs": {}, + "outputAnchors": [ + { + "id": "serpAPI_0-output-serpAPI-SerpAPI|Tool|StructuredTool|BaseLangChain", + "name": "serpAPI", + "label": "SerpAPI", + "type": "SerpAPI | Tool | StructuredTool | BaseLangChain" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 1654.5273488033688, + "y": -622.1607096176143 + }, + "dragging": false + }, + { + "width": 300, + "height": 329, + "id": "openAIEmbeddings_3", + "position": { + "x": 163.902196956619, + "y": 318.66096921035574 + }, + "type": "customNode", + "data": { + "id": "openAIEmbeddings_3", + "label": "OpenAI Embeddings", + "name": "openAIEmbeddings", + "type": "OpenAIEmbeddings", + "baseClasses": ["OpenAIEmbeddings", "Embeddings"], + "category": "Embeddings", + "description": "OpenAI API to generate embeddings for a given text", + "inputParams": [ + { + "label": "OpenAI Api Key", + "name": "openAIApiKey", + "type": "password", + "id": "openAIEmbeddings_3-input-openAIApiKey-password" + }, + { + "label": "Strip New Lines", + "name": "stripNewLines", + "type": "boolean", + "optional": true, + "additionalParams": true, + "id": "openAIEmbeddings_3-input-stripNewLines-boolean" + }, + { + "label": "Batch Size", + "name": "batchSize", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAIEmbeddings_3-input-batchSize-number" + }, + { + "label": "Timeout", + "name": "timeout", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAIEmbeddings_3-input-timeout-number" + }, + { + "label": "BasePath", + "name": "basepath", + "type": "string", + "optional": true, + "additionalParams": true, + "id": "openAIEmbeddings_3-input-basepath-string" + } + ], + "inputAnchors": [], + "inputs": { + "stripNewLines": "", + "batchSize": "", + "timeout": "" + }, + "outputAnchors": [ + { + "id": "openAIEmbeddings_3-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings", + "name": "openAIEmbeddings", + "label": 
"OpenAIEmbeddings", + "type": "OpenAIEmbeddings | Embeddings" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 163.902196956619, + "y": 318.66096921035574 + }, + "dragging": false + }, + { + "width": 300, + "height": 523, + "id": "openAI_4", + "position": { + "x": 529.8870809493459, + "y": -137.8839994127831 + }, + "type": "customNode", + "data": { + "id": "openAI_4", + "label": "OpenAI", + "name": "openAI", + "type": "OpenAI", + "baseClasses": ["OpenAI", "BaseLLM", "BaseLanguageModel", "BaseLangChain"], + "category": "LLMs", + "description": "Wrapper around OpenAI large language models", + "inputParams": [ + { + "label": "OpenAI Api Key", + "name": "openAIApiKey", + "type": "password", + "id": "openAI_4-input-openAIApiKey-password" + }, + { + "label": "Model Name", + "name": "modelName", + "type": "options", + "options": [ + { + "label": "text-davinci-003", + "name": "text-davinci-003" + }, + { + "label": "text-davinci-002", + "name": "text-davinci-002" + }, + { + "label": "text-curie-001", + "name": "text-curie-001" + }, + { + "label": "text-babbage-001", + "name": "text-babbage-001" + } + ], + "default": "text-davinci-003", + "optional": true, + "id": "openAI_4-input-modelName-options" + }, + { + "label": "Temperature", + "name": "temperature", + "type": "number", + "default": 0.7, + "optional": true, + "id": "openAI_4-input-temperature-number" + }, + { + "label": "Max Tokens", + "name": "maxTokens", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_4-input-maxTokens-number" + }, + { + "label": "Top Probability", + "name": "topP", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_4-input-topP-number" + }, + { + "label": "Best Of", + "name": "bestOf", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_4-input-bestOf-number" + }, + { + "label": "Frequency Penalty", + "name": "frequencyPenalty", + "type": 
"number", + "optional": true, + "additionalParams": true, + "id": "openAI_4-input-frequencyPenalty-number" + }, + { + "label": "Presence Penalty", + "name": "presencePenalty", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_4-input-presencePenalty-number" + }, + { + "label": "Batch Size", + "name": "batchSize", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_4-input-batchSize-number" + }, + { + "label": "Timeout", + "name": "timeout", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_4-input-timeout-number" + }, + { + "label": "BasePath", + "name": "basepath", + "type": "string", + "optional": true, + "additionalParams": true, + "id": "openAI_4-input-basepath-string" + } + ], + "inputAnchors": [], + "inputs": { + "modelName": "text-davinci-003", + "temperature": 0.7, + "maxTokens": "", + "topP": "", + "bestOf": "", + "frequencyPenalty": "", + "presencePenalty": "", + "batchSize": "", + "timeout": "" + }, + "outputAnchors": [ + { + "id": "openAI_4-output-openAI-OpenAI|BaseLLM|BaseLanguageModel|BaseLangChain", + "name": "openAI", + "label": "OpenAI", + "type": "OpenAI | BaseLLM | BaseLanguageModel | BaseLangChain" + } + ], + "outputs": {}, + "selected": false + }, + "positionAbsolute": { + "x": 529.8870809493459, + "y": -137.8839994127831 + }, + "selected": false, + "dragging": false + }, + { + "width": 300, + "height": 603, + "id": "pineconeExistingIndex_1", + "position": { + "x": 525.6644489497978, + "y": 420.1233379157454 + }, + "type": "customNode", + "data": { + "id": "pineconeExistingIndex_1", + "label": "Pinecone Load Existing Index", + "name": "pineconeExistingIndex", + "type": "Pinecone", + "baseClasses": ["Pinecone", "VectorStoreRetriever", "BaseRetriever"], + "category": "Vector Stores", + "description": "Load existing index from Pinecone (i.e: Document has been upserted)", + "inputParams": [ + { + "label": "Pinecone Api Key", + "name": 
"pineconeApiKey", + "type": "password", + "id": "pineconeExistingIndex_1-input-pineconeApiKey-password" + }, + { + "label": "Pinecone Environment", + "name": "pineconeEnv", + "type": "string", + "id": "pineconeExistingIndex_1-input-pineconeEnv-string" + }, + { + "label": "Pinecone Index", + "name": "pineconeIndex", + "type": "string", + "id": "pineconeExistingIndex_1-input-pineconeIndex-string" + }, + { + "label": "Pinecone Namespace", + "name": "pineconeNamespace", + "type": "string", + "placeholder": "my-first-namespace", + "optional": true, + "additionalParams": true, + "id": "pineconeExistingIndex_1-input-pineconeNamespace-string" + }, + { + "label": "Pinecone Metadata Filter", + "name": "pineconeMetadataFilter", + "type": "json", + "optional": true, + "additionalParams": true, + "id": "pineconeExistingIndex_1-input-pineconeMetadataFilter-json" + }, + { + "label": "Top K", + "name": "topK", + "description": "Number of top results to fetch. Default to 4", + "placeholder": "4", + "type": "number", + "additionalParams": true, + "optional": true, + "id": "pineconeExistingIndex_1-input-topK-number" + } + ], + "inputAnchors": [ + { + "label": "Embeddings", + "name": "embeddings", + "type": "Embeddings", + "id": "pineconeExistingIndex_1-input-embeddings-Embeddings" + } + ], + "inputs": { + "embeddings": "{{openAIEmbeddings_3.data.instance}}", + "pineconeEnv": "us-west4-gcp", + "pineconeIndex": "state-of-union", + "pineconeNamespace": "" + }, + "outputAnchors": [ + { + "name": "output", + "label": "Output", + "type": "options", + "options": [ + { + "id": "pineconeExistingIndex_1-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever", + "name": "retriever", + "label": "Pinecone Retriever", + "type": "Pinecone | VectorStoreRetriever | BaseRetriever" + }, + { + "id": "pineconeExistingIndex_1-output-vectorStore-Pinecone|VectorStore", + "name": "vectorStore", + "label": "Pinecone Vector Store", + "type": "Pinecone | VectorStore" + } + ], + "default": "retriever" + } 
+ ], + "outputs": { + "output": "retriever" + }, + "selected": false + }, + "selected": false, + "dragging": false, + "positionAbsolute": { + "x": 525.6644489497978, + "y": 420.1233379157454 + } + }, + { + "width": 300, + "height": 601, + "id": "chainTool_3", + "position": { + "x": 1267.7142132085273, + "y": -85.7749282485849 + }, + "type": "customNode", + "data": { + "id": "chainTool_3", + "label": "Chain Tool", + "name": "chainTool", + "type": "ChainTool", + "baseClasses": ["ChainTool", "DynamicTool", "Tool", "StructuredTool", "BaseLangChain"], + "category": "Tools", + "description": "Use a chain as allowed tool for agent", + "inputParams": [ + { + "label": "Chain Name", + "name": "name", + "type": "string", + "placeholder": "state-of-union-qa", + "id": "chainTool_3-input-name-string" + }, + { + "label": "Chain Description", + "name": "description", + "type": "string", + "rows": 3, + "placeholder": "State of the Union QA - useful for when you need to ask questions about the most recent state of the union address.", + "id": "chainTool_3-input-description-string" + }, + { + "label": "Return Direct", + "name": "returnDirect", + "type": "boolean", + "optional": true, + "id": "chainTool_3-input-returnDirect-boolean" + } + ], + "inputAnchors": [ + { + "label": "Base Chain", + "name": "baseChain", + "type": "BaseChain", + "id": "chainTool_3-input-baseChain-BaseChain" + } + ], + "inputs": { + "name": "state-of-union-qa", + "description": "State of the Union QA - useful for when you need to ask questions about the most recent state of the union address.", + "returnDirect": "", + "baseChain": "{{retrievalQAChain_1.data.instance}}" + }, + "outputAnchors": [ + { + "id": "chainTool_3-output-chainTool-ChainTool|DynamicTool|Tool|StructuredTool|BaseLangChain", + "name": "chainTool", + "label": "ChainTool", + "type": "ChainTool | DynamicTool | Tool | StructuredTool | BaseLangChain" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "dragging": false, + 
"positionAbsolute": { + "x": 1267.7142132085273, + "y": -85.7749282485849 + } + }, + { + "width": 300, + "height": 523, + "id": "openAI_5", + "position": { + "x": 1683.95439713088, + "y": 329.0556949149878 + }, + "type": "customNode", + "data": { + "id": "openAI_5", + "label": "OpenAI", + "name": "openAI", + "type": "OpenAI", + "baseClasses": ["OpenAI", "BaseLLM", "BaseLanguageModel", "BaseLangChain"], + "category": "LLMs", + "description": "Wrapper around OpenAI large language models", + "inputParams": [ + { + "label": "OpenAI Api Key", + "name": "openAIApiKey", + "type": "password", + "id": "openAI_5-input-openAIApiKey-password" + }, + { + "label": "Model Name", + "name": "modelName", + "type": "options", + "options": [ + { + "label": "text-davinci-003", + "name": "text-davinci-003" + }, + { + "label": "text-davinci-002", + "name": "text-davinci-002" + }, + { + "label": "text-curie-001", + "name": "text-curie-001" + }, + { + "label": "text-babbage-001", + "name": "text-babbage-001" + } + ], + "default": "text-davinci-003", + "optional": true, + "id": "openAI_5-input-modelName-options" + }, + { + "label": "Temperature", + "name": "temperature", + "type": "number", + "default": 0.7, + "optional": true, + "id": "openAI_5-input-temperature-number" + }, + { + "label": "Max Tokens", + "name": "maxTokens", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_5-input-maxTokens-number" + }, + { + "label": "Top Probability", + "name": "topP", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_5-input-topP-number" + }, + { + "label": "Best Of", + "name": "bestOf", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_5-input-bestOf-number" + }, + { + "label": "Frequency Penalty", + "name": "frequencyPenalty", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_5-input-frequencyPenalty-number" + }, + { + "label": "Presence Penalty", + "name": 
"presencePenalty", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_5-input-presencePenalty-number" + }, + { + "label": "Batch Size", + "name": "batchSize", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_5-input-batchSize-number" + }, + { + "label": "Timeout", + "name": "timeout", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_5-input-timeout-number" + }, + { + "label": "BasePath", + "name": "basepath", + "type": "string", + "optional": true, + "additionalParams": true, + "id": "openAI_5-input-basepath-string" + } + ], + "inputAnchors": [], + "inputs": { + "modelName": "text-davinci-003", + "temperature": "0", + "maxTokens": "", + "topP": "", + "bestOf": "", + "frequencyPenalty": "", + "presencePenalty": "", + "batchSize": "", + "timeout": "" + }, + "outputAnchors": [ + { + "id": "openAI_5-output-openAI-OpenAI|BaseLLM|BaseLanguageModel|BaseLangChain", + "name": "openAI", + "label": "OpenAI", + "type": "OpenAI | BaseLLM | BaseLanguageModel | BaseLangChain" + } + ], + "outputs": {}, + "selected": false + }, + "positionAbsolute": { + "x": 1683.95439713088, + "y": 329.0556949149878 + }, + "selected": false, + "dragging": false + }, + { + "width": 300, + "height": 279, + "id": "mrklAgentLLM_0", + "position": { + "x": 2061.891333395338, + "y": -140.0694021759809 + }, + "type": "customNode", + "data": { + "id": "mrklAgentLLM_0", + "label": "MRKL Agent for LLMs", + "name": "mrklAgentLLM", + "type": "AgentExecutor", + "baseClasses": ["AgentExecutor", "BaseChain", "BaseLangChain"], + "category": "Agents", + "description": "Agent that uses the ReAct Framework to decide what action to take, optimized to be used with LLMs", + "inputParams": [], + "inputAnchors": [ + { + "label": "Allowed Tools", + "name": "tools", + "type": "Tool", + "list": true, + "id": "mrklAgentLLM_0-input-tools-Tool" + }, + { + "label": "Language Model", + "name": "model", + "type": 
"BaseLanguageModel", + "id": "mrklAgentLLM_0-input-model-BaseLanguageModel" + } + ], + "inputs": { + "tools": [ + "{{serpAPI_0.data.instance}}", + "{{calculator_1.data.instance}}", + "{{chainTool_2.data.instance}}", + "{{chainTool_3.data.instance}}" + ], + "model": "{{openAI_5.data.instance}}" + }, + "outputAnchors": [ + { + "id": "mrklAgentLLM_0-output-mrklAgentLLM-AgentExecutor|BaseChain|BaseLangChain", + "name": "mrklAgentLLM", + "label": "AgentExecutor", + "type": "AgentExecutor | BaseChain | BaseLangChain" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 2061.891333395338, + "y": -140.0694021759809 + }, + "dragging": false + }, + { + "width": 300, + "height": 279, + "id": "retrievalQAChain_0", + "position": { + "x": 898.1253096948574, + "y": -859.1174013418433 + }, + "type": "customNode", + "data": { + "id": "retrievalQAChain_0", + "label": "Retrieval QA Chain", + "name": "retrievalQAChain", + "type": "RetrievalQAChain", + "baseClasses": ["RetrievalQAChain", "BaseChain", "BaseLangChain"], + "category": "Chains", + "description": "QA chain to answer a question based on the retrieved documents", + "inputParams": [], + "inputAnchors": [ + { + "label": "Language Model", + "name": "model", + "type": "BaseLanguageModel", + "id": "retrievalQAChain_0-input-model-BaseLanguageModel" + }, + { + "label": "Vector Store Retriever", + "name": "vectorStoreRetriever", + "type": "BaseRetriever", + "id": "retrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever" + } + ], + "inputs": { + "model": "{{openAI_3.data.instance}}", + "vectorStoreRetriever": "{{chromaExistingIndex_1.data.instance}}" + }, + "outputAnchors": [ + { + "id": "retrievalQAChain_0-output-retrievalQAChain-RetrievalQAChain|BaseChain|BaseLangChain", + "name": "retrievalQAChain", + "label": "RetrievalQAChain", + "type": "RetrievalQAChain | BaseChain | BaseLangChain" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": 
{ + "x": 898.1253096948574, + "y": -859.1174013418433 + }, + "dragging": false + }, + { + "width": 300, + "height": 279, + "id": "retrievalQAChain_1", + "position": { + "x": 895.4349543765911, + "y": 166.60331503487222 + }, + "type": "customNode", + "data": { + "id": "retrievalQAChain_1", + "label": "Retrieval QA Chain", + "name": "retrievalQAChain", + "type": "RetrievalQAChain", + "baseClasses": ["RetrievalQAChain", "BaseChain", "BaseLangChain"], + "category": "Chains", + "description": "QA chain to answer a question based on the retrieved documents", + "inputParams": [], + "inputAnchors": [ + { + "label": "Language Model", + "name": "model", + "type": "BaseLanguageModel", + "id": "retrievalQAChain_1-input-model-BaseLanguageModel" + }, + { + "label": "Vector Store Retriever", + "name": "vectorStoreRetriever", + "type": "BaseRetriever", + "id": "retrievalQAChain_1-input-vectorStoreRetriever-BaseRetriever" + } + ], + "inputs": { + "model": "{{openAI_4.data.instance}}", + "vectorStoreRetriever": "{{pineconeExistingIndex_1.data.instance}}" + }, + "outputAnchors": [ + { + "id": "retrievalQAChain_1-output-retrievalQAChain-RetrievalQAChain|BaseChain|BaseLangChain", + "name": "retrievalQAChain", + "label": "RetrievalQAChain", + "type": "RetrievalQAChain | BaseChain | BaseLangChain" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 895.4349543765911, + "y": 166.60331503487222 + }, + "dragging": false + } + ], + "edges": [ + { + "source": "openAIEmbeddings_2", + "sourceHandle": "openAIEmbeddings_2-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings", + "target": "chromaExistingIndex_1", + "targetHandle": "chromaExistingIndex_1-input-embeddings-Embeddings", + "type": "buttonedge", + "id": "openAIEmbeddings_2-openAIEmbeddings_2-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings-chromaExistingIndex_1-chromaExistingIndex_1-input-embeddings-Embeddings", + "data": { + "label": "" + } + }, + { + "source": "openAIEmbeddings_3", 
+ "sourceHandle": "openAIEmbeddings_3-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings", + "target": "pineconeExistingIndex_1", + "targetHandle": "pineconeExistingIndex_1-input-embeddings-Embeddings", + "type": "buttonedge", + "id": "openAIEmbeddings_3-openAIEmbeddings_3-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings-pineconeExistingIndex_1-pineconeExistingIndex_1-input-embeddings-Embeddings", + "data": { + "label": "" + } + }, + { + "source": "serpAPI_0", + "sourceHandle": "serpAPI_0-output-serpAPI-SerpAPI|Tool|StructuredTool|BaseLangChain", + "target": "mrklAgentLLM_0", + "targetHandle": "mrklAgentLLM_0-input-tools-Tool", + "type": "buttonedge", + "id": "serpAPI_0-serpAPI_0-output-serpAPI-SerpAPI|Tool|StructuredTool|BaseLangChain-mrklAgentLLM_0-mrklAgentLLM_0-input-tools-Tool", + "data": { + "label": "" + } + }, + { + "source": "calculator_1", + "sourceHandle": "calculator_1-output-calculator-Calculator|Tool|StructuredTool|BaseLangChain", + "target": "mrklAgentLLM_0", + "targetHandle": "mrklAgentLLM_0-input-tools-Tool", + "type": "buttonedge", + "id": "calculator_1-calculator_1-output-calculator-Calculator|Tool|StructuredTool|BaseLangChain-mrklAgentLLM_0-mrklAgentLLM_0-input-tools-Tool", + "data": { + "label": "" + } + }, + { + "source": "chainTool_2", + "sourceHandle": "chainTool_2-output-chainTool-ChainTool|DynamicTool|Tool|StructuredTool|BaseLangChain", + "target": "mrklAgentLLM_0", + "targetHandle": "mrklAgentLLM_0-input-tools-Tool", + "type": "buttonedge", + "id": "chainTool_2-chainTool_2-output-chainTool-ChainTool|DynamicTool|Tool|StructuredTool|BaseLangChain-mrklAgentLLM_0-mrklAgentLLM_0-input-tools-Tool", + "data": { + "label": "" + } + }, + { + "source": "chainTool_3", + "sourceHandle": "chainTool_3-output-chainTool-ChainTool|DynamicTool|Tool|StructuredTool|BaseLangChain", + "target": "mrklAgentLLM_0", + "targetHandle": "mrklAgentLLM_0-input-tools-Tool", + "type": "buttonedge", + "id": 
"chainTool_3-chainTool_3-output-chainTool-ChainTool|DynamicTool|Tool|StructuredTool|BaseLangChain-mrklAgentLLM_0-mrklAgentLLM_0-input-tools-Tool", + "data": { + "label": "" + } + }, + { + "source": "openAI_5", + "sourceHandle": "openAI_5-output-openAI-OpenAI|BaseLLM|BaseLanguageModel|BaseLangChain", + "target": "mrklAgentLLM_0", + "targetHandle": "mrklAgentLLM_0-input-model-BaseLanguageModel", + "type": "buttonedge", + "id": "openAI_5-openAI_5-output-openAI-OpenAI|BaseLLM|BaseLanguageModel|BaseLangChain-mrklAgentLLM_0-mrklAgentLLM_0-input-model-BaseLanguageModel", + "data": { + "label": "" + } + }, + { + "source": "openAI_3", + "sourceHandle": "openAI_3-output-openAI-OpenAI|BaseLLM|BaseLanguageModel|BaseLangChain", + "target": "retrievalQAChain_0", + "targetHandle": "retrievalQAChain_0-input-model-BaseLanguageModel", + "type": "buttonedge", + "id": "openAI_3-openAI_3-output-openAI-OpenAI|BaseLLM|BaseLanguageModel|BaseLangChain-retrievalQAChain_0-retrievalQAChain_0-input-model-BaseLanguageModel", + "data": { + "label": "" + } + }, + { + "source": "chromaExistingIndex_1", + "sourceHandle": "chromaExistingIndex_1-output-retriever-Chroma|VectorStoreRetriever|BaseRetriever", + "target": "retrievalQAChain_0", + "targetHandle": "retrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever", + "type": "buttonedge", + "id": "chromaExistingIndex_1-chromaExistingIndex_1-output-retriever-Chroma|VectorStoreRetriever|BaseRetriever-retrievalQAChain_0-retrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever", + "data": { + "label": "" + } + }, + { + "source": "retrievalQAChain_0", + "sourceHandle": "retrievalQAChain_0-output-retrievalQAChain-RetrievalQAChain|BaseChain|BaseLangChain", + "target": "chainTool_2", + "targetHandle": "chainTool_2-input-baseChain-BaseChain", + "type": "buttonedge", + "id": "retrievalQAChain_0-retrievalQAChain_0-output-retrievalQAChain-RetrievalQAChain|BaseChain|BaseLangChain-chainTool_2-chainTool_2-input-baseChain-BaseChain", + "data": { + "label": "" 
+ } + }, + { + "source": "openAI_4", + "sourceHandle": "openAI_4-output-openAI-OpenAI|BaseLLM|BaseLanguageModel|BaseLangChain", + "target": "retrievalQAChain_1", + "targetHandle": "retrievalQAChain_1-input-model-BaseLanguageModel", + "type": "buttonedge", + "id": "openAI_4-openAI_4-output-openAI-OpenAI|BaseLLM|BaseLanguageModel|BaseLangChain-retrievalQAChain_1-retrievalQAChain_1-input-model-BaseLanguageModel", + "data": { + "label": "" + } + }, + { + "source": "pineconeExistingIndex_1", + "sourceHandle": "pineconeExistingIndex_1-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever", + "target": "retrievalQAChain_1", + "targetHandle": "retrievalQAChain_1-input-vectorStoreRetriever-BaseRetriever", + "type": "buttonedge", + "id": "pineconeExistingIndex_1-pineconeExistingIndex_1-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever-retrievalQAChain_1-retrievalQAChain_1-input-vectorStoreRetriever-BaseRetriever", + "data": { + "label": "" + } + }, + { + "source": "retrievalQAChain_1", + "sourceHandle": "retrievalQAChain_1-output-retrievalQAChain-RetrievalQAChain|BaseChain|BaseLangChain", + "target": "chainTool_3", + "targetHandle": "chainTool_3-input-baseChain-BaseChain", + "type": "buttonedge", + "id": "retrievalQAChain_1-retrievalQAChain_1-output-retrievalQAChain-RetrievalQAChain|BaseChain|BaseLangChain-chainTool_3-chainTool_3-input-baseChain-BaseChain", + "data": { + "label": "" + } + } + ] +} diff --git a/packages/server/marketplaces/Prompt Chaining.json b/packages/server/marketplaces/Prompt Chaining.json new file mode 100644 index 0000000000000000000000000000000000000000..33a64081aec28039d9475fdd41a77bccd334ddf8 --- /dev/null +++ b/packages/server/marketplaces/Prompt Chaining.json @@ -0,0 +1,650 @@ +{ + "description": "Use output from a chain as prompt for another chain", + "nodes": [ + { + "width": 300, + "height": 526, + "id": "openAI_2", + "position": { + "x": 793.6674026500068, + "y": -20.826430802683774 + }, + "type": "customNode", + "data": { + 
"id": "openAI_2", + "label": "OpenAI", + "name": "openAI", + "type": "OpenAI", + "baseClasses": ["OpenAI", "BaseLLM", "BaseLanguageModel", "BaseLangChain"], + "category": "LLMs", + "description": "Wrapper around OpenAI large language models", + "inputParams": [ + { + "label": "OpenAI Api Key", + "name": "openAIApiKey", + "type": "password", + "id": "openAI_2-input-openAIApiKey-password" + }, + { + "label": "Model Name", + "name": "modelName", + "type": "options", + "options": [ + { + "label": "text-davinci-003", + "name": "text-davinci-003" + }, + { + "label": "text-davinci-002", + "name": "text-davinci-002" + }, + { + "label": "text-curie-001", + "name": "text-curie-001" + }, + { + "label": "text-babbage-001", + "name": "text-babbage-001" + } + ], + "default": "text-davinci-003", + "optional": true, + "id": "openAI_2-input-modelName-options" + }, + { + "label": "Temperature", + "name": "temperature", + "type": "number", + "default": 0.7, + "optional": true, + "id": "openAI_2-input-temperature-number" + }, + { + "label": "Max Tokens", + "name": "maxTokens", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_2-input-maxTokens-number" + }, + { + "label": "Top Probability", + "name": "topP", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_2-input-topP-number" + }, + { + "label": "Best Of", + "name": "bestOf", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_2-input-bestOf-number" + }, + { + "label": "Frequency Penalty", + "name": "frequencyPenalty", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_2-input-frequencyPenalty-number" + }, + { + "label": "Presence Penalty", + "name": "presencePenalty", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_2-input-presencePenalty-number" + }, + { + "label": "Batch Size", + "name": "batchSize", + "type": "number", + "optional": true, + "additionalParams": 
true, + "id": "openAI_2-input-batchSize-number" + }, + { + "label": "Timeout", + "name": "timeout", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_2-input-timeout-number" + }, + { + "label": "BasePath", + "name": "basepath", + "type": "string", + "optional": true, + "additionalParams": true, + "id": "openAI_2-input-basepath-string" + } + ], + "inputAnchors": [], + "inputs": { + "modelName": "text-davinci-003", + "temperature": 0.7, + "maxTokens": "", + "topP": "", + "bestOf": "", + "frequencyPenalty": "", + "presencePenalty": "", + "batchSize": "", + "timeout": "" + }, + "outputAnchors": [ + { + "id": "openAI_2-output-openAI-OpenAI|BaseLLM|BaseLanguageModel|BaseLangChain", + "name": "openAI", + "label": "OpenAI", + "type": "OpenAI | BaseLLM | BaseLanguageModel | BaseLangChain" + } + ], + "outputs": {}, + "selected": false + }, + "positionAbsolute": { + "x": 793.6674026500068, + "y": -20.826430802683774 + }, + "selected": false, + "dragging": false + }, + { + "width": 300, + "height": 534, + "id": "promptTemplate_2", + "position": { + "x": 796.3399644963663, + "y": 512.349657546027 + }, + "type": "customNode", + "data": { + "id": "promptTemplate_2", + "label": "Prompt Template", + "name": "promptTemplate", + "type": "PromptTemplate", + "baseClasses": ["PromptTemplate", "BaseStringPromptTemplate", "BasePromptTemplate"], + "category": "Prompts", + "description": "Schema to represent a basic prompt for an LLM", + "inputParams": [ + { + "label": "Template", + "name": "template", + "type": "string", + "rows": 4, + "placeholder": "What is a good name for a company that makes {product}?", + "id": "promptTemplate_2-input-template-string" + }, + { + "label": "Format Prompt Values", + "name": "promptValues", + "type": "string", + "rows": 4, + "placeholder": "{\n \"input_language\": \"English\",\n \"output_language\": \"French\"\n}", + "optional": true, + "acceptVariable": true, + "list": true, + "id": 
"promptTemplate_2-input-promptValues-string" + } + ], + "inputAnchors": [], + "inputs": { + "template": "You are an AI who performs one task based on the following objective: {objective}.\nRespond with how you would complete this task:", + "promptValues": "{\n \"objective\": \"{{question}}\"\n}" + }, + "outputAnchors": [ + { + "id": "promptTemplate_2-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate", + "name": "promptTemplate", + "label": "PromptTemplate", + "type": "PromptTemplate | BaseStringPromptTemplate | BasePromptTemplate" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 796.3399644963663, + "y": 512.349657546027 + }, + "dragging": false + }, + { + "width": 300, + "height": 407, + "id": "llmChain_2", + "position": { + "x": 1225.2861408370582, + "y": 485.62403908243243 + }, + "type": "customNode", + "data": { + "id": "llmChain_2", + "label": "LLM Chain", + "name": "llmChain", + "type": "LLMChain", + "baseClasses": ["LLMChain", "BaseChain", "BaseLangChain"], + "category": "Chains", + "description": "Chain to run queries against LLMs", + "inputParams": [ + { + "label": "Chain Name", + "name": "chainName", + "type": "string", + "placeholder": "Name Your Chain", + "optional": true, + "id": "llmChain_2-input-chainName-string" + } + ], + "inputAnchors": [ + { + "label": "Language Model", + "name": "model", + "type": "BaseLanguageModel", + "id": "llmChain_2-input-model-BaseLanguageModel" + }, + { + "label": "Prompt", + "name": "prompt", + "type": "BasePromptTemplate", + "id": "llmChain_2-input-prompt-BasePromptTemplate" + } + ], + "inputs": { + "model": "{{openAI_2.data.instance}}", + "prompt": "{{promptTemplate_2.data.instance}}", + "chainName": "First Chain" + }, + "outputAnchors": [ + { + "name": "output", + "label": "Output", + "type": "options", + "options": [ + { + "id": "llmChain_2-output-llmChain-LLMChain|BaseChain|BaseLangChain", + "name": "llmChain", + "label": "LLM 
Chain", + "type": "LLMChain | BaseChain | BaseLangChain" + }, + { + "id": "llmChain_2-output-outputPrediction-string", + "name": "outputPrediction", + "label": "Output Prediction", + "type": "string" + } + ], + "default": "llmChain" + } + ], + "outputs": { + "output": "outputPrediction" + }, + "selected": false + }, + "selected": false, + "dragging": false, + "positionAbsolute": { + "x": 1225.2861408370582, + "y": 485.62403908243243 + } + }, + { + "width": 300, + "height": 534, + "id": "promptTemplate_3", + "position": { + "x": 1589.206555911206, + "y": 460.23470154201766 + }, + "type": "customNode", + "data": { + "id": "promptTemplate_3", + "label": "Prompt Template", + "name": "promptTemplate", + "type": "PromptTemplate", + "baseClasses": ["PromptTemplate", "BaseStringPromptTemplate", "BasePromptTemplate"], + "category": "Prompts", + "description": "Schema to represent a basic prompt for an LLM", + "inputParams": [ + { + "label": "Template", + "name": "template", + "type": "string", + "rows": 4, + "placeholder": "What is a good name for a company that makes {product}?", + "id": "promptTemplate_3-input-template-string" + }, + { + "label": "Format Prompt Values", + "name": "promptValues", + "type": "string", + "rows": 4, + "placeholder": "{\n \"input_language\": \"English\",\n \"output_language\": \"French\"\n}", + "optional": true, + "acceptVariable": true, + "list": true, + "id": "promptTemplate_3-input-promptValues-string" + } + ], + "inputAnchors": [], + "inputs": { + "template": "You are a task creation AI that uses the result of an execution agent to create new tasks with the following objective: {objective}.\nThe last completed task has the result: {result}.\nBased on the result, create new tasks to be completed by the AI system that do not overlap with result.\nReturn the tasks as an array.", + "promptValues": "{\n \"objective\": \"{{question}}\",\n \"result\": \"\"\n}" + }, + "outputAnchors": [ + { + "id": 
"promptTemplate_3-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate", + "name": "promptTemplate", + "label": "PromptTemplate", + "type": "PromptTemplate | BaseStringPromptTemplate | BasePromptTemplate" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 1589.206555911206, + "y": 460.23470154201766 + }, + "dragging": false + }, + { + "width": 300, + "height": 526, + "id": "openAI_3", + "position": { + "x": 1225.2861408370586, + "y": -62.7856517905272 + }, + "type": "customNode", + "data": { + "id": "openAI_3", + "label": "OpenAI", + "name": "openAI", + "type": "OpenAI", + "baseClasses": ["OpenAI", "BaseLLM", "BaseLanguageModel", "BaseLangChain"], + "category": "LLMs", + "description": "Wrapper around OpenAI large language models", + "inputParams": [ + { + "label": "OpenAI Api Key", + "name": "openAIApiKey", + "type": "password", + "id": "openAI_3-input-openAIApiKey-password" + }, + { + "label": "Model Name", + "name": "modelName", + "type": "options", + "options": [ + { + "label": "text-davinci-003", + "name": "text-davinci-003" + }, + { + "label": "text-davinci-002", + "name": "text-davinci-002" + }, + { + "label": "text-curie-001", + "name": "text-curie-001" + }, + { + "label": "text-babbage-001", + "name": "text-babbage-001" + } + ], + "default": "text-davinci-003", + "optional": true, + "id": "openAI_3-input-modelName-options" + }, + { + "label": "Temperature", + "name": "temperature", + "type": "number", + "default": 0.7, + "optional": true, + "id": "openAI_3-input-temperature-number" + }, + { + "label": "Max Tokens", + "name": "maxTokens", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_3-input-maxTokens-number" + }, + { + "label": "Top Probability", + "name": "topP", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_3-input-topP-number" + }, + { + "label": "Best Of", + "name": "bestOf", + "type": "number", + 
"optional": true, + "additionalParams": true, + "id": "openAI_3-input-bestOf-number" + }, + { + "label": "Frequency Penalty", + "name": "frequencyPenalty", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_3-input-frequencyPenalty-number" + }, + { + "label": "Presence Penalty", + "name": "presencePenalty", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_3-input-presencePenalty-number" + }, + { + "label": "Batch Size", + "name": "batchSize", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_3-input-batchSize-number" + }, + { + "label": "Timeout", + "name": "timeout", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_3-input-timeout-number" + }, + { + "label": "BasePath", + "name": "basepath", + "type": "string", + "optional": true, + "additionalParams": true, + "id": "openAI_3-input-basepath-string" + } + ], + "inputAnchors": [], + "inputs": { + "modelName": "text-davinci-003", + "temperature": 0.7, + "maxTokens": "", + "topP": "", + "bestOf": "", + "frequencyPenalty": "", + "presencePenalty": "", + "batchSize": "", + "timeout": "" + }, + "outputAnchors": [ + { + "id": "openAI_3-output-openAI-OpenAI|BaseLLM|BaseLanguageModel|BaseLangChain", + "name": "openAI", + "label": "OpenAI", + "type": "OpenAI | BaseLLM | BaseLanguageModel | BaseLangChain" + } + ], + "outputs": {}, + "selected": false + }, + "positionAbsolute": { + "x": 1225.2861408370586, + "y": -62.7856517905272 + }, + "selected": false, + "dragging": false + }, + { + "width": 300, + "height": 407, + "id": "llmChain_3", + "position": { + "x": 1972.2671768945252, + "y": 142.73435419451476 + }, + "type": "customNode", + "data": { + "id": "llmChain_3", + "label": "LLM Chain", + "name": "llmChain", + "type": "LLMChain", + "baseClasses": ["LLMChain", "BaseChain", "BaseLangChain"], + "category": "Chains", + "description": "Chain to run queries against LLMs", + "inputParams": [ + 
{ + "label": "Chain Name", + "name": "chainName", + "type": "string", + "placeholder": "Name Your Chain", + "optional": true, + "id": "llmChain_3-input-chainName-string" + } + ], + "inputAnchors": [ + { + "label": "Language Model", + "name": "model", + "type": "BaseLanguageModel", + "id": "llmChain_3-input-model-BaseLanguageModel" + }, + { + "label": "Prompt", + "name": "prompt", + "type": "BasePromptTemplate", + "id": "llmChain_3-input-prompt-BasePromptTemplate" + } + ], + "inputs": { + "model": "{{openAI_3.data.instance}}", + "prompt": "{{promptTemplate_3.data.instance}}", + "chainName": "LastChain" + }, + "outputAnchors": [ + { + "name": "output", + "label": "Output", + "type": "options", + "options": [ + { + "id": "llmChain_3-output-llmChain-LLMChain|BaseChain|BaseLangChain", + "name": "llmChain", + "label": "LLM Chain", + "type": "LLMChain | BaseChain | BaseLangChain" + }, + { + "id": "llmChain_3-output-outputPrediction-string", + "name": "outputPrediction", + "label": "Output Prediction", + "type": "string" + } + ], + "default": "llmChain" + } + ], + "outputs": { + "output": "llmChain" + }, + "selected": false + }, + "selected": false, + "dragging": false, + "positionAbsolute": { + "x": 1972.2671768945252, + "y": 142.73435419451476 + } + } + ], + "edges": [ + { + "source": "llmChain_2", + "sourceHandle": "llmChain_2-output-outputPrediction-string", + "target": "promptTemplate_3", + "targetHandle": "promptTemplate_3-input-promptValues-string", + "type": "buttonedge", + "id": "llmChain_2-llmChain_2-output-outputPrediction-string-promptTemplate_3-promptTemplate_3-input-promptValues-string", + "data": { + "label": "" + } + }, + { + "source": "openAI_2", + "sourceHandle": "openAI_2-output-openAI-OpenAI|BaseLLM|BaseLanguageModel|BaseLangChain", + "target": "llmChain_2", + "targetHandle": "llmChain_2-input-model-BaseLanguageModel", + "type": "buttonedge", + "id": 
"openAI_2-openAI_2-output-openAI-OpenAI|BaseLLM|BaseLanguageModel|BaseLangChain-llmChain_2-llmChain_2-input-model-BaseLanguageModel", + "data": { + "label": "" + } + }, + { + "source": "promptTemplate_2", + "sourceHandle": "promptTemplate_2-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate", + "target": "llmChain_2", + "targetHandle": "llmChain_2-input-prompt-BasePromptTemplate", + "type": "buttonedge", + "id": "promptTemplate_2-promptTemplate_2-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate-llmChain_2-llmChain_2-input-prompt-BasePromptTemplate", + "data": { + "label": "" + } + }, + { + "source": "openAI_3", + "sourceHandle": "openAI_3-output-openAI-OpenAI|BaseLLM|BaseLanguageModel|BaseLangChain", + "target": "llmChain_3", + "targetHandle": "llmChain_3-input-model-BaseLanguageModel", + "type": "buttonedge", + "id": "openAI_3-openAI_3-output-openAI-OpenAI|BaseLLM|BaseLanguageModel|BaseLangChain-llmChain_3-llmChain_3-input-model-BaseLanguageModel", + "data": { + "label": "" + } + }, + { + "source": "promptTemplate_3", + "sourceHandle": "promptTemplate_3-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate", + "target": "llmChain_3", + "targetHandle": "llmChain_3-input-prompt-BasePromptTemplate", + "type": "buttonedge", + "id": "promptTemplate_3-promptTemplate_3-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate-llmChain_3-llmChain_3-input-prompt-BasePromptTemplate", + "data": { + "label": "" + } + } + ] +} diff --git a/packages/server/marketplaces/SQL DB Chain.json b/packages/server/marketplaces/SQL DB Chain.json new file mode 100644 index 0000000000000000000000000000000000000000..e7826aa2bba67e21f0b4bc966a350bae6a0a5381 --- /dev/null +++ b/packages/server/marketplaces/SQL DB Chain.json @@ -0,0 +1,241 @@ +{ + "description": "Answer questions over a SQL database", + "nodes": [ + { + "width": 300, + "height": 524, + "id": "openAI_1", + "position": 
{ + "x": 835.4668837832456, + "y": 182.4724119898708 + }, + "type": "customNode", + "data": { + "id": "openAI_1", + "label": "OpenAI", + "name": "openAI", + "type": "OpenAI", + "baseClasses": ["OpenAI", "BaseLLM", "BaseLanguageModel", "BaseLangChain"], + "category": "LLMs", + "description": "Wrapper around OpenAI large language models", + "inputParams": [ + { + "label": "OpenAI Api Key", + "name": "openAIApiKey", + "type": "password", + "id": "openAI_1-input-openAIApiKey-password" + }, + { + "label": "Model Name", + "name": "modelName", + "type": "options", + "options": [ + { + "label": "text-davinci-003", + "name": "text-davinci-003" + }, + { + "label": "text-davinci-002", + "name": "text-davinci-002" + }, + { + "label": "text-curie-001", + "name": "text-curie-001" + }, + { + "label": "text-babbage-001", + "name": "text-babbage-001" + } + ], + "default": "text-davinci-003", + "optional": true, + "id": "openAI_1-input-modelName-options" + }, + { + "label": "Temperature", + "name": "temperature", + "type": "number", + "default": 0.7, + "optional": true, + "id": "openAI_1-input-temperature-number" + }, + { + "label": "Max Tokens", + "name": "maxTokens", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_1-input-maxTokens-number" + }, + { + "label": "Top Probability", + "name": "topP", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_1-input-topP-number" + }, + { + "label": "Best Of", + "name": "bestOf", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_1-input-bestOf-number" + }, + { + "label": "Frequency Penalty", + "name": "frequencyPenalty", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_1-input-frequencyPenalty-number" + }, + { + "label": "Presence Penalty", + "name": "presencePenalty", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_1-input-presencePenalty-number" + }, + { + "label": "Batch 
Size", + "name": "batchSize", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_1-input-batchSize-number" + }, + { + "label": "Timeout", + "name": "timeout", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_1-input-timeout-number" + }, + { + "label": "BasePath", + "name": "basepath", + "type": "string", + "optional": true, + "additionalParams": true, + "id": "openAI_1-input-basepath-string" + } + ], + "inputAnchors": [], + "inputs": { + "modelName": "text-davinci-003", + "temperature": 0.7, + "maxTokens": "", + "topP": "", + "bestOf": "", + "frequencyPenalty": "", + "presencePenalty": "", + "batchSize": "", + "timeout": "" + }, + "outputAnchors": [ + { + "id": "openAI_1-output-openAI-OpenAI|BaseLLM|BaseLanguageModel|BaseLangChain", + "name": "openAI", + "label": "OpenAI", + "type": "OpenAI | BaseLLM | BaseLanguageModel | BaseLangChain" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 835.4668837832456, + "y": 182.4724119898708 + }, + "dragging": false + }, + { + "width": 300, + "height": 424, + "id": "sqlDatabaseChain_0", + "position": { + "x": 1255.0372022176111, + "y": 217.507437391498 + }, + "type": "customNode", + "data": { + "id": "sqlDatabaseChain_0", + "label": "Sql Database Chain", + "name": "sqlDatabaseChain", + "type": "SqlDatabaseChain", + "baseClasses": ["SqlDatabaseChain", "BaseChain", "BaseLangChain"], + "category": "Chains", + "description": "Answer questions over a SQL database", + "inputParams": [ + { + "label": "Database", + "name": "database", + "type": "options", + "options": [ + { + "label": "SQlite", + "name": "sqlite" + } + ], + "default": "sqlite", + "id": "sqlDatabaseChain_0-input-database-options" + }, + { + "label": "Database File Path", + "name": "dbFilePath", + "type": "string", + "placeholder": "C:/Users/chinook.db", + "id": "sqlDatabaseChain_0-input-dbFilePath-string" + } + ], + "inputAnchors": [ + { + "label": 
"Language Model", + "name": "model", + "type": "BaseLanguageModel", + "id": "sqlDatabaseChain_0-input-model-BaseLanguageModel" + } + ], + "inputs": { + "model": "{{openAI_1.data.instance}}", + "database": "sqlite", + "dbFilePath": "" + }, + "outputAnchors": [ + { + "id": "sqlDatabaseChain_0-output-sqlDatabaseChain-SqlDatabaseChain|BaseChain|BaseLangChain", + "name": "sqlDatabaseChain", + "label": "SqlDatabaseChain", + "type": "SqlDatabaseChain | BaseChain | BaseLangChain" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 1255.0372022176111, + "y": 217.507437391498 + }, + "dragging": false + } + ], + "edges": [ + { + "source": "openAI_1", + "sourceHandle": "openAI_1-output-openAI-OpenAI|BaseLLM|BaseLanguageModel|BaseLangChain", + "target": "sqlDatabaseChain_0", + "targetHandle": "sqlDatabaseChain_0-input-model-BaseLanguageModel", + "type": "buttonedge", + "id": "openAI_1-openAI_1-output-openAI-OpenAI|BaseLLM|BaseLanguageModel|BaseLangChain-sqlDatabaseChain_0-sqlDatabaseChain_0-input-model-BaseLanguageModel", + "data": { + "label": "" + } + } + ] +} diff --git a/packages/server/marketplaces/Simple Conversation Chain.json b/packages/server/marketplaces/Simple Conversation Chain.json new file mode 100644 index 0000000000000000000000000000000000000000..f3decc853ba93fe857003505699b69abb3a04cfa --- /dev/null +++ b/packages/server/marketplaces/Simple Conversation Chain.json @@ -0,0 +1,290 @@ +{ + "description": "Basic example of Conversation Chain with built-in memory - works exactly like ChatGPT", + "nodes": [ + { + "width": 300, + "height": 524, + "id": "chatOpenAI_0", + "position": { + "x": 750.6529856117049, + "y": -75.72544375812092 + }, + "type": "customNode", + "data": { + "id": "chatOpenAI_0", + "label": "ChatOpenAI", + "name": "chatOpenAI", + "type": "ChatOpenAI", + "baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel", "BaseLangChain"], + "category": "Chat Models", + "description": "Wrapper around 
OpenAI large language models that use the Chat endpoint", + "inputParams": [ + { + "label": "OpenAI Api Key", + "name": "openAIApiKey", + "type": "password", + "id": "chatOpenAI_0-input-openAIApiKey-password" + }, + { + "label": "Model Name", + "name": "modelName", + "type": "options", + "options": [ + { + "label": "gpt-4", + "name": "gpt-4" + }, + { + "label": "gpt-4-0314", + "name": "gpt-4-0314" + }, + { + "label": "gpt-4-32k-0314", + "name": "gpt-4-32k-0314" + }, + { + "label": "gpt-3.5-turbo", + "name": "gpt-3.5-turbo" + }, + { + "label": "gpt-3.5-turbo-0301", + "name": "gpt-3.5-turbo-0301" + } + ], + "default": "gpt-3.5-turbo", + "optional": true, + "id": "chatOpenAI_0-input-modelName-options" + }, + { + "label": "Temperature", + "name": "temperature", + "type": "number", + "default": 0.9, + "optional": true, + "id": "chatOpenAI_0-input-temperature-number" + }, + { + "label": "Max Tokens", + "name": "maxTokens", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-maxTokens-number" + }, + { + "label": "Top Probability", + "name": "topP", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-topP-number" + }, + { + "label": "Frequency Penalty", + "name": "frequencyPenalty", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-frequencyPenalty-number" + }, + { + "label": "Presence Penalty", + "name": "presencePenalty", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-presencePenalty-number" + }, + { + "label": "Timeout", + "name": "timeout", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-timeout-number" + }, + { + "label": "BasePath", + "name": "basepath", + "type": "string", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-basepath-string" + } + ], + "inputAnchors": [], + "inputs": { + "modelName": "gpt-3.5-turbo", 
+ "temperature": 0.9, + "maxTokens": "", + "topP": "", + "frequencyPenalty": "", + "presencePenalty": "", + "timeout": "" + }, + "outputAnchors": [ + { + "id": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain", + "name": "chatOpenAI", + "label": "ChatOpenAI", + "type": "ChatOpenAI | BaseChatModel | BaseLanguageModel | BaseLangChain" + } + ], + "outputs": {}, + "selected": false + }, + "positionAbsolute": { + "x": 750.6529856117049, + "y": -75.72544375812092 + }, + "selected": false, + "dragging": false + }, + { + "width": 300, + "height": 376, + "id": "bufferMemory_0", + "position": { + "x": 753.4300788823234, + "y": 479.5336426526603 + }, + "type": "customNode", + "data": { + "id": "bufferMemory_0", + "label": "Buffer Memory", + "name": "bufferMemory", + "type": "BufferMemory", + "baseClasses": ["BufferMemory", "BaseChatMemory", "BaseMemory"], + "category": "Memory", + "description": "Remembers previous conversational back and forths directly", + "inputParams": [ + { + "label": "Memory Key", + "name": "memoryKey", + "type": "string", + "default": "chat_history", + "id": "bufferMemory_0-input-memoryKey-string" + }, + { + "label": "Input Key", + "name": "inputKey", + "type": "string", + "default": "input", + "id": "bufferMemory_0-input-inputKey-string" + } + ], + "inputAnchors": [], + "inputs": { + "memoryKey": "chat_history", + "inputKey": "input" + }, + "outputAnchors": [ + { + "id": "bufferMemory_0-output-bufferMemory-BufferMemory|BaseChatMemory|BaseMemory", + "name": "bufferMemory", + "label": "BufferMemory", + "type": "BufferMemory | BaseChatMemory | BaseMemory" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 753.4300788823234, + "y": 479.5336426526603 + }, + "dragging": false + }, + { + "width": 300, + "height": 332, + "id": "conversationChain_0", + "position": { + "x": 1201.6630991237407, + "y": 291.86981791303066 + }, + "type": "customNode", + "data": { + "id": 
"conversationChain_0", + "label": "Conversation Chain", + "name": "conversationChain", + "type": "ConversationChain", + "baseClasses": ["ConversationChain", "LLMChain", "BaseChain", "BaseLangChain"], + "category": "Chains", + "description": "Chat models specific conversational chain with memory", + "inputParams": [ + { + "label": "System Message", + "name": "systemMessagePrompt", + "type": "string", + "rows": 4, + "additionalParams": true, + "optional": true, + "placeholder": "You are a helpful assistant that write codes", + "id": "conversationChain_0-input-systemMessagePrompt-string" + } + ], + "inputAnchors": [ + { + "label": "Language Model", + "name": "model", + "type": "BaseChatModel", + "id": "conversationChain_0-input-model-BaseChatModel" + }, + { + "label": "Memory", + "name": "memory", + "type": "BaseMemory", + "id": "conversationChain_0-input-memory-BaseMemory" + } + ], + "inputs": { + "model": "{{chatOpenAI_0.data.instance}}", + "memory": "{{bufferMemory_0.data.instance}}", + "systemMessagePrompt": "" + }, + "outputAnchors": [ + { + "id": "conversationChain_0-output-conversationChain-ConversationChain|LLMChain|BaseChain|BaseLangChain", + "name": "conversationChain", + "label": "ConversationChain", + "type": "ConversationChain | LLMChain | BaseChain | BaseLangChain" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 1201.6630991237407, + "y": 291.86981791303066 + }, + "dragging": false + } + ], + "edges": [ + { + "source": "chatOpenAI_0", + "sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain", + "target": "conversationChain_0", + "targetHandle": "conversationChain_0-input-model-BaseChatModel", + "type": "buttonedge", + "id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain-conversationChain_0-conversationChain_0-input-model-BaseChatModel", + "data": { + "label": "" + } + }, + { + "source": 
"bufferMemory_0", + "sourceHandle": "bufferMemory_0-output-bufferMemory-BufferMemory|BaseChatMemory|BaseMemory", + "target": "conversationChain_0", + "targetHandle": "conversationChain_0-input-memory-BaseMemory", + "type": "buttonedge", + "id": "bufferMemory_0-bufferMemory_0-output-bufferMemory-BufferMemory|BaseChatMemory|BaseMemory-conversationChain_0-conversationChain_0-input-memory-BaseMemory", + "data": { + "label": "" + } + } + ] +} diff --git a/packages/server/marketplaces/Simple LLM Chain.json b/packages/server/marketplaces/Simple LLM Chain.json new file mode 100644 index 0000000000000000000000000000000000000000..c9d354bc067b098c4bc3b336a3aa405b5b44b1bd --- /dev/null +++ b/packages/server/marketplaces/Simple LLM Chain.json @@ -0,0 +1,323 @@ +{ + "description": "Basic example of stateless (no memory) LLM Chain with a Prompt Template and LLM Model", + "nodes": [ + { + "width": 300, + "height": 526, + "id": "openAI_1", + "position": { + "x": 510.75932526856377, + "y": -44.80152395958956 + }, + "type": "customNode", + "data": { + "id": "openAI_1", + "label": "OpenAI", + "name": "openAI", + "type": "OpenAI", + "baseClasses": ["OpenAI", "BaseLLM", "BaseLanguageModel", "BaseLangChain"], + "category": "LLMs", + "description": "Wrapper around OpenAI large language models", + "inputParams": [ + { + "label": "OpenAI Api Key", + "name": "openAIApiKey", + "type": "password", + "id": "openAI_1-input-openAIApiKey-password" + }, + { + "label": "Model Name", + "name": "modelName", + "type": "options", + "options": [ + { + "label": "text-davinci-003", + "name": "text-davinci-003" + }, + { + "label": "text-davinci-002", + "name": "text-davinci-002" + }, + { + "label": "text-curie-001", + "name": "text-curie-001" + }, + { + "label": "text-babbage-001", + "name": "text-babbage-001" + } + ], + "default": "text-davinci-003", + "optional": true, + "id": "openAI_1-input-modelName-options" + }, + { + "label": "Temperature", + "name": "temperature", + "type": "number", + "default": 
0.7, + "optional": true, + "id": "openAI_1-input-temperature-number" + }, + { + "label": "Max Tokens", + "name": "maxTokens", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_1-input-maxTokens-number" + }, + { + "label": "Top Probability", + "name": "topP", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_1-input-topP-number" + }, + { + "label": "Best Of", + "name": "bestOf", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_1-input-bestOf-number" + }, + { + "label": "Frequency Penalty", + "name": "frequencyPenalty", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_1-input-frequencyPenalty-number" + }, + { + "label": "Presence Penalty", + "name": "presencePenalty", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_1-input-presencePenalty-number" + }, + { + "label": "Batch Size", + "name": "batchSize", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_1-input-batchSize-number" + }, + { + "label": "Timeout", + "name": "timeout", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_1-input-timeout-number" + }, + { + "label": "BasePath", + "name": "basepath", + "type": "string", + "optional": true, + "additionalParams": true, + "id": "openAI_1-input-basepath-string" + } + ], + "inputAnchors": [], + "inputs": { + "modelName": "text-davinci-003", + "temperature": 0.7, + "maxTokens": "", + "topP": "", + "bestOf": "", + "frequencyPenalty": "", + "presencePenalty": "", + "batchSize": "", + "timeout": "" + }, + "outputAnchors": [ + { + "id": "openAI_1-output-openAI-OpenAI|BaseLLM|BaseLanguageModel|BaseLangChain", + "name": "openAI", + "label": "OpenAI", + "type": "OpenAI | BaseLLM | BaseLanguageModel | BaseLangChain" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 510.75932526856377, + 
"y": -44.80152395958956 + }, + "dragging": false + }, + { + "width": 300, + "height": 534, + "id": "promptTemplate_1", + "position": { + "x": 514.5434056794296, + "y": 507.47798128037107 + }, + "type": "customNode", + "data": { + "id": "promptTemplate_1", + "label": "Prompt Template", + "name": "promptTemplate", + "type": "PromptTemplate", + "baseClasses": ["PromptTemplate", "BaseStringPromptTemplate", "BasePromptTemplate"], + "category": "Prompts", + "description": "Schema to represent a basic prompt for an LLM", + "inputParams": [ + { + "label": "Template", + "name": "template", + "type": "string", + "rows": 4, + "placeholder": "What is a good name for a company that makes {product}?", + "id": "promptTemplate_1-input-template-string" + }, + { + "label": "Format Prompt Values", + "name": "promptValues", + "type": "string", + "rows": 4, + "placeholder": "{\n \"input_language\": \"English\",\n \"output_language\": \"French\"\n}", + "optional": true, + "acceptVariable": true, + "list": true, + "id": "promptTemplate_1-input-promptValues-string" + } + ], + "inputAnchors": [], + "inputs": { + "template": "", + "promptValues": "" + }, + "outputAnchors": [ + { + "id": "promptTemplate_1-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate", + "name": "promptTemplate", + "label": "PromptTemplate", + "type": "PromptTemplate | BaseStringPromptTemplate | BasePromptTemplate" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 514.5434056794296, + "y": 507.47798128037107 + }, + "dragging": false + }, + { + "width": 300, + "height": 407, + "id": "llmChain_1", + "position": { + "x": 970.9254258940236, + "y": 320.56761595884564 + }, + "type": "customNode", + "data": { + "id": "llmChain_1", + "label": "LLM Chain", + "name": "llmChain", + "type": "LLMChain", + "baseClasses": ["LLMChain", "BaseChain", "BaseLangChain"], + "category": "Chains", + "description": "Chain to run queries against LLMs", + 
"inputParams": [ + { + "label": "Chain Name", + "name": "chainName", + "type": "string", + "placeholder": "Name Your Chain", + "optional": true, + "id": "llmChain_1-input-chainName-string" + } + ], + "inputAnchors": [ + { + "label": "Language Model", + "name": "model", + "type": "BaseLanguageModel", + "id": "llmChain_1-input-model-BaseLanguageModel" + }, + { + "label": "Prompt", + "name": "prompt", + "type": "BasePromptTemplate", + "id": "llmChain_1-input-prompt-BasePromptTemplate" + } + ], + "inputs": { + "model": "{{openAI_1.data.instance}}", + "prompt": "{{promptTemplate_1.data.instance}}", + "chainName": "" + }, + "outputAnchors": [ + { + "name": "output", + "label": "Output", + "type": "options", + "options": [ + { + "id": "llmChain_1-output-llmChain-LLMChain|BaseChain|BaseLangChain", + "name": "llmChain", + "label": "LLM Chain", + "type": "LLMChain | BaseChain | BaseLangChain" + }, + { + "id": "llmChain_1-output-outputPrediction-string", + "name": "outputPrediction", + "label": "Output Prediction", + "type": "string" + } + ], + "default": "llmChain" + } + ], + "outputs": { + "output": "llmChain" + }, + "selected": false + }, + "positionAbsolute": { + "x": 970.9254258940236, + "y": 320.56761595884564 + }, + "selected": false, + "dragging": false + } + ], + "edges": [ + { + "source": "openAI_1", + "sourceHandle": "openAI_1-output-openAI-OpenAI|BaseLLM|BaseLanguageModel|BaseLangChain", + "target": "llmChain_1", + "targetHandle": "llmChain_1-input-model-BaseLanguageModel", + "type": "buttonedge", + "id": "openAI_1-openAI_1-output-openAI-OpenAI|BaseLLM|BaseLanguageModel|BaseLangChain-llmChain_1-llmChain_1-input-model-BaseLanguageModel", + "data": { + "label": "" + } + }, + { + "source": "promptTemplate_1", + "sourceHandle": "promptTemplate_1-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate", + "target": "llmChain_1", + "targetHandle": "llmChain_1-input-prompt-BasePromptTemplate", + "type": "buttonedge", + "id": 
"promptTemplate_1-promptTemplate_1-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate-llmChain_1-llmChain_1-input-prompt-BasePromptTemplate", + "data": { + "label": "" + } + } + ] +} diff --git a/packages/server/marketplaces/Translator.json b/packages/server/marketplaces/Translator.json new file mode 100644 index 0000000000000000000000000000000000000000..6e36f943593bb931f9de5ed878d3569ab5d36788 --- /dev/null +++ b/packages/server/marketplaces/Translator.json @@ -0,0 +1,318 @@ +{ + "description": "Language translation using LLM Chain with a Chat Prompt Template and Chat Model", + "nodes": [ + { + "width": 300, + "height": 711, + "id": "chatPromptTemplate_1", + "position": { + "x": 441.8516979620723, + "y": 636.1108860994266 + }, + "type": "customNode", + "data": { + "id": "chatPromptTemplate_1", + "label": "Chat Prompt Template", + "name": "chatPromptTemplate", + "type": "ChatPromptTemplate", + "baseClasses": ["ChatPromptTemplate", "BaseChatPromptTemplate", "BasePromptTemplate"], + "category": "Prompts", + "description": "Schema to represent a chat prompt", + "inputParams": [ + { + "label": "System Message", + "name": "systemMessagePrompt", + "type": "string", + "rows": 4, + "placeholder": "You are a helpful assistant that translates {input_language} to {output_language}.", + "id": "chatPromptTemplate_1-input-systemMessagePrompt-string" + }, + { + "label": "Human Message", + "name": "humanMessagePrompt", + "type": "string", + "rows": 4, + "placeholder": "{text}", + "id": "chatPromptTemplate_1-input-humanMessagePrompt-string" + }, + { + "label": "Format Prompt Values", + "name": "promptValues", + "type": "string", + "rows": 4, + "placeholder": "{\n \"input_language\": \"English\",\n \"output_language\": \"French\"\n}", + "optional": true, + "acceptVariable": true, + "list": true, + "id": "chatPromptTemplate_1-input-promptValues-string" + } + ], + "inputAnchors": [], + "inputs": { + "systemMessagePrompt": "You are a helpful assistant that 
translates {input_language} to {output_language}.", + "humanMessagePrompt": "{input}", + "promptValues": "{\n \"input_language\": \"English\",\n \"output_language\": \"French\"\n}" + }, + "outputAnchors": [ + { + "id": "chatPromptTemplate_1-output-chatPromptTemplate-ChatPromptTemplate|BaseChatPromptTemplate|BasePromptTemplate", + "name": "chatPromptTemplate", + "label": "ChatPromptTemplate", + "type": "ChatPromptTemplate | BaseChatPromptTemplate | BasePromptTemplate" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 441.8516979620723, + "y": 636.1108860994266 + }, + "dragging": false + }, + { + "width": 300, + "height": 526, + "id": "chatOpenAI_1", + "position": { + "x": 439.5219561593599, + "y": 93.61600226758335 + }, + "type": "customNode", + "data": { + "id": "chatOpenAI_1", + "label": "ChatOpenAI", + "name": "chatOpenAI", + "type": "ChatOpenAI", + "baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel", "BaseLangChain"], + "category": "Chat Models", + "description": "Wrapper around OpenAI large language models that use the Chat endpoint", + "inputParams": [ + { + "label": "OpenAI Api Key", + "name": "openAIApiKey", + "type": "password", + "id": "chatOpenAI_1-input-openAIApiKey-password" + }, + { + "label": "Model Name", + "name": "modelName", + "type": "options", + "options": [ + { + "label": "gpt-4", + "name": "gpt-4" + }, + { + "label": "gpt-4-0314", + "name": "gpt-4-0314" + }, + { + "label": "gpt-4-32k-0314", + "name": "gpt-4-32k-0314" + }, + { + "label": "gpt-3.5-turbo", + "name": "gpt-3.5-turbo" + }, + { + "label": "gpt-3.5-turbo-0301", + "name": "gpt-3.5-turbo-0301" + } + ], + "default": "gpt-3.5-turbo", + "optional": true, + "id": "chatOpenAI_1-input-modelName-options" + }, + { + "label": "Temperature", + "name": "temperature", + "type": "number", + "default": 0.9, + "optional": true, + "id": "chatOpenAI_1-input-temperature-number" + }, + { + "label": "Max Tokens", + "name": "maxTokens", + 
"type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_1-input-maxTokens-number" + }, + { + "label": "Top Probability", + "name": "topP", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_1-input-topP-number" + }, + { + "label": "Frequency Penalty", + "name": "frequencyPenalty", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_1-input-frequencyPenalty-number" + }, + { + "label": "Presence Penalty", + "name": "presencePenalty", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_1-input-presencePenalty-number" + }, + { + "label": "Timeout", + "name": "timeout", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_1-input-timeout-number" + }, + { + "label": "BasePath", + "name": "basepath", + "type": "string", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_1-input-basepath-string" + } + ], + "inputAnchors": [], + "inputs": { + "modelName": "gpt-3.5-turbo", + "temperature": 0.9, + "maxTokens": "", + "topP": "", + "frequencyPenalty": "", + "presencePenalty": "", + "timeout": "" + }, + "outputAnchors": [ + { + "id": "chatOpenAI_1-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain", + "name": "chatOpenAI", + "label": "ChatOpenAI", + "type": "ChatOpenAI | BaseChatModel | BaseLanguageModel | BaseLangChain" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 439.5219561593599, + "y": 93.61600226758335 + }, + "dragging": false + }, + { + "width": 300, + "height": 407, + "id": "llmChain_1", + "position": { + "x": 865.7775572410412, + "y": 543.9211372857111 + }, + "type": "customNode", + "data": { + "id": "llmChain_1", + "label": "LLM Chain", + "name": "llmChain", + "type": "LLMChain", + "baseClasses": ["LLMChain", "BaseChain", "BaseLangChain"], + "category": "Chains", + "description": "Chain to run queries 
against LLMs", + "inputParams": [ + { + "label": "Chain Name", + "name": "chainName", + "type": "string", + "placeholder": "Name Your Chain", + "optional": true, + "id": "llmChain_1-input-chainName-string" + } + ], + "inputAnchors": [ + { + "label": "Language Model", + "name": "model", + "type": "BaseLanguageModel", + "id": "llmChain_1-input-model-BaseLanguageModel" + }, + { + "label": "Prompt", + "name": "prompt", + "type": "BasePromptTemplate", + "id": "llmChain_1-input-prompt-BasePromptTemplate" + } + ], + "inputs": { + "model": "{{chatOpenAI_1.data.instance}}", + "prompt": "{{chatPromptTemplate_1.data.instance}}", + "chainName": "Language Translation" + }, + "outputAnchors": [ + { + "name": "output", + "label": "Output", + "type": "options", + "options": [ + { + "id": "llmChain_1-output-llmChain-LLMChain|BaseChain|BaseLangChain", + "name": "llmChain", + "label": "LLM Chain", + "type": "LLMChain | BaseChain | BaseLangChain" + }, + { + "id": "llmChain_1-output-outputPrediction-string", + "name": "outputPrediction", + "label": "Output Prediction", + "type": "string" + } + ], + "default": "llmChain" + } + ], + "outputs": { + "output": "llmChain" + }, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 865.7775572410412, + "y": 543.9211372857111 + }, + "dragging": false + } + ], + "edges": [ + { + "source": "chatOpenAI_1", + "sourceHandle": "chatOpenAI_1-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain", + "target": "llmChain_1", + "targetHandle": "llmChain_1-input-model-BaseLanguageModel", + "type": "buttonedge", + "id": "chatOpenAI_1-chatOpenAI_1-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain-llmChain_1-llmChain_1-input-model-BaseLanguageModel", + "data": { + "label": "" + } + }, + { + "source": "chatPromptTemplate_1", + "sourceHandle": "chatPromptTemplate_1-output-chatPromptTemplate-ChatPromptTemplate|BaseChatPromptTemplate|BasePromptTemplate", + "target": "llmChain_1", + 
"targetHandle": "llmChain_1-input-prompt-BasePromptTemplate", + "type": "buttonedge", + "id": "chatPromptTemplate_1-chatPromptTemplate_1-output-chatPromptTemplate-ChatPromptTemplate|BaseChatPromptTemplate|BasePromptTemplate-llmChain_1-llmChain_1-input-prompt-BasePromptTemplate", + "data": { + "label": "" + } + } + ] +} diff --git a/packages/server/marketplaces/WebBrowser.json b/packages/server/marketplaces/WebBrowser.json new file mode 100644 index 0000000000000000000000000000000000000000..7d7ff35785008b37622d6dd2c2d3727a135da1b1 --- /dev/null +++ b/packages/server/marketplaces/WebBrowser.json @@ -0,0 +1,614 @@ +{ + "description": "Conversational Agent with ability to visit a website and extract information", + "nodes": [ + { + "width": 300, + "height": 524, + "id": "chatOpenAI_0", + "position": { + "x": 348.0817836845733, + "y": -86.56099395751443 + }, + "type": "customNode", + "data": { + "id": "chatOpenAI_0", + "label": "ChatOpenAI", + "name": "chatOpenAI", + "type": "ChatOpenAI", + "baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel", "BaseLangChain"], + "category": "Chat Models", + "description": "Wrapper around OpenAI large language models that use the Chat endpoint", + "inputParams": [ + { + "label": "OpenAI Api Key", + "name": "openAIApiKey", + "type": "password", + "id": "chatOpenAI_0-input-openAIApiKey-password" + }, + { + "label": "Model Name", + "name": "modelName", + "type": "options", + "options": [ + { + "label": "gpt-4", + "name": "gpt-4" + }, + { + "label": "gpt-4-0314", + "name": "gpt-4-0314" + }, + { + "label": "gpt-4-32k-0314", + "name": "gpt-4-32k-0314" + }, + { + "label": "gpt-3.5-turbo", + "name": "gpt-3.5-turbo" + }, + { + "label": "gpt-3.5-turbo-0301", + "name": "gpt-3.5-turbo-0301" + } + ], + "default": "gpt-3.5-turbo", + "optional": true, + "id": "chatOpenAI_0-input-modelName-options" + }, + { + "label": "Temperature", + "name": "temperature", + "type": "number", + "default": 0.9, + "optional": true, + "id": 
"chatOpenAI_0-input-temperature-number" + }, + { + "label": "Max Tokens", + "name": "maxTokens", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-maxTokens-number" + }, + { + "label": "Top Probability", + "name": "topP", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-topP-number" + }, + { + "label": "Frequency Penalty", + "name": "frequencyPenalty", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-frequencyPenalty-number" + }, + { + "label": "Presence Penalty", + "name": "presencePenalty", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-presencePenalty-number" + }, + { + "label": "Timeout", + "name": "timeout", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-timeout-number" + }, + { + "label": "BasePath", + "name": "basepath", + "type": "string", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-basepath-string" + } + ], + "inputAnchors": [], + "inputs": { + "modelName": "gpt-3.5-turbo", + "temperature": 0.9, + "maxTokens": "", + "topP": "", + "frequencyPenalty": "", + "presencePenalty": "", + "timeout": "" + }, + "outputAnchors": [ + { + "id": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain", + "name": "chatOpenAI", + "label": "ChatOpenAI", + "type": "ChatOpenAI | BaseChatModel | BaseLanguageModel | BaseLangChain" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 348.0817836845733, + "y": -86.56099395751443 + }, + "dragging": false + }, + { + "width": 300, + "height": 376, + "id": "bufferMemory_0", + "position": { + "x": 15.045898260926037, + "y": 114.13407401971622 + }, + "type": "customNode", + "data": { + "id": "bufferMemory_0", + "label": "Buffer Memory", + "name": "bufferMemory", + "type": "BufferMemory", + 
"baseClasses": ["BufferMemory", "BaseChatMemory", "BaseMemory"], + "category": "Memory", + "description": "Remembers previous conversational back and forths directly", + "inputParams": [ + { + "label": "Memory Key", + "name": "memoryKey", + "type": "string", + "default": "chat_history", + "id": "bufferMemory_0-input-memoryKey-string" + }, + { + "label": "Input Key", + "name": "inputKey", + "type": "string", + "default": "input", + "id": "bufferMemory_0-input-inputKey-string" + } + ], + "inputAnchors": [], + "inputs": { + "memoryKey": "chat_history", + "inputKey": "input" + }, + "outputAnchors": [ + { + "id": "bufferMemory_0-output-bufferMemory-BufferMemory|BaseChatMemory|BaseMemory", + "name": "bufferMemory", + "label": "BufferMemory", + "type": "BufferMemory | BaseChatMemory | BaseMemory" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 15.045898260926037, + "y": 114.13407401971622 + }, + "dragging": false + }, + { + "width": 300, + "height": 330, + "id": "openAIEmbeddings_0", + "position": { + "x": 693.9266260641734, + "y": 37.098856540087496 + }, + "type": "customNode", + "data": { + "id": "openAIEmbeddings_0", + "label": "OpenAI Embeddings", + "name": "openAIEmbeddings", + "type": "OpenAIEmbeddings", + "baseClasses": ["OpenAIEmbeddings", "Embeddings"], + "category": "Embeddings", + "description": "OpenAI API to generate embeddings for a given text", + "inputParams": [ + { + "label": "OpenAI Api Key", + "name": "openAIApiKey", + "type": "password", + "id": "openAIEmbeddings_0-input-openAIApiKey-password" + }, + { + "label": "Strip New Lines", + "name": "stripNewLines", + "type": "boolean", + "optional": true, + "additionalParams": true, + "id": "openAIEmbeddings_0-input-stripNewLines-boolean" + }, + { + "label": "Batch Size", + "name": "batchSize", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAIEmbeddings_0-input-batchSize-number" + }, + { + "label": "Timeout", + "name": 
"timeout", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAIEmbeddings_0-input-timeout-number" + }, + { + "label": "BasePath", + "name": "basepath", + "type": "string", + "optional": true, + "additionalParams": true, + "id": "openAIEmbeddings_0-input-basepath-string" + } + ], + "inputAnchors": [], + "inputs": { + "stripNewLines": "", + "batchSize": "", + "timeout": "" + }, + "outputAnchors": [ + { + "id": "openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings", + "name": "openAIEmbeddings", + "label": "OpenAIEmbeddings", + "type": "OpenAIEmbeddings | Embeddings" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 693.9266260641734, + "y": 37.098856540087496 + }, + "dragging": false + }, + { + "width": 300, + "height": 524, + "id": "chatOpenAI_1", + "position": { + "x": 691.5132411896494, + "y": -533.1696369549378 + }, + "type": "customNode", + "data": { + "id": "chatOpenAI_1", + "label": "ChatOpenAI", + "name": "chatOpenAI", + "type": "ChatOpenAI", + "baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel", "BaseLangChain"], + "category": "Chat Models", + "description": "Wrapper around OpenAI large language models that use the Chat endpoint", + "inputParams": [ + { + "label": "OpenAI Api Key", + "name": "openAIApiKey", + "type": "password", + "id": "chatOpenAI_1-input-openAIApiKey-password" + }, + { + "label": "Model Name", + "name": "modelName", + "type": "options", + "options": [ + { + "label": "gpt-4", + "name": "gpt-4" + }, + { + "label": "gpt-4-0314", + "name": "gpt-4-0314" + }, + { + "label": "gpt-4-32k-0314", + "name": "gpt-4-32k-0314" + }, + { + "label": "gpt-3.5-turbo", + "name": "gpt-3.5-turbo" + }, + { + "label": "gpt-3.5-turbo-0301", + "name": "gpt-3.5-turbo-0301" + } + ], + "default": "gpt-3.5-turbo", + "optional": true, + "id": "chatOpenAI_1-input-modelName-options" + }, + { + "label": "Temperature", + "name": "temperature", + "type": 
"number", + "default": 0.9, + "optional": true, + "id": "chatOpenAI_1-input-temperature-number" + }, + { + "label": "Max Tokens", + "name": "maxTokens", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_1-input-maxTokens-number" + }, + { + "label": "Top Probability", + "name": "topP", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_1-input-topP-number" + }, + { + "label": "Frequency Penalty", + "name": "frequencyPenalty", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_1-input-frequencyPenalty-number" + }, + { + "label": "Presence Penalty", + "name": "presencePenalty", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_1-input-presencePenalty-number" + }, + { + "label": "Timeout", + "name": "timeout", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_1-input-timeout-number" + }, + { + "label": "BasePath", + "name": "basepath", + "type": "string", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_1-input-basepath-string" + } + ], + "inputAnchors": [], + "inputs": { + "modelName": "gpt-3.5-turbo", + "temperature": 0.9, + "maxTokens": "", + "topP": "", + "frequencyPenalty": "", + "presencePenalty": "", + "timeout": "" + }, + "outputAnchors": [ + { + "id": "chatOpenAI_1-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain", + "name": "chatOpenAI", + "label": "ChatOpenAI", + "type": "ChatOpenAI | BaseChatModel | BaseLanguageModel | BaseLangChain" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 691.5132411896494, + "y": -533.1696369549378 + }, + "dragging": false + }, + { + "width": 300, + "height": 280, + "id": "webBrowser_0", + "position": { + "x": 1091.0866823400172, + "y": -16.43806989958216 + }, + "type": "customNode", + "data": { + "id": "webBrowser_0", + "label": "Web Browser", + 
"name": "webBrowser", + "type": "WebBrowser", + "baseClasses": ["WebBrowser", "Tool", "StructuredTool", "BaseLangChain"], + "category": "Tools", + "description": "Gives agent the ability to visit a website and extract information", + "inputParams": [], + "inputAnchors": [ + { + "label": "Language Model", + "name": "model", + "type": "BaseLanguageModel", + "id": "webBrowser_0-input-model-BaseLanguageModel" + }, + { + "label": "Embeddings", + "name": "embeddings", + "type": "Embeddings", + "id": "webBrowser_0-input-embeddings-Embeddings" + } + ], + "inputs": { + "model": "{{chatOpenAI_1.data.instance}}", + "embeddings": "{{openAIEmbeddings_0.data.instance}}" + }, + "outputAnchors": [ + { + "id": "webBrowser_0-output-webBrowser-WebBrowser|Tool|StructuredTool|BaseLangChain", + "name": "webBrowser", + "label": "WebBrowser", + "type": "WebBrowser | Tool | StructuredTool | BaseLangChain" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 1091.0866823400172, + "y": -16.43806989958216 + }, + "dragging": false + }, + { + "width": 300, + "height": 383, + "id": "conversationalAgent_0", + "position": { + "x": 1451.6222493253506, + "y": 239.69137914100338 + }, + "type": "customNode", + "data": { + "id": "conversationalAgent_0", + "label": "Conversational Agent", + "name": "conversationalAgent", + "type": "AgentExecutor", + "baseClasses": ["AgentExecutor", "BaseChain", "BaseLangChain"], + "category": "Agents", + "description": "Conversational agent for a chat model. 
It will utilize chat specific prompts", + "inputParams": [ + { + "label": "System Message", + "name": "systemMessage", + "type": "string", + "rows": 4, + "optional": true, + "additionalParams": true, + "id": "conversationalAgent_0-input-systemMessage-string" + }, + { + "label": "Human Message", + "name": "humanMessage", + "type": "string", + "rows": 4, + "optional": true, + "additionalParams": true, + "id": "conversationalAgent_0-input-humanMessage-string" + } + ], + "inputAnchors": [ + { + "label": "Allowed Tools", + "name": "tools", + "type": "Tool", + "list": true, + "id": "conversationalAgent_0-input-tools-Tool" + }, + { + "label": "Language Model", + "name": "model", + "type": "BaseLanguageModel", + "id": "conversationalAgent_0-input-model-BaseLanguageModel" + }, + { + "label": "Memory", + "name": "memory", + "type": "BaseChatMemory", + "id": "conversationalAgent_0-input-memory-BaseChatMemory" + } + ], + "inputs": { + "tools": ["{{webBrowser_0.data.instance}}"], + "model": "{{chatOpenAI_0.data.instance}}", + "memory": "{{bufferMemory_0.data.instance}}", + "systemMessage": "", + "humanMessage": "" + }, + "outputAnchors": [ + { + "id": "conversationalAgent_0-output-conversationalAgent-AgentExecutor|BaseChain|BaseLangChain", + "name": "conversationalAgent", + "label": "AgentExecutor", + "type": "AgentExecutor | BaseChain | BaseLangChain" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 1451.6222493253506, + "y": 239.69137914100338 + }, + "dragging": false + } + ], + "edges": [ + { + "source": "chatOpenAI_1", + "sourceHandle": "chatOpenAI_1-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain", + "target": "webBrowser_0", + "targetHandle": "webBrowser_0-input-model-BaseLanguageModel", + "type": "buttonedge", + "id": "chatOpenAI_1-chatOpenAI_1-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain-webBrowser_0-webBrowser_0-input-model-BaseLanguageModel", + "data": { + 
"label": "" + } + }, + { + "source": "openAIEmbeddings_0", + "sourceHandle": "openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings", + "target": "webBrowser_0", + "targetHandle": "webBrowser_0-input-embeddings-Embeddings", + "type": "buttonedge", + "id": "openAIEmbeddings_0-openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings-webBrowser_0-webBrowser_0-input-embeddings-Embeddings", + "data": { + "label": "" + } + }, + { + "source": "webBrowser_0", + "sourceHandle": "webBrowser_0-output-webBrowser-WebBrowser|Tool|StructuredTool|BaseLangChain", + "target": "conversationalAgent_0", + "targetHandle": "conversationalAgent_0-input-tools-Tool", + "type": "buttonedge", + "id": "webBrowser_0-webBrowser_0-output-webBrowser-WebBrowser|Tool|StructuredTool|BaseLangChain-conversationalAgent_0-conversationalAgent_0-input-tools-Tool", + "data": { + "label": "" + } + }, + { + "source": "chatOpenAI_0", + "sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain", + "target": "conversationalAgent_0", + "targetHandle": "conversationalAgent_0-input-model-BaseLanguageModel", + "type": "buttonedge", + "id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain-conversationalAgent_0-conversationalAgent_0-input-model-BaseLanguageModel", + "data": { + "label": "" + } + }, + { + "source": "bufferMemory_0", + "sourceHandle": "bufferMemory_0-output-bufferMemory-BufferMemory|BaseChatMemory|BaseMemory", + "target": "conversationalAgent_0", + "targetHandle": "conversationalAgent_0-input-memory-BaseChatMemory", + "type": "buttonedge", + "id": "bufferMemory_0-bufferMemory_0-output-bufferMemory-BufferMemory|BaseChatMemory|BaseMemory-conversationalAgent_0-conversationalAgent_0-input-memory-BaseChatMemory", + "data": { + "label": "" + } + } + ] +} diff --git a/packages/server/marketplaces/Zapier NLA.json b/packages/server/marketplaces/Zapier NLA.json new file mode 100644 
index 0000000000000000000000000000000000000000..19b30107f4df24144962925a0b7a9c8b2a4a6ea3 --- /dev/null +++ b/packages/server/marketplaces/Zapier NLA.json @@ -0,0 +1,281 @@ +{ + "description": "An agent that uses Zapier NLA to accesss apps and actions on Zapier's platform", + "nodes": [ + { + "width": 300, + "height": 278, + "id": "zapierNLA_0", + "position": { + "x": 546.0561178227484, + "y": 83.03303671691799 + }, + "type": "customNode", + "data": { + "id": "zapierNLA_0", + "label": "Zapier NLA", + "name": "zapierNLA", + "type": "ZapierNLA", + "baseClasses": ["ZapierNLA", "Tool"], + "category": "Tools", + "description": "Access to apps and actions on Zapier's platform through a natural language API interface", + "inputParams": [ + { + "label": "Zapier NLA Api Key", + "name": "apiKey", + "type": "password", + "id": "zapierNLA_0-input-apiKey-password" + } + ], + "inputAnchors": [], + "inputs": {}, + "outputAnchors": [ + { + "id": "zapierNLA_0-output-zapierNLA-ZapierNLA|Tool", + "name": "zapierNLA", + "label": "ZapierNLA", + "type": "ZapierNLA | Tool" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 546.0561178227484, + "y": 83.03303671691799 + }, + "dragging": false + }, + { + "width": 300, + "height": 524, + "id": "openAI_0", + "position": { + "x": 547.3867724775708, + "y": 394.1919189424442 + }, + "type": "customNode", + "data": { + "id": "openAI_0", + "label": "OpenAI", + "name": "openAI", + "type": "OpenAI", + "baseClasses": ["OpenAI", "BaseLLM", "BaseLanguageModel", "BaseLangChain"], + "category": "LLMs", + "description": "Wrapper around OpenAI large language models", + "inputParams": [ + { + "label": "OpenAI Api Key", + "name": "openAIApiKey", + "type": "password", + "id": "openAI_0-input-openAIApiKey-password" + }, + { + "label": "Model Name", + "name": "modelName", + "type": "options", + "options": [ + { + "label": "text-davinci-003", + "name": "text-davinci-003" + }, + { + "label": "text-davinci-002", + 
"name": "text-davinci-002" + }, + { + "label": "text-curie-001", + "name": "text-curie-001" + }, + { + "label": "text-babbage-001", + "name": "text-babbage-001" + } + ], + "default": "text-davinci-003", + "optional": true, + "id": "openAI_0-input-modelName-options" + }, + { + "label": "Temperature", + "name": "temperature", + "type": "number", + "default": 0.7, + "optional": true, + "id": "openAI_0-input-temperature-number" + }, + { + "label": "Max Tokens", + "name": "maxTokens", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_0-input-maxTokens-number" + }, + { + "label": "Top Probability", + "name": "topP", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_0-input-topP-number" + }, + { + "label": "Best Of", + "name": "bestOf", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_0-input-bestOf-number" + }, + { + "label": "Frequency Penalty", + "name": "frequencyPenalty", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_0-input-frequencyPenalty-number" + }, + { + "label": "Presence Penalty", + "name": "presencePenalty", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_0-input-presencePenalty-number" + }, + { + "label": "Batch Size", + "name": "batchSize", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_0-input-batchSize-number" + }, + { + "label": "Timeout", + "name": "timeout", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAI_0-input-timeout-number" + }, + { + "label": "BasePath", + "name": "basepath", + "type": "string", + "optional": true, + "additionalParams": true, + "id": "openAI_0-input-basepath-string" + } + ], + "inputAnchors": [], + "inputs": { + "modelName": "text-davinci-003", + "temperature": 0.7, + "maxTokens": "", + "topP": "", + "bestOf": "", + "frequencyPenalty": "", + "presencePenalty": "", + "batchSize": "", 
+ "timeout": "" + }, + "outputAnchors": [ + { + "id": "openAI_0-output-openAI-OpenAI|BaseLLM|BaseLanguageModel|BaseLangChain", + "name": "openAI", + "label": "OpenAI", + "type": "OpenAI | BaseLLM | BaseLanguageModel | BaseLangChain" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 547.3867724775708, + "y": 394.1919189424442 + }, + "dragging": false + }, + { + "width": 300, + "height": 280, + "id": "mrklAgentLLM_0", + "position": { + "x": 1002.5779315680477, + "y": 329.9701389591812 + }, + "type": "customNode", + "data": { + "id": "mrklAgentLLM_0", + "label": "MRKL Agent for LLMs", + "name": "mrklAgentLLM", + "type": "AgentExecutor", + "baseClasses": ["AgentExecutor", "BaseChain", "BaseLangChain"], + "category": "Agents", + "description": "Agent that uses the ReAct Framework to decide what action to take, optimized to be used with LLMs", + "inputParams": [], + "inputAnchors": [ + { + "label": "Allowed Tools", + "name": "tools", + "type": "Tool", + "list": true, + "id": "mrklAgentLLM_0-input-tools-Tool" + }, + { + "label": "Language Model", + "name": "model", + "type": "BaseLanguageModel", + "id": "mrklAgentLLM_0-input-model-BaseLanguageModel" + } + ], + "inputs": { + "tools": ["{{zapierNLA_0.data.instance}}"], + "model": "{{openAI_0.data.instance}}" + }, + "outputAnchors": [ + { + "id": "mrklAgentLLM_0-output-mrklAgentLLM-AgentExecutor|BaseChain|BaseLangChain", + "name": "mrklAgentLLM", + "label": "AgentExecutor", + "type": "AgentExecutor | BaseChain | BaseLangChain" + } + ], + "outputs": {}, + "selected": false + }, + "positionAbsolute": { + "x": 1002.5779315680477, + "y": 329.9701389591812 + }, + "selected": false + } + ], + "edges": [ + { + "source": "zapierNLA_0", + "sourceHandle": "zapierNLA_0-output-zapierNLA-ZapierNLA|Tool", + "target": "mrklAgentLLM_0", + "targetHandle": "mrklAgentLLM_0-input-tools-Tool", + "type": "buttonedge", + "id": 
"zapierNLA_0-zapierNLA_0-output-zapierNLA-ZapierNLA|Tool-mrklAgentLLM_0-mrklAgentLLM_0-input-tools-Tool", + "data": { + "label": "" + } + }, + { + "source": "openAI_0", + "sourceHandle": "openAI_0-output-openAI-OpenAI|BaseLLM|BaseLanguageModel|BaseLangChain", + "target": "mrklAgentLLM_0", + "targetHandle": "mrklAgentLLM_0-input-model-BaseLanguageModel", + "type": "buttonedge", + "id": "openAI_0-openAI_0-output-openAI-OpenAI|BaseLLM|BaseLanguageModel|BaseLangChain-mrklAgentLLM_0-mrklAgentLLM_0-input-model-BaseLanguageModel", + "data": { + "label": "" + } + } + ] +} diff --git a/packages/server/nodemon.json b/packages/server/nodemon.json new file mode 100644 index 0000000000000000000000000000000000000000..d896b48b0c3157c64bf4e23a28cae1f3267a3a1e --- /dev/null +++ b/packages/server/nodemon.json @@ -0,0 +1,6 @@ +{ + "ignore": ["**/*.spec.ts", ".git", "node_modules"], + "watch": ["commands", "index.ts", "src"], + "exec": "yarn oclif-dev", + "ext": "ts" +} diff --git a/packages/server/package.json b/packages/server/package.json new file mode 100644 index 0000000000000000000000000000000000000000..cdfb11c17201ae70aa67d6923224dfb19d08e537 --- /dev/null +++ b/packages/server/package.json @@ -0,0 +1,74 @@ +{ + "name": "flowise", + "version": "1.2.11", + "description": "Flowiseai Server", + "main": "dist/index", + "types": "dist/index.d.ts", + "bin": { + "flowise": "./bin/run" + }, + "files": [ + "bin", + "marketplaces", + "dist", + "npm-shrinkwrap.json", + "oclif.manifest.json", + "oauth2.html" + ], + "oclif": { + "bin": "flowise", + "commands": "./dist/commands" + }, + "scripts": { + "build": "tsc", + "start": "run-script-os", + "start:windows": "cd bin && run start", + "start:default": "cd bin && ./run start", + "dev": "concurrently \"yarn watch\" \"nodemon\"", + "oclif-dev": "run-script-os", + "oclif-dev:windows": "cd bin && dev start", + "oclif-dev:default": "cd bin && ./dev start", + "postpack": "shx rm -f oclif.manifest.json", + "prepack": "yarn build && oclif manifest 
&& oclif readme", + "typeorm": "typeorm-ts-node-commonjs", + "watch": "tsc --watch", + "version": "oclif readme && git add README.md" + }, + "keywords": [], + "homepage": "https://flowiseai.com", + "author": { + "name": "Henry Heng", + "email": "henryheng@flowiseai.com" + }, + "engines": { + "node": ">=18.15.0" + }, + "license": "SEE LICENSE IN LICENSE.md", + "dependencies": { + "@oclif/core": "^1.13.10", + "axios": "^0.27.2", + "cors": "^2.8.5", + "dotenv": "^16.0.0", + "express": "^4.17.3", + "express-basic-auth": "^1.2.1", + "flowise-components": "*", + "flowise-ui": "*", + "moment-timezone": "^0.5.34", + "multer": "^1.4.5-lts.1", + "reflect-metadata": "^0.1.13", + "socket.io": "^4.6.1", + "sqlite3": "^5.1.6", + "typeorm": "^0.3.6" + }, + "devDependencies": { + "@types/cors": "^2.8.12", + "@types/multer": "^1.4.7", + "concurrently": "^7.1.0", + "nodemon": "^2.0.15", + "oclif": "^3", + "run-script-os": "^1.1.6", + "shx": "^0.3.3", + "ts-node": "^10.7.0", + "typescript": "^4.8.4" + } +} diff --git a/packages/server/src/ChatflowPool.ts b/packages/server/src/ChatflowPool.ts new file mode 100644 index 0000000000000000000000000000000000000000..35b0d94784d02bbba6a2040484d24a8f47abd4e4 --- /dev/null +++ b/packages/server/src/ChatflowPool.ts @@ -0,0 +1,47 @@ +import { ICommonObject } from 'flowise-components' +import { IActiveChatflows, INodeData, IReactFlowNode } from './Interface' + +/** + * This pool is to keep track of active chatflow pools + * so we can prevent building langchain flow all over again + */ +export class ChatflowPool { + activeChatflows: IActiveChatflows = {} + + /** + * Add to the pool + * @param {string} chatflowid + * @param {INodeData} endingNodeData + * @param {IReactFlowNode[]} startingNodes + * @param {ICommonObject} overrideConfig + */ + add(chatflowid: string, endingNodeData: INodeData, startingNodes: IReactFlowNode[], overrideConfig?: ICommonObject) { + this.activeChatflows[chatflowid] = { + startingNodes, + endingNodeData, + inSync: true + } 
+ if (overrideConfig) this.activeChatflows[chatflowid].overrideConfig = overrideConfig + } + + /** + * Update to the pool + * @param {string} chatflowid + * @param {boolean} inSync + */ + updateInSync(chatflowid: string, inSync: boolean) { + if (Object.prototype.hasOwnProperty.call(this.activeChatflows, chatflowid)) { + this.activeChatflows[chatflowid].inSync = inSync + } + } + + /** + * Remove from the pool + * @param {string} chatflowid + */ + async remove(chatflowid: string) { + if (Object.prototype.hasOwnProperty.call(this.activeChatflows, chatflowid)) { + delete this.activeChatflows[chatflowid] + } + } +} diff --git a/packages/server/src/ChildProcess.ts b/packages/server/src/ChildProcess.ts new file mode 100644 index 0000000000000000000000000000000000000000..483379d0886d7b974ad25d1a0b50fa7381f12b27 --- /dev/null +++ b/packages/server/src/ChildProcess.ts @@ -0,0 +1,148 @@ +import { IChildProcessMessage, IReactFlowNode, IReactFlowObject, IRunChatflowMessageValue, INodeData } from './Interface' +import { buildLangchain, constructGraphs, getEndingNode, getStartingNodes, resolveVariables } from './utils' + +export class ChildProcess { + /** + * Stop child process when app is killed + */ + static async stopChildProcess() { + setTimeout(() => { + process.exit(0) + }, 50000) + } + + /** + * Process prediction + * @param {IRunChatflowMessageValue} messageValue + * @return {Promise} + */ + async runChildProcess(messageValue: IRunChatflowMessageValue): Promise { + process.on('SIGTERM', ChildProcess.stopChildProcess) + process.on('SIGINT', ChildProcess.stopChildProcess) + + await sendToParentProcess('start', '_') + + // Create a Queue and add our initial node in it + const { endingNodeData, chatflow, incomingInput, componentNodes } = messageValue + + let nodeToExecuteData: INodeData + let addToChatFlowPool: any = {} + + /* Don't rebuild the flow (to avoid duplicated upsert, recomputation) when all these conditions met: + * - Node Data already exists in pool + * - Still in 
sync (i.e the flow has not been modified since) + * - Existing overrideConfig and new overrideConfig are the same + * - Flow doesn't start with nodes that depend on incomingInput.question + ***/ + if (endingNodeData) { + nodeToExecuteData = endingNodeData + } else { + /*** Get chatflows and prepare data ***/ + const flowData = chatflow.flowData + const parsedFlowData: IReactFlowObject = JSON.parse(flowData) + const nodes = parsedFlowData.nodes + const edges = parsedFlowData.edges + + /*** Get Ending Node with Directed Graph ***/ + const { graph, nodeDependencies } = constructGraphs(nodes, edges) + const directedGraph = graph + const endingNodeId = getEndingNode(nodeDependencies, directedGraph) + if (!endingNodeId) { + await sendToParentProcess('error', `Ending node must be either a Chain or Agent`) + return + } + + const endingNodeData = nodes.find((nd) => nd.id === endingNodeId)?.data + if (!endingNodeData) { + await sendToParentProcess('error', `Ending node must be either a Chain or Agent`) + return + } + + if ( + endingNodeData.outputs && + Object.keys(endingNodeData.outputs).length && + !Object.values(endingNodeData.outputs).includes(endingNodeData.name) + ) { + await sendToParentProcess( + 'error', + `Output of ${endingNodeData.label} (${endingNodeData.id}) must be ${endingNodeData.label}, can't be an Output Prediction` + ) + return + } + + /*** Get Starting Nodes with Non-Directed Graph ***/ + const constructedObj = constructGraphs(nodes, edges, true) + const nonDirectedGraph = constructedObj.graph + const { startingNodeIds, depthQueue } = getStartingNodes(nonDirectedGraph, endingNodeId) + + /*** BFS to traverse from Starting Nodes to Ending Node ***/ + const reactFlowNodes = await buildLangchain( + startingNodeIds, + nodes, + graph, + depthQueue, + componentNodes, + incomingInput.question, + incomingInput?.overrideConfig + ) + + const nodeToExecute = reactFlowNodes.find((node: IReactFlowNode) => node.id === endingNodeId) + if (!nodeToExecute) { + await 
sendToParentProcess('error', `Node ${endingNodeId} not found`) + return + } + + const reactFlowNodeData: INodeData = resolveVariables(nodeToExecute.data, reactFlowNodes, incomingInput.question) + nodeToExecuteData = reactFlowNodeData + + const startingNodes = nodes.filter((nd) => startingNodeIds.includes(nd.id)) + addToChatFlowPool = { + chatflowid: chatflow.id, + nodeToExecuteData, + startingNodes, + overrideConfig: incomingInput?.overrideConfig + } + } + + const nodeInstanceFilePath = componentNodes[nodeToExecuteData.name].filePath as string + const nodeModule = await import(nodeInstanceFilePath) + const nodeInstance = new nodeModule.nodeClass() + + const result = await nodeInstance.run(nodeToExecuteData, incomingInput.question, { chatHistory: incomingInput.history }) + + await sendToParentProcess('finish', { result, addToChatFlowPool }) + } +} + +/** + * Send data back to parent process + * @param {string} key Key of message + * @param {*} value Value of message + * @returns {Promise} + */ +async function sendToParentProcess(key: string, value: any): Promise { + // tslint:disable-line:no-any + return new Promise((resolve, reject) => { + process.send!( + { + key, + value + }, + (error: Error) => { + if (error) { + return reject(error) + } + resolve() + } + ) + }) +} + +const childProcess = new ChildProcess() + +process.on('message', async (message: IChildProcessMessage) => { + if (message.key === 'start') { + await childProcess.runChildProcess(message.value) + process.exit() + } +}) diff --git a/packages/server/src/DataSource.ts b/packages/server/src/DataSource.ts new file mode 100644 index 0000000000000000000000000000000000000000..76c8e1445690c89b2ef163db8a6e1f7a16f83a3c --- /dev/null +++ b/packages/server/src/DataSource.ts @@ -0,0 +1,27 @@ +import 'reflect-metadata' +import path from 'path' +import { DataSource } from 'typeorm' +import { ChatFlow } from './entity/ChatFlow' +import { ChatMessage } from './entity/ChatMessage' +import { getUserHome } from 
'./utils' + +let appDataSource: DataSource + +export const init = async (): Promise => { + const homePath = path.join(getUserHome(), '.flowise') + + appDataSource = new DataSource({ + type: 'sqlite', + database: path.resolve(homePath, 'database.sqlite'), + synchronize: true, + entities: [ChatFlow, ChatMessage], + migrations: [] + }) +} + +export function getDataSource(): DataSource { + if (appDataSource === undefined) { + init() + } + return appDataSource +} diff --git a/packages/server/src/Interface.ts b/packages/server/src/Interface.ts new file mode 100644 index 0000000000000000000000000000000000000000..b6876df3f16e2ce01424003de531129b32d0d5e2 --- /dev/null +++ b/packages/server/src/Interface.ts @@ -0,0 +1,154 @@ +import { ICommonObject, INode, INodeData as INodeDataFromComponent, INodeParams } from 'flowise-components' + +export type MessageType = 'apiMessage' | 'userMessage' + +/** + * Databases + */ +export interface IChatFlow { + id: string + name: string + flowData: string + apikeyid: string + deployed: boolean + updatedDate: Date + createdDate: Date +} + +export interface IChatMessage { + id: string + role: MessageType + content: string + chatflowid: string + createdDate: Date + sourceDocuments: string +} + +export interface IComponentNodes { + [key: string]: INode +} + +export interface IVariableDict { + [key: string]: string +} + +export interface INodeDependencies { + [key: string]: number +} + +export interface INodeDirectedGraph { + [key: string]: string[] +} + +export interface INodeData extends INodeDataFromComponent { + inputAnchors: INodeParams[] + inputParams: INodeParams[] + outputAnchors: INodeParams[] +} + +export interface IReactFlowNode { + id: string + position: { + x: number + y: number + } + type: string + data: INodeData + positionAbsolute: { + x: number + y: number + } + z: number + handleBounds: { + source: any + target: any + } + width: number + height: number + selected: boolean + dragging: boolean +} + +export interface 
IReactFlowEdge { + source: string + sourceHandle: string + target: string + targetHandle: string + type: string + id: string + data: { + label: string + } +} + +export interface IReactFlowObject { + nodes: IReactFlowNode[] + edges: IReactFlowEdge[] + viewport: { + x: number + y: number + zoom: number + } +} + +export interface IExploredNode { + [key: string]: { + remainingLoop: number + lastSeenDepth: number + } +} + +export interface INodeQueue { + nodeId: string + depth: number +} + +export interface IDepthQueue { + [key: string]: number +} + +export interface IMessage { + message: string + type: MessageType +} + +export interface IncomingInput { + question: string + history: IMessage[] + overrideConfig?: ICommonObject + socketIOClientId?: string +} + +export interface IActiveChatflows { + [key: string]: { + startingNodes: IReactFlowNode[] + endingNodeData: INodeData + inSync: boolean + overrideConfig?: ICommonObject + } +} + +export interface IOverrideConfig { + node: string + label: string + name: string + type: string +} + +export interface IDatabaseExport { + chatmessages: IChatMessage[] + chatflows: IChatFlow[] + apikeys: ICommonObject[] +} + +export interface IRunChatflowMessageValue { + chatflow: IChatFlow + incomingInput: IncomingInput + componentNodes: IComponentNodes + endingNodeData?: INodeData +} + +export interface IChildProcessMessage { + key: string + value?: any +} diff --git a/packages/server/src/NodesPool.ts b/packages/server/src/NodesPool.ts new file mode 100644 index 0000000000000000000000000000000000000000..1ee506eaeb6fa4f484a80ba6597569a72b684b79 --- /dev/null +++ b/packages/server/src/NodesPool.ts @@ -0,0 +1,62 @@ +import { IComponentNodes } from './Interface' + +import path from 'path' +import { Dirent } from 'fs' +import { getNodeModulesPackagePath } from './utils' +import { promises } from 'fs' + +export class NodesPool { + componentNodes: IComponentNodes = {} + + /** + * Initialize to get all nodes + */ + async initialize() { + const 
packagePath = getNodeModulesPackagePath('flowise-components') + const nodesPath = path.join(packagePath, 'dist', 'nodes') + const nodeFiles = await this.getFiles(nodesPath) + return Promise.all( + nodeFiles.map(async (file) => { + if (file.endsWith('.js')) { + const nodeModule = await require(file) + + if (nodeModule.nodeClass) { + const newNodeInstance = new nodeModule.nodeClass() + newNodeInstance.filePath = file + + this.componentNodes[newNodeInstance.name] = newNodeInstance + + // Replace file icon with absolute path + if ( + newNodeInstance.icon && + (newNodeInstance.icon.endsWith('.svg') || + newNodeInstance.icon.endsWith('.png') || + newNodeInstance.icon.endsWith('.jpg')) + ) { + const filePath = file.replace(/\\/g, '/').split('/') + filePath.pop() + const nodeIconAbsolutePath = `${filePath.join('/')}/${newNodeInstance.icon}` + this.componentNodes[newNodeInstance.name].icon = nodeIconAbsolutePath + } + } + } + }) + ) + } + + /** + * Recursive function to get node files + * @param {string} dir + * @returns {string[]} + */ + async getFiles(dir: string): Promise { + const dirents = await promises.readdir(dir, { withFileTypes: true }) + const files = await Promise.all( + dirents.map((dirent: Dirent) => { + const res = path.resolve(dir, dirent.name) + return dirent.isDirectory() ? 
this.getFiles(res) : res + }) + ) + return Array.prototype.concat(...files) + } +} diff --git a/packages/server/src/commands/start.ts b/packages/server/src/commands/start.ts new file mode 100644 index 0000000000000000000000000000000000000000..94b8d9958cb4d5d9c713fce92f1ceda946e78f74 --- /dev/null +++ b/packages/server/src/commands/start.ts @@ -0,0 +1,71 @@ +import { Command, Flags } from '@oclif/core' +import path from 'path' +import * as Server from '../index' +import * as DataSource from '../DataSource' +import dotenv from 'dotenv' + +dotenv.config({ path: path.join(__dirname, '..', '..', '.env'), override: true }) + +enum EXIT_CODE { + SUCCESS = 0, + FAILED = 1 +} +let processExitCode = EXIT_CODE.SUCCESS + +export default class Start extends Command { + static args = [] + static flags = { + FLOWISE_USERNAME: Flags.string(), + FLOWISE_PASSWORD: Flags.string(), + PORT: Flags.string(), + EXECUTION_MODE: Flags.string() + } + + async stopProcess() { + console.info('Shutting down Flowise...') + try { + // Shut down the app after timeout if it ever stuck removing pools + setTimeout(() => { + console.info('Flowise was forced to shut down after 30 secs') + process.exit(processExitCode) + }, 30000) + + // Removing pools + const serverApp = Server.getInstance() + if (serverApp) await serverApp.stopApp() + } catch (error) { + console.error('There was an error shutting down Flowise...', error) + } + process.exit(processExitCode) + } + + async run(): Promise { + process.on('SIGTERM', this.stopProcess) + process.on('SIGINT', this.stopProcess) + + // Prevent throw new Error from crashing the app + // TODO: Get rid of this and send proper error message to ui + process.on('uncaughtException', (err) => { + console.error('uncaughtException: ', err) + }) + + const { flags } = await this.parse(Start) + if (flags.FLOWISE_USERNAME) process.env.FLOWISE_USERNAME = flags.FLOWISE_USERNAME + if (flags.FLOWISE_PASSWORD) process.env.FLOWISE_PASSWORD = flags.FLOWISE_PASSWORD + if (flags.PORT) 
process.env.PORT = flags.PORT + if (flags.EXECUTION_MODE) process.env.EXECUTION_MODE = flags.EXECUTION_MODE + + await (async () => { + try { + this.log('Starting Flowise...') + await DataSource.init() + await Server.start() + } catch (error) { + console.error('There was an error starting Flowise...', error) + processExitCode = EXIT_CODE.FAILED + // @ts-ignore + process.emit('SIGINT') + } + })() + } +} diff --git a/packages/server/src/entity/ChatFlow.ts b/packages/server/src/entity/ChatFlow.ts new file mode 100644 index 0000000000000000000000000000000000000000..d9b12929412ab41c7f96e78eab9df3502e9fb9e0 --- /dev/null +++ b/packages/server/src/entity/ChatFlow.ts @@ -0,0 +1,27 @@ +/* eslint-disable */ +import { Entity, Column, CreateDateColumn, UpdateDateColumn, PrimaryGeneratedColumn } from 'typeorm' +import { IChatFlow } from '../Interface' + +@Entity() +export class ChatFlow implements IChatFlow { + @PrimaryGeneratedColumn('uuid') + id: string + + @Column() + name: string + + @Column() + flowData: string + + @Column({ nullable: true }) + apikeyid: string + + @Column() + deployed: boolean + + @CreateDateColumn() + createdDate: Date + + @UpdateDateColumn() + updatedDate: Date +} diff --git a/packages/server/src/entity/ChatMessage.ts b/packages/server/src/entity/ChatMessage.ts new file mode 100644 index 0000000000000000000000000000000000000000..236dc5f93d8c5d9848cb7bc222ac05d333c389b7 --- /dev/null +++ b/packages/server/src/entity/ChatMessage.ts @@ -0,0 +1,25 @@ +/* eslint-disable */ +import { Entity, Column, CreateDateColumn, PrimaryGeneratedColumn, Index } from 'typeorm' +import { IChatMessage, MessageType } from '../Interface' + +@Entity() +export class ChatMessage implements IChatMessage { + @PrimaryGeneratedColumn('uuid') + id: string + + @Column() + role: MessageType + + @Index() + @Column() + chatflowid: string + + @Column() + content: string + + @Column({ nullable: true }) + sourceDocuments: string + + @CreateDateColumn() + createdDate: Date +} diff --git 
a/packages/server/src/index.ts b/packages/server/src/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..9d7e1c9038bb509f4e641d25f6ec421841f41a9c --- /dev/null +++ b/packages/server/src/index.ts @@ -0,0 +1,688 @@ +import express, { Request, Response } from 'express' +import multer from 'multer' +import path from 'path' +import cors from 'cors' +import http from 'http' +import * as fs from 'fs' +import basicAuth from 'express-basic-auth' +import { Server } from 'socket.io' + +import { + IChatFlow, + IncomingInput, + IReactFlowNode, + IReactFlowObject, + INodeData, + IDatabaseExport, + IRunChatflowMessageValue, + IChildProcessMessage +} from './Interface' +import { + getNodeModulesPackagePath, + getStartingNodes, + buildLangchain, + getEndingNode, + constructGraphs, + resolveVariables, + isStartNodeDependOnInput, + getAPIKeys, + addAPIKey, + updateAPIKey, + deleteAPIKey, + compareKeys, + mapMimeTypeToInputField, + findAvailableConfigs, + isSameOverrideConfig, + replaceAllAPIKeys, + isFlowValidForStream, + isVectorStoreFaiss +} from './utils' +import { cloneDeep } from 'lodash' +import { getDataSource } from './DataSource' +import { NodesPool } from './NodesPool' +import { ChatFlow } from './entity/ChatFlow' +import { ChatMessage } from './entity/ChatMessage' +import { ChatflowPool } from './ChatflowPool' +import { ICommonObject } from 'flowise-components' +import { fork } from 'child_process' + +export class App { + app: express.Application + nodesPool: NodesPool + chatflowPool: ChatflowPool + AppDataSource = getDataSource() + + constructor() { + this.app = express() + } + + async initDatabase() { + // Initialize database + this.AppDataSource.initialize() + .then(async () => { + console.info('📦[server]: Data Source has been initialized!') + + // Initialize pools + this.nodesPool = new NodesPool() + await this.nodesPool.initialize() + + this.chatflowPool = new ChatflowPool() + + // Initialize API keys + await getAPIKeys() + }) + .catch((err) 
=> { + console.error('❌[server]: Error during Data Source initialization:', err) + }) + } + + async config(socketIO?: Server) { + // Limit is needed to allow sending/receiving base64 encoded string + this.app.use(express.json({ limit: '50mb' })) + this.app.use(express.urlencoded({ limit: '50mb', extended: true })) + + // Allow access from * + this.app.use(cors()) + + if (process.env.FLOWISE_USERNAME && process.env.FLOWISE_PASSWORD) { + const username = process.env.FLOWISE_USERNAME + const password = process.env.FLOWISE_PASSWORD + const basicAuthMiddleware = basicAuth({ + users: { [username]: password } + }) + const whitelistURLs = ['/api/v1/prediction/', '/api/v1/node-icon/', '/api/v1/chatflows-streaming'] + this.app.use((req, res, next) => { + if (req.url.includes('/api/v1/')) { + whitelistURLs.some((url) => req.url.includes(url)) ? next() : basicAuthMiddleware(req, res, next) + } else next() + }) + } + + const upload = multer({ dest: `${path.join(__dirname, '..', 'uploads')}/` }) + + // ---------------------------------------- + // Nodes + // ---------------------------------------- + + // Get all component nodes + this.app.get('/api/v1/nodes', (req: Request, res: Response) => { + const returnData = [] + for (const nodeName in this.nodesPool.componentNodes) { + const clonedNode = cloneDeep(this.nodesPool.componentNodes[nodeName]) + returnData.push(clonedNode) + } + return res.json(returnData) + }) + + // Get specific component node via name + this.app.get('/api/v1/nodes/:name', (req: Request, res: Response) => { + if (Object.prototype.hasOwnProperty.call(this.nodesPool.componentNodes, req.params.name)) { + return res.json(this.nodesPool.componentNodes[req.params.name]) + } else { + throw new Error(`Node ${req.params.name} not found`) + } + }) + + // Returns specific component node icon via name + this.app.get('/api/v1/node-icon/:name', (req: Request, res: Response) => { + if (Object.prototype.hasOwnProperty.call(this.nodesPool.componentNodes, req.params.name)) { 
+ const nodeInstance = this.nodesPool.componentNodes[req.params.name] + if (nodeInstance.icon === undefined) { + throw new Error(`Node ${req.params.name} icon not found`) + } + + if (nodeInstance.icon.endsWith('.svg') || nodeInstance.icon.endsWith('.png') || nodeInstance.icon.endsWith('.jpg')) { + const filepath = nodeInstance.icon + res.sendFile(filepath) + } else { + throw new Error(`Node ${req.params.name} icon is missing icon`) + } + } else { + throw new Error(`Node ${req.params.name} not found`) + } + }) + + // ---------------------------------------- + // Chatflows + // ---------------------------------------- + + // Get all chatflows + this.app.get('/api/v1/chatflows', async (req: Request, res: Response) => { + const chatflows: IChatFlow[] = await this.AppDataSource.getRepository(ChatFlow).find() + return res.json(chatflows) + }) + + // Get specific chatflow via id + this.app.get('/api/v1/chatflows/:id', async (req: Request, res: Response) => { + const chatflow = await this.AppDataSource.getRepository(ChatFlow).findOneBy({ + id: req.params.id + }) + if (chatflow) return res.json(chatflow) + return res.status(404).send(`Chatflow ${req.params.id} not found`) + }) + + // Save chatflow + this.app.post('/api/v1/chatflows', async (req: Request, res: Response) => { + const body = req.body + const newChatFlow = new ChatFlow() + Object.assign(newChatFlow, body) + + const chatflow = this.AppDataSource.getRepository(ChatFlow).create(newChatFlow) + const results = await this.AppDataSource.getRepository(ChatFlow).save(chatflow) + + return res.json(results) + }) + + // Update chatflow + this.app.put('/api/v1/chatflows/:id', async (req: Request, res: Response) => { + const chatflow = await this.AppDataSource.getRepository(ChatFlow).findOneBy({ + id: req.params.id + }) + + if (!chatflow) { + res.status(404).send(`Chatflow ${req.params.id} not found`) + return + } + + const body = req.body + const updateChatFlow = new ChatFlow() + Object.assign(updateChatFlow, body) + + 
this.AppDataSource.getRepository(ChatFlow).merge(chatflow, updateChatFlow) + const result = await this.AppDataSource.getRepository(ChatFlow).save(chatflow) + + // Update chatflowpool inSync to false, to build Langchain again because data has been changed + this.chatflowPool.updateInSync(chatflow.id, false) + + return res.json(result) + }) + + // Delete chatflow via id + this.app.delete('/api/v1/chatflows/:id', async (req: Request, res: Response) => { + const results = await this.AppDataSource.getRepository(ChatFlow).delete({ id: req.params.id }) + return res.json(results) + }) + + // Check if chatflow valid for streaming + this.app.get('/api/v1/chatflows-streaming/:id', async (req: Request, res: Response) => { + const chatflow = await this.AppDataSource.getRepository(ChatFlow).findOneBy({ + id: req.params.id + }) + if (!chatflow) return res.status(404).send(`Chatflow ${req.params.id} not found`) + + /*** Get Ending Node with Directed Graph ***/ + const flowData = chatflow.flowData + const parsedFlowData: IReactFlowObject = JSON.parse(flowData) + const nodes = parsedFlowData.nodes + const edges = parsedFlowData.edges + const { graph, nodeDependencies } = constructGraphs(nodes, edges) + const endingNodeId = getEndingNode(nodeDependencies, graph) + if (!endingNodeId) return res.status(500).send(`Ending node must be either a Chain or Agent`) + const endingNodeData = nodes.find((nd) => nd.id === endingNodeId)?.data + if (!endingNodeData) return res.status(500).send(`Ending node must be either a Chain or Agent`) + + const obj = { + isStreaming: isFlowValidForStream(nodes, endingNodeData) + } + return res.json(obj) + }) + + // ---------------------------------------- + // ChatMessage + // ---------------------------------------- + + // Get all chatmessages from chatflowid + this.app.get('/api/v1/chatmessage/:id', async (req: Request, res: Response) => { + const chatmessages = await this.AppDataSource.getRepository(ChatMessage).findBy({ + chatflowid: req.params.id + }) + 
return res.json(chatmessages) + }) + + // Add chatmessages for chatflowid + this.app.post('/api/v1/chatmessage/:id', async (req: Request, res: Response) => { + const body = req.body + const newChatMessage = new ChatMessage() + Object.assign(newChatMessage, body) + + const chatmessage = this.AppDataSource.getRepository(ChatMessage).create(newChatMessage) + const results = await this.AppDataSource.getRepository(ChatMessage).save(chatmessage) + + return res.json(results) + }) + + // Delete all chatmessages from chatflowid + this.app.delete('/api/v1/chatmessage/:id', async (req: Request, res: Response) => { + const results = await this.AppDataSource.getRepository(ChatMessage).delete({ chatflowid: req.params.id }) + return res.json(results) + }) + + // ---------------------------------------- + // Configuration + // ---------------------------------------- + + this.app.get('/api/v1/flow-config/:id', async (req: Request, res: Response) => { + const chatflow = await this.AppDataSource.getRepository(ChatFlow).findOneBy({ + id: req.params.id + }) + if (!chatflow) return res.status(404).send(`Chatflow ${req.params.id} not found`) + const flowData = chatflow.flowData + const parsedFlowData: IReactFlowObject = JSON.parse(flowData) + const nodes = parsedFlowData.nodes + const availableConfigs = findAvailableConfigs(nodes) + return res.json(availableConfigs) + }) + + // ---------------------------------------- + // Export Load Chatflow & ChatMessage & Apikeys + // ---------------------------------------- + + this.app.get('/api/v1/database/export', async (req: Request, res: Response) => { + const chatmessages = await this.AppDataSource.getRepository(ChatMessage).find() + const chatflows = await this.AppDataSource.getRepository(ChatFlow).find() + const apikeys = await getAPIKeys() + const result: IDatabaseExport = { + chatmessages, + chatflows, + apikeys + } + return res.json(result) + }) + + this.app.post('/api/v1/database/load', async (req: Request, res: Response) => { + const 
databaseItems: IDatabaseExport = req.body + + await this.AppDataSource.getRepository(ChatFlow).delete({}) + await this.AppDataSource.getRepository(ChatMessage).delete({}) + + let error = '' + + // Get a new query runner instance + const queryRunner = this.AppDataSource.createQueryRunner() + + // Start a new transaction + await queryRunner.startTransaction() + + try { + const chatflows: ChatFlow[] = databaseItems.chatflows + const chatmessages: ChatMessage[] = databaseItems.chatmessages + + await queryRunner.manager.insert(ChatFlow, chatflows) + await queryRunner.manager.insert(ChatMessage, chatmessages) + + await queryRunner.commitTransaction() + } catch (err: any) { + error = err?.message ?? 'Error loading database' + await queryRunner.rollbackTransaction() + } finally { + await queryRunner.release() + } + + await replaceAllAPIKeys(databaseItems.apikeys) + + if (error) return res.status(500).send(error) + return res.status(201).send('OK') + }) + + // ---------------------------------------- + // Prediction + // ---------------------------------------- + + // Send input message and get prediction result (External) + this.app.post('/api/v1/prediction/:id', upload.array('files'), async (req: Request, res: Response) => { + await this.processPrediction(req, res, socketIO) + }) + + // Send input message and get prediction result (Internal) + this.app.post('/api/v1/internal-prediction/:id', async (req: Request, res: Response) => { + await this.processPrediction(req, res, socketIO, true) + }) + + // ---------------------------------------- + // Marketplaces + // ---------------------------------------- + + // Get all chatflows for marketplaces + this.app.get('/api/v1/marketplaces', async (req: Request, res: Response) => { + const marketplaceDir = path.join(__dirname, '..', 'marketplaces') + const jsonsInDir = fs.readdirSync(marketplaceDir).filter((file) => path.extname(file) === '.json') + const templates: any[] = [] + jsonsInDir.forEach((file, index) => { + const 
filePath = path.join(__dirname, '..', 'marketplaces', file) + const fileData = fs.readFileSync(filePath) + const fileDataObj = JSON.parse(fileData.toString()) + const template = { + id: index, + name: file.split('.json')[0], + flowData: fileData.toString(), + description: fileDataObj?.description || '' + } + templates.push(template) + }) + return res.json(templates) + }) + + // ---------------------------------------- + // API Keys + // ---------------------------------------- + + // Get api keys + this.app.get('/api/v1/apikey', async (req: Request, res: Response) => { + const keys = await getAPIKeys() + return res.json(keys) + }) + + // Add new api key + this.app.post('/api/v1/apikey', async (req: Request, res: Response) => { + const keys = await addAPIKey(req.body.keyName) + return res.json(keys) + }) + + // Update api key + this.app.put('/api/v1/apikey/:id', async (req: Request, res: Response) => { + const keys = await updateAPIKey(req.params.id, req.body.keyName) + return res.json(keys) + }) + + // Delete new api key + this.app.delete('/api/v1/apikey/:id', async (req: Request, res: Response) => { + const keys = await deleteAPIKey(req.params.id) + return res.json(keys) + }) + + // ---------------------------------------- + // Serve UI static + // ---------------------------------------- + + const packagePath = getNodeModulesPackagePath('flowise-ui') + const uiBuildPath = path.join(packagePath, 'build') + const uiHtmlPath = path.join(packagePath, 'build', 'index.html') + + this.app.use('/', express.static(uiBuildPath)) + + // All other requests not handled will return React app + this.app.use((req, res) => { + res.sendFile(uiHtmlPath) + }) + } + + /** + * Validate API Key + * @param {Request} req + * @param {Response} res + * @param {ChatFlow} chatflow + */ + async validateKey(req: Request, res: Response, chatflow: ChatFlow) { + const chatFlowApiKeyId = chatflow.apikeyid + const authorizationHeader = (req.headers['Authorization'] as string) ?? 
(req.headers['authorization'] as string) ?? '' + + if (chatFlowApiKeyId && !authorizationHeader) return res.status(401).send(`Unauthorized`) + + const suppliedKey = authorizationHeader.split(`Bearer `).pop() + if (chatFlowApiKeyId && suppliedKey) { + const keys = await getAPIKeys() + const apiSecret = keys.find((key) => key.id === chatFlowApiKeyId)?.apiSecret + if (!compareKeys(apiSecret, suppliedKey)) return res.status(401).send(`Unauthorized`) + } + } + + /** + * Start child process + * @param {ChatFlow} chatflow + * @param {IncomingInput} incomingInput + * @param {INodeData} endingNodeData + */ + async startChildProcess(chatflow: ChatFlow, incomingInput: IncomingInput, endingNodeData?: INodeData) { + try { + const controller = new AbortController() + const { signal } = controller + + let childpath = path.join(__dirname, '..', 'dist', 'ChildProcess.js') + if (!fs.existsSync(childpath)) childpath = 'ChildProcess.ts' + + const childProcess = fork(childpath, [], { signal }) + + const value = { + chatflow, + incomingInput, + componentNodes: cloneDeep(this.nodesPool.componentNodes), + endingNodeData + } as IRunChatflowMessageValue + childProcess.send({ key: 'start', value } as IChildProcessMessage) + + let childProcessTimeout: NodeJS.Timeout + + return new Promise((resolve, reject) => { + childProcess.on('message', async (message: IChildProcessMessage) => { + if (message.key === 'finish') { + const { result, addToChatFlowPool } = message.value as ICommonObject + if (childProcessTimeout) { + clearTimeout(childProcessTimeout) + } + if (Object.keys(addToChatFlowPool).length) { + const { chatflowid, nodeToExecuteData, startingNodes, overrideConfig } = addToChatFlowPool + this.chatflowPool.add(chatflowid, nodeToExecuteData, startingNodes, overrideConfig) + } + resolve(result) + } + if (message.key === 'start') { + if (process.env.EXECUTION_TIMEOUT) { + childProcessTimeout = setTimeout(async () => { + childProcess.kill() + resolve(undefined) + }, 
parseInt(process.env.EXECUTION_TIMEOUT, 10)) + } + } + if (message.key === 'error') { + let errMessage = message.value as string + if (childProcessTimeout) { + clearTimeout(childProcessTimeout) + } + reject(errMessage) + } + }) + }) + } catch (err) { + console.error(err) + } + } + + /** + * Process Prediction + * @param {Request} req + * @param {Response} res + * @param {Server} socketIO + * @param {boolean} isInternal + */ + async processPrediction(req: Request, res: Response, socketIO?: Server, isInternal = false) { + try { + const chatflowid = req.params.id + let incomingInput: IncomingInput = req.body + + let nodeToExecuteData: INodeData + + const chatflow = await this.AppDataSource.getRepository(ChatFlow).findOneBy({ + id: chatflowid + }) + if (!chatflow) return res.status(404).send(`Chatflow ${chatflowid} not found`) + + if (!isInternal) { + await this.validateKey(req, res, chatflow) + } + + let isStreamValid = false + + const files = (req.files as any[]) || [] + + if (files.length) { + const overrideConfig: ICommonObject = { ...req.body } + for (const file of files) { + const fileData = fs.readFileSync(file.path, { encoding: 'base64' }) + const dataBase64String = `data:${file.mimetype};base64,${fileData},filename:${file.filename}` + + const fileInputField = mapMimeTypeToInputField(file.mimetype) + if (overrideConfig[fileInputField]) { + overrideConfig[fileInputField] = JSON.stringify([...JSON.parse(overrideConfig[fileInputField]), dataBase64String]) + } else { + overrideConfig[fileInputField] = JSON.stringify([dataBase64String]) + } + } + incomingInput = { + question: req.body.question ?? 
'hello', + overrideConfig, + history: [] + } + } + + /* Don't rebuild the flow (to avoid duplicated upsert, recomputation) when all these conditions met: + * - Node Data already exists in pool + * - Still in sync (i.e the flow has not been modified since) + * - Existing overrideConfig and new overrideConfig are the same + * - Flow doesn't start with nodes that depend on incomingInput.question + ***/ + const isRebuildNeeded = () => { + return ( + Object.prototype.hasOwnProperty.call(this.chatflowPool.activeChatflows, chatflowid) && + this.chatflowPool.activeChatflows[chatflowid].inSync && + isSameOverrideConfig( + isInternal, + this.chatflowPool.activeChatflows[chatflowid].overrideConfig, + incomingInput.overrideConfig + ) && + !isStartNodeDependOnInput(this.chatflowPool.activeChatflows[chatflowid].startingNodes) + ) + } + + if (process.env.EXECUTION_MODE === 'child') { + if (isRebuildNeeded()) { + nodeToExecuteData = this.chatflowPool.activeChatflows[chatflowid].endingNodeData + try { + const result = await this.startChildProcess(chatflow, incomingInput, nodeToExecuteData) + + return res.json(result) + } catch (error) { + return res.status(500).send(error) + } + } else { + try { + const result = await this.startChildProcess(chatflow, incomingInput) + return res.json(result) + } catch (error) { + return res.status(500).send(error) + } + } + } else { + /*** Get chatflows and prepare data ***/ + const flowData = chatflow.flowData + const parsedFlowData: IReactFlowObject = JSON.parse(flowData) + const nodes = parsedFlowData.nodes + const edges = parsedFlowData.edges + + if (isRebuildNeeded()) { + nodeToExecuteData = this.chatflowPool.activeChatflows[chatflowid].endingNodeData + isStreamValid = isFlowValidForStream(nodes, nodeToExecuteData) + } else { + /*** Get Ending Node with Directed Graph ***/ + const { graph, nodeDependencies } = constructGraphs(nodes, edges) + const directedGraph = graph + const endingNodeId = getEndingNode(nodeDependencies, directedGraph) + if 
(!endingNodeId) return res.status(500).send(`Ending node must be either a Chain or Agent`) + + const endingNodeData = nodes.find((nd) => nd.id === endingNodeId)?.data + if (!endingNodeData) return res.status(500).send(`Ending node must be either a Chain or Agent`) + + if ( + endingNodeData.outputs && + Object.keys(endingNodeData.outputs).length && + !Object.values(endingNodeData.outputs).includes(endingNodeData.name) + ) { + return res + .status(500) + .send( + `Output of ${endingNodeData.label} (${endingNodeData.id}) must be ${endingNodeData.label}, can't be an Output Prediction` + ) + } + + isStreamValid = isFlowValidForStream(nodes, endingNodeData) + + /*** Get Starting Nodes with Non-Directed Graph ***/ + const constructedObj = constructGraphs(nodes, edges, true) + const nonDirectedGraph = constructedObj.graph + const { startingNodeIds, depthQueue } = getStartingNodes(nonDirectedGraph, endingNodeId) + + /*** BFS to traverse from Starting Nodes to Ending Node ***/ + const reactFlowNodes = await buildLangchain( + startingNodeIds, + nodes, + graph, + depthQueue, + this.nodesPool.componentNodes, + incomingInput.question, + incomingInput?.overrideConfig + ) + + const nodeToExecute = reactFlowNodes.find((node: IReactFlowNode) => node.id === endingNodeId) + if (!nodeToExecute) return res.status(404).send(`Node ${endingNodeId} not found`) + + const reactFlowNodeData: INodeData = resolveVariables(nodeToExecute.data, reactFlowNodes, incomingInput.question) + nodeToExecuteData = reactFlowNodeData + + const startingNodes = nodes.filter((nd) => startingNodeIds.includes(nd.id)) + this.chatflowPool.add(chatflowid, nodeToExecuteData, startingNodes, incomingInput?.overrideConfig) + } + + const nodeInstanceFilePath = this.nodesPool.componentNodes[nodeToExecuteData.name].filePath as string + const nodeModule = await import(nodeInstanceFilePath) + const nodeInstance = new nodeModule.nodeClass() + + isStreamValid = isStreamValid && !isVectorStoreFaiss(nodeToExecuteData) + const 
result = isStreamValid + ? await nodeInstance.run(nodeToExecuteData, incomingInput.question, { + chatHistory: incomingInput.history, + socketIO, + socketIOClientId: incomingInput.socketIOClientId + }) + : await nodeInstance.run(nodeToExecuteData, incomingInput.question, { chatHistory: incomingInput.history }) + + return res.json(result) + } + } catch (e: any) { + return res.status(500).send(e.message) + } + } + + async stopApp() { + try { + const removePromises: any[] = [] + await Promise.all(removePromises) + } catch (e) { + console.error(`❌[server]: Flowise Server shut down error: ${e}`) + } + } +} + +let serverApp: App | undefined + +export async function start(): Promise { + serverApp = new App() + + const port = parseInt(process.env.PORT || '', 10) || 7860 + const server = http.createServer(serverApp.app) + + const io = new Server(server, { + cors: { + origin: '*' + } + }) + + await serverApp.initDatabase() + await serverApp.config(io) + + server.listen(port, () => { + console.info(`⚡️[server]: Flowise Server is listening at ${port}`) + }) +} + +export function getInstance(): App | undefined { + return serverApp +} diff --git a/packages/server/src/utils/index.ts b/packages/server/src/utils/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..18473c511407858d86692bed38f8beff7b81bbf4 --- /dev/null +++ b/packages/server/src/utils/index.ts @@ -0,0 +1,661 @@ +import path from 'path' +import fs from 'fs' +import moment from 'moment' +import { + IComponentNodes, + IDepthQueue, + IExploredNode, + INodeDependencies, + INodeDirectedGraph, + INodeQueue, + IReactFlowEdge, + IReactFlowNode, + IVariableDict, + INodeData, + IOverrideConfig +} from '../Interface' +import { cloneDeep, get, omit, merge } from 'lodash' +import { ICommonObject, getInputVariables } from 'flowise-components' +import { scryptSync, randomBytes, timingSafeEqual } from 'crypto' + +const QUESTION_VAR_PREFIX = 'question' + +/** + * Returns the home folder path of the user if + * 
none can be found it falls back to the current + * working directory + * + */ +export const getUserHome = (): string => { + let variableName = 'HOME' + if (process.platform === 'win32') { + variableName = 'USERPROFILE' + } + + if (process.env[variableName] === undefined) { + // If for some reason the variable does not exist + // fall back to current folder + return process.cwd() + } + return process.env[variableName] as string +} + +/** + * Returns the path of node modules package + * @param {string} packageName + * @returns {string} + */ +export const getNodeModulesPackagePath = (packageName: string): string => { + const checkPaths = [ + path.join(__dirname, '..', 'node_modules', packageName), + path.join(__dirname, '..', '..', 'node_modules', packageName), + path.join(__dirname, '..', '..', '..', 'node_modules', packageName), + path.join(__dirname, '..', '..', '..', '..', 'node_modules', packageName), + path.join(__dirname, '..', '..', '..', '..', '..', 'node_modules', packageName) + ] + for (const checkPath of checkPaths) { + if (fs.existsSync(checkPath)) { + return checkPath + } + } + return '' +} + +/** + * Construct graph and node dependencies score + * @param {IReactFlowNode[]} reactFlowNodes + * @param {IReactFlowEdge[]} reactFlowEdges + * @param {boolean} isNondirected + */ +export const constructGraphs = (reactFlowNodes: IReactFlowNode[], reactFlowEdges: IReactFlowEdge[], isNondirected = false) => { + const nodeDependencies = {} as INodeDependencies + const graph = {} as INodeDirectedGraph + + for (let i = 0; i < reactFlowNodes.length; i += 1) { + const nodeId = reactFlowNodes[i].id + nodeDependencies[nodeId] = 0 + graph[nodeId] = [] + } + + for (let i = 0; i < reactFlowEdges.length; i += 1) { + const source = reactFlowEdges[i].source + const target = reactFlowEdges[i].target + + if (Object.prototype.hasOwnProperty.call(graph, source)) { + graph[source].push(target) + } else { + graph[source] = [target] + } + + if (isNondirected) { + if 
(Object.prototype.hasOwnProperty.call(graph, target)) { + graph[target].push(source) + } else { + graph[target] = [source] + } + } + nodeDependencies[target] += 1 + } + + return { graph, nodeDependencies } +} + +/** + * Get starting nodes and check if flow is valid + * @param {INodeDependencies} graph + * @param {string} endNodeId + */ +export const getStartingNodes = (graph: INodeDirectedGraph, endNodeId: string) => { + const visited = new Set() + const queue: Array<[string, number]> = [[endNodeId, 0]] + const depthQueue: IDepthQueue = { + [endNodeId]: 0 + } + + let maxDepth = 0 + let startingNodeIds: string[] = [] + + while (queue.length > 0) { + const [currentNode, depth] = queue.shift()! + + if (visited.has(currentNode)) { + continue + } + + visited.add(currentNode) + + if (depth > maxDepth) { + maxDepth = depth + startingNodeIds = [currentNode] + } else if (depth === maxDepth) { + startingNodeIds.push(currentNode) + } + + for (const neighbor of graph[currentNode]) { + if (!visited.has(neighbor)) { + queue.push([neighbor, depth + 1]) + depthQueue[neighbor] = depth + 1 + } + } + } + + const depthQueueReversed: IDepthQueue = {} + for (const nodeId in depthQueue) { + if (Object.prototype.hasOwnProperty.call(depthQueue, nodeId)) { + depthQueueReversed[nodeId] = Math.abs(depthQueue[nodeId] - maxDepth) + } + } + + return { startingNodeIds, depthQueue: depthQueueReversed } +} + +/** + * Get ending node and check if flow is valid + * @param {INodeDependencies} nodeDependencies + * @param {INodeDirectedGraph} graph + */ +export const getEndingNode = (nodeDependencies: INodeDependencies, graph: INodeDirectedGraph) => { + let endingNodeId = '' + Object.keys(graph).forEach((nodeId) => { + if (Object.keys(nodeDependencies).length === 1) { + endingNodeId = nodeId + } else if (!graph[nodeId].length && nodeDependencies[nodeId] > 0) { + endingNodeId = nodeId + } + }) + return endingNodeId +} + +/** + * Build langchain from start to end + * @param {string} startingNodeId + * 
@param {IReactFlowNode[]} reactFlowNodes + * @param {INodeDirectedGraph} graph + * @param {IDepthQueue} depthQueue + * @param {IComponentNodes} componentNodes + * @param {string} question + */ +export const buildLangchain = async ( + startingNodeIds: string[], + reactFlowNodes: IReactFlowNode[], + graph: INodeDirectedGraph, + depthQueue: IDepthQueue, + componentNodes: IComponentNodes, + question: string, + overrideConfig?: ICommonObject +) => { + const flowNodes = cloneDeep(reactFlowNodes) + + // Create a Queue and add our initial node in it + const nodeQueue = [] as INodeQueue[] + const exploredNode = {} as IExploredNode + + // In the case of infinite loop, only max 3 loops will be executed + const maxLoop = 3 + + for (let i = 0; i < startingNodeIds.length; i += 1) { + nodeQueue.push({ nodeId: startingNodeIds[i], depth: 0 }) + exploredNode[startingNodeIds[i]] = { remainingLoop: maxLoop, lastSeenDepth: 0 } + } + + while (nodeQueue.length) { + const { nodeId, depth } = nodeQueue.shift() as INodeQueue + + const reactFlowNode = flowNodes.find((nd) => nd.id === nodeId) + const nodeIndex = flowNodes.findIndex((nd) => nd.id === nodeId) + if (!reactFlowNode || reactFlowNode === undefined || nodeIndex < 0) continue + + try { + const nodeInstanceFilePath = componentNodes[reactFlowNode.data.name].filePath as string + const nodeModule = await import(nodeInstanceFilePath) + const newNodeInstance = new nodeModule.nodeClass() + + let flowNodeData = cloneDeep(reactFlowNode.data) + if (overrideConfig) flowNodeData = replaceInputsWithConfig(flowNodeData, overrideConfig) + const reactFlowNodeData: INodeData = resolveVariables(flowNodeData, flowNodes, question) + + flowNodes[nodeIndex].data.instance = await newNodeInstance.init(reactFlowNodeData, question) + } catch (e: any) { + console.error(e) + throw new Error(e) + } + + const neighbourNodeIds = graph[nodeId] + const nextDepth = depth + 1 + + // Find other nodes that are on the same depth level + const sameDepthNodeIds = 
Object.keys(depthQueue).filter((key) => depthQueue[key] === nextDepth) + + for (const id of sameDepthNodeIds) { + if (neighbourNodeIds.includes(id)) continue + neighbourNodeIds.push(id) + } + + for (let i = 0; i < neighbourNodeIds.length; i += 1) { + const neighNodeId = neighbourNodeIds[i] + + // If nodeId has been seen, cycle detected + if (Object.prototype.hasOwnProperty.call(exploredNode, neighNodeId)) { + const { remainingLoop, lastSeenDepth } = exploredNode[neighNodeId] + + if (lastSeenDepth === nextDepth) continue + + if (remainingLoop === 0) { + break + } + const remainingLoopMinusOne = remainingLoop - 1 + exploredNode[neighNodeId] = { remainingLoop: remainingLoopMinusOne, lastSeenDepth: nextDepth } + nodeQueue.push({ nodeId: neighNodeId, depth: nextDepth }) + } else { + exploredNode[neighNodeId] = { remainingLoop: maxLoop, lastSeenDepth: nextDepth } + nodeQueue.push({ nodeId: neighNodeId, depth: nextDepth }) + } + } + } + return flowNodes +} + +/** + * Get variable value from outputResponses.output + * @param {string} paramValue + * @param {IReactFlowNode[]} reactFlowNodes + * @param {string} question + * @param {boolean} isAcceptVariable + * @returns {string} + */ +export const getVariableValue = (paramValue: string, reactFlowNodes: IReactFlowNode[], question: string, isAcceptVariable = false) => { + let returnVal = paramValue + const variableStack = [] + const variableDict = {} as IVariableDict + let startIdx = 0 + const endIdx = returnVal.length - 1 + + while (startIdx < endIdx) { + const substr = returnVal.substring(startIdx, startIdx + 2) + + // Store the opening double curly bracket + if (substr === '{{') { + variableStack.push({ substr, startIdx: startIdx + 2 }) + } + + // Found the complete variable + if (substr === '}}' && variableStack.length > 0 && variableStack[variableStack.length - 1].substr === '{{') { + const variableStartIdx = variableStack[variableStack.length - 1].startIdx + const variableEndIdx = startIdx + const variableFullPath = 
returnVal.substring(variableStartIdx, variableEndIdx) + + if (isAcceptVariable && variableFullPath === QUESTION_VAR_PREFIX) { + variableDict[`{{${variableFullPath}}}`] = question + } + + // Split by first occurrence of '.' to get just nodeId + const [variableNodeId, _] = variableFullPath.split('.') + const executedNode = reactFlowNodes.find((nd) => nd.id === variableNodeId) + if (executedNode) { + const variableValue = get(executedNode.data, 'instance') + if (isAcceptVariable) { + variableDict[`{{${variableFullPath}}}`] = variableValue + } else { + returnVal = variableValue + } + } + variableStack.pop() + } + startIdx += 1 + } + + if (isAcceptVariable) { + const variablePaths = Object.keys(variableDict) + variablePaths.sort() // Sort by length of variable path because longer path could possibly contains nested variable + variablePaths.forEach((path) => { + const variableValue = variableDict[path] + // Replace all occurrence + returnVal = returnVal.split(path).join(variableValue) + }) + return returnVal + } + return returnVal +} + +/** + * Temporarily disable streaming if vectorStore is Faiss + * @param {INodeData} flowNodeData + * @returns {boolean} + */ +export const isVectorStoreFaiss = (flowNodeData: INodeData) => { + if (flowNodeData.inputs && flowNodeData.inputs.vectorStoreRetriever) { + const vectorStoreRetriever = flowNodeData.inputs.vectorStoreRetriever + if (typeof vectorStoreRetriever === 'string' && vectorStoreRetriever.includes('faiss')) return true + if ( + typeof vectorStoreRetriever === 'object' && + vectorStoreRetriever.vectorStore && + vectorStoreRetriever.vectorStore.constructor.name === 'FaissStore' + ) + return true + } + return false +} + +/** + * Loop through each inputs and resolve variable if neccessary + * @param {INodeData} reactFlowNodeData + * @param {IReactFlowNode[]} reactFlowNodes + * @param {string} question + * @returns {INodeData} + */ +export const resolveVariables = (reactFlowNodeData: INodeData, reactFlowNodes: IReactFlowNode[], 
question: string): INodeData => { + let flowNodeData = cloneDeep(reactFlowNodeData) + if (reactFlowNodeData.instance && isVectorStoreFaiss(reactFlowNodeData)) { + // omit and merge because cloneDeep of instance gives "Illegal invocation" Exception + const flowNodeDataWithoutInstance = cloneDeep(omit(reactFlowNodeData, ['instance'])) + flowNodeData = merge(flowNodeDataWithoutInstance, { instance: reactFlowNodeData.instance }) + } + const types = 'inputs' + + const getParamValues = (paramsObj: ICommonObject) => { + for (const key in paramsObj) { + const paramValue: string = paramsObj[key] + if (Array.isArray(paramValue)) { + const resolvedInstances = [] + for (const param of paramValue) { + const resolvedInstance = getVariableValue(param, reactFlowNodes, question) + resolvedInstances.push(resolvedInstance) + } + paramsObj[key] = resolvedInstances + } else { + const isAcceptVariable = reactFlowNodeData.inputParams.find((param) => param.name === key)?.acceptVariable ?? false + const resolvedInstance = getVariableValue(paramValue, reactFlowNodes, question, isAcceptVariable) + paramsObj[key] = resolvedInstance + } + } + } + + const paramsObj = flowNodeData[types] ?? {} + + getParamValues(paramsObj) + + return flowNodeData +} + +/** + * Loop through each inputs and replace their value with override config values + * @param {INodeData} flowNodeData + * @param {ICommonObject} overrideConfig + * @returns {INodeData} + */ +export const replaceInputsWithConfig = (flowNodeData: INodeData, overrideConfig: ICommonObject) => { + const types = 'inputs' + + const getParamValues = (paramsObj: ICommonObject) => { + for (const key in paramsObj) { + const paramValue: string = paramsObj[key] + paramsObj[key] = overrideConfig[key] ?? paramValue + } + } + + const paramsObj = flowNodeData[types] ?? 
{} + + getParamValues(paramsObj) + + return flowNodeData +} + +/** + * Rebuild flow if LLMChain has dependency on other chains + * User Question => Prompt_0 => LLMChain_0 => Prompt-1 => LLMChain_1 + * @param {IReactFlowNode[]} startingNodes + * @returns {boolean} + */ +export const isStartNodeDependOnInput = (startingNodes: IReactFlowNode[]): boolean => { + for (const node of startingNodes) { + for (const inputName in node.data.inputs) { + const inputVariables = getInputVariables(node.data.inputs[inputName]) + if (inputVariables.length > 0) return true + } + } + return false +} + +/** + * Rebuild flow if new override config is provided + * @param {boolean} isInternal + * @param {ICommonObject} existingOverrideConfig + * @param {ICommonObject} newOverrideConfig + * @returns {boolean} + */ +export const isSameOverrideConfig = ( + isInternal: boolean, + existingOverrideConfig?: ICommonObject, + newOverrideConfig?: ICommonObject +): boolean => { + if (isInternal) { + if (existingOverrideConfig && Object.keys(existingOverrideConfig).length) return false + return true + } + // If existing and new overrideconfig are the same + if ( + existingOverrideConfig && + Object.keys(existingOverrideConfig).length && + newOverrideConfig && + Object.keys(newOverrideConfig).length && + JSON.stringify(existingOverrideConfig) === JSON.stringify(newOverrideConfig) + ) { + return true + } + // If there is no existing and new overrideconfig + if (!existingOverrideConfig && !newOverrideConfig) return true + return false +} + +/** + * Returns the api key path + * @returns {string} + */ +export const getAPIKeyPath = (): string => { + return path.join(__dirname, '..', '..', 'api.json') +} + +/** + * Generate the api key + * @returns {string} + */ +export const generateAPIKey = (): string => { + const buffer = randomBytes(32) + return buffer.toString('base64') +} + +/** + * Generate the secret key + * @param {string} apiKey + * @returns {string} + */ +export const generateSecretHash = (apiKey: 
string): string => { + const salt = randomBytes(8).toString('hex') + const buffer = scryptSync(apiKey, salt, 64) as Buffer + return `${buffer.toString('hex')}.${salt}` +} + +/** + * Verify valid keys + * @param {string} storedKey + * @param {string} suppliedKey + * @returns {boolean} + */ +export const compareKeys = (storedKey: string, suppliedKey: string): boolean => { + const [hashedPassword, salt] = storedKey.split('.') + const buffer = scryptSync(suppliedKey, salt, 64) as Buffer + return timingSafeEqual(Buffer.from(hashedPassword, 'hex'), buffer) +} + +/** + * Get API keys + * @returns {Promise} + */ +export const getAPIKeys = async (): Promise => { + try { + const content = await fs.promises.readFile(getAPIKeyPath(), 'utf8') + return JSON.parse(content) + } catch (error) { + const keyName = 'DefaultKey' + const apiKey = generateAPIKey() + const apiSecret = generateSecretHash(apiKey) + const content = [ + { + keyName, + apiKey, + apiSecret, + createdAt: moment().format('DD-MMM-YY'), + id: randomBytes(16).toString('hex') + } + ] + await fs.promises.writeFile(getAPIKeyPath(), JSON.stringify(content), 'utf8') + return content + } +} + +/** + * Add new API key + * @param {string} keyName + * @returns {Promise} + */ +export const addAPIKey = async (keyName: string): Promise => { + const existingAPIKeys = await getAPIKeys() + const apiKey = generateAPIKey() + const apiSecret = generateSecretHash(apiKey) + const content = [ + ...existingAPIKeys, + { + keyName, + apiKey, + apiSecret, + createdAt: moment().format('DD-MMM-YY'), + id: randomBytes(16).toString('hex') + } + ] + await fs.promises.writeFile(getAPIKeyPath(), JSON.stringify(content), 'utf8') + return content +} + +/** + * Update existing API key + * @param {string} keyIdToUpdate + * @param {string} newKeyName + * @returns {Promise} + */ +export const updateAPIKey = async (keyIdToUpdate: string, newKeyName: string): Promise => { + const existingAPIKeys = await getAPIKeys() + const keyIndex = 
existingAPIKeys.findIndex((key) => key.id === keyIdToUpdate) + if (keyIndex < 0) return [] + existingAPIKeys[keyIndex].keyName = newKeyName + await fs.promises.writeFile(getAPIKeyPath(), JSON.stringify(existingAPIKeys), 'utf8') + return existingAPIKeys +} + +/** + * Delete API key + * @param {string} keyIdToDelete + * @returns {Promise} + */ +export const deleteAPIKey = async (keyIdToDelete: string): Promise => { + const existingAPIKeys = await getAPIKeys() + const result = existingAPIKeys.filter((key) => key.id !== keyIdToDelete) + await fs.promises.writeFile(getAPIKeyPath(), JSON.stringify(result), 'utf8') + return result +} + +/** + * Replace all api keys + * @param {ICommonObject[]} content + * @returns {Promise} + */ +export const replaceAllAPIKeys = async (content: ICommonObject[]): Promise => { + try { + await fs.promises.writeFile(getAPIKeyPath(), JSON.stringify(content), 'utf8') + } catch (error) { + console.error(error) + } +} + +/** + * Map MimeType to InputField + * @param {string} mimeType + * @returns {Promise} + */ +export const mapMimeTypeToInputField = (mimeType: string) => { + switch (mimeType) { + case 'text/plain': + return 'txtFile' + case 'application/pdf': + return 'pdfFile' + case 'application/json': + return 'jsonFile' + case 'text/csv': + return 'csvFile' + case 'application/vnd.openxmlformats-officedocument.wordprocessingml.document': + return 'docxFile' + default: + return '' + } +} + +/** + * Find all available inpur params config + * @param {IReactFlowNode[]} reactFlowNodes + * @returns {Promise} + */ +export const findAvailableConfigs = (reactFlowNodes: IReactFlowNode[]) => { + const configs: IOverrideConfig[] = [] + + for (const flowNode of reactFlowNodes) { + for (const inputParam of flowNode.data.inputParams) { + let obj: IOverrideConfig + if (inputParam.type === 'password' || inputParam.type === 'options') { + continue + } else if (inputParam.type === 'file') { + obj = { + node: flowNode.data.label, + label: inputParam.label, + 
/**
 * Find all available input params config.
 * Skips password/options params; file params are exposed under the shared
 * 'files' name. Duplicate configs are filtered out.
 * @param {IReactFlowNode[]} reactFlowNodes
 * @returns {IOverrideConfig[]}
 */
export const findAvailableConfigs = (reactFlowNodes: IReactFlowNode[]) => {
    const configs: IOverrideConfig[] = []

    for (const flowNode of reactFlowNodes) {
        for (const inputParam of flowNode.data.inputParams) {
            if (inputParam.type === 'password' || inputParam.type === 'options') {
                continue
            }
            const obj: IOverrideConfig =
                inputParam.type === 'file'
                    ? {
                          node: flowNode.data.label,
                          label: inputParam.label,
                          name: 'files',
                          type: inputParam.fileType ?? inputParam.type
                      }
                    : {
                          node: flowNode.data.label,
                          label: inputParam.label,
                          name: inputParam.name,
                          type: inputParam.type
                      }
            if (!configs.some((config) => JSON.stringify(config) === JSON.stringify(obj))) {
                configs.push(obj)
            }
        }
    }

    return configs
}

/**
 * Check to see if flow is valid for streaming.
 * Requires at least one stream-capable Chat Model / LLM node, a Chains
 * ending node, and no Faiss vector store.
 * @param {IReactFlowNode[]} reactFlowNodes
 * @param {INodeData} endingNodeData
 * @returns {boolean}
 */
export const isFlowValidForStream = (reactFlowNodes: IReactFlowNode[], endingNodeData: INodeData) => {
    const streamAvailableLLMs = {
        'Chat Models': ['azureChatOpenAI', 'chatOpenAI', 'chatAnthropic'],
        LLMs: ['azureOpenAI', 'openAI']
    }

    let isChatOrLLMsExist = false
    for (const flowNode of reactFlowNodes) {
        const data = flowNode.data
        if (data.category === 'Chat Models' || data.category === 'LLMs') {
            isChatOrLLMsExist = true
            const validLLMs = streamAvailableLLMs[data.category]
            if (!validLLMs.includes(data.name)) return false
        }
    }

    return isChatOrLLMsExist && endingNodeData.category === 'Chains' && !isVectorStoreFaiss(endingNodeData)
}
diff --git a/packages/server/tsconfig.json b/packages/server/tsconfig.json new file mode 100644 index 0000000000000000000000000000000000000000..693ee1b875901419260cc463a283dc7c28d0e395 --- /dev/null +++ b/packages/server/tsconfig.json @@ -0,0 +1,18 @@ +{ + "compilerOptions": { + "lib": ["es2017"], + "target": "es2017" /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */, + "experimentalDecorators": true /* Enable experimental support for TC39 stage 2 draft decorators. */, + "emitDecoratorMetadata": true /* Emit design-type metadata for decorated declarations in source files. */, + "module": "commonjs" /* Specify what module code is generated.
*/, + "outDir": "dist", + "esModuleInterop": true /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables `allowSyntheticDefaultImports` for type compatibility. */, + "forceConsistentCasingInFileNames": true /* Ensure that casing is correct in imports. */, + "strict": true /* Enable all strict type-checking options. */, + "skipLibCheck": true /* Skip type checking all .d.ts files. */, + "sourceMap": true, + "strictPropertyInitialization": false, + "declaration": true + }, + "include": ["src"] +} diff --git a/packages/ui/.env.example b/packages/ui/.env.example new file mode 100644 index 0000000000000000000000000000000000000000..25241b73a372f72529115704bdd0a03940374f54 --- /dev/null +++ b/packages/ui/.env.example @@ -0,0 +1 @@ +PORT=8080 diff --git a/packages/ui/.npmignore b/packages/ui/.npmignore new file mode 100644 index 0000000000000000000000000000000000000000..251188673d84b87c2960ae1efd5e9a7f53353bb4 --- /dev/null +++ b/packages/ui/.npmignore @@ -0,0 +1,13 @@ +/tests +/src +/public +!build + +yarn-debug.log* +yarn-error.log* + +.eslintrc +.prettierignore +.prettierrc +jsconfig.json + diff --git a/packages/ui/README.md b/packages/ui/README.md new file mode 100644 index 0000000000000000000000000000000000000000..fff7f9ea732789ac6960a24d9a5b6d2eaa37a6e0 --- /dev/null +++ b/packages/ui/README.md @@ -0,0 +1,17 @@ + + +# Flowise UI + +React frontend ui for Flowise. + +![Flowise](https://github.com/FlowiseAI/Flowise/blob/main/images/flowise.gif?raw=true) + +Install: + +```bash +npm i flowise-ui +``` + +## License + +Source code in this repository is made available under the [MIT License](https://github.com/FlowiseAI/Flowise/blob/master/LICENSE.md). 
diff --git a/packages/ui/jsconfig.json b/packages/ui/jsconfig.json new file mode 100644 index 0000000000000000000000000000000000000000..d2071c6a9fe120e76319b3e36c662e02d317edbe --- /dev/null +++ b/packages/ui/jsconfig.json @@ -0,0 +1,9 @@ +{ + "compilerOptions": { + "target": "esnext", + "module": "commonjs", + "baseUrl": "src" + }, + "include": ["src/**/*"], + "exclude": ["node_modules"] +} diff --git a/packages/ui/package.json b/packages/ui/package.json new file mode 100644 index 0000000000000000000000000000000000000000..0727fc875a7c8f0df697e62ca701cb794f048cf1 --- /dev/null +++ b/packages/ui/package.json @@ -0,0 +1,82 @@ +{ + "name": "flowise-ui", + "version": "1.2.10", + "license": "SEE LICENSE IN LICENSE.md", + "homepage": "https://flowiseai.com", + "author": { + "name": "HenryHeng", + "email": "henryheng@flowiseai.com" + }, + "dependencies": { + "@emotion/cache": "^11.4.0", + "@emotion/react": "^11.10.6", + "@emotion/styled": "^11.10.6", + "@mui/icons-material": "^5.0.3", + "@mui/material": "^5.11.12", + "@tabler/icons": "^1.39.1", + "clsx": "^1.1.1", + "formik": "^2.2.6", + "framer-motion": "^4.1.13", + "history": "^5.0.0", + "html-react-parser": "^3.0.4", + "lodash": "^4.17.21", + "moment": "^2.29.3", + "notistack": "^2.0.4", + "prismjs": "^1.28.0", + "prop-types": "^15.7.2", + "react": "^18.2.0", + "react-code-blocks": "^0.0.9-0", + "react-datepicker": "^4.8.0", + "react-device-detect": "^1.17.0", + "react-dom": "^18.2.0", + "react-json-view": "^1.21.3", + "react-markdown": "^8.0.6", + "react-perfect-scrollbar": "^1.5.8", + "react-redux": "^8.0.5", + "react-router": "~6.3.0", + "react-router-dom": "~6.3.0", + "react-simple-code-editor": "^0.11.2", + "react-syntax-highlighter": "^15.5.0", + "reactflow": "^11.5.6", + "redux": "^4.0.5", + "rehype-mathjax": "^4.0.2", + "remark-gfm": "^3.0.1", + "remark-math": "^5.1.1", + "socket.io-client": "^4.6.1", + "yup": "^0.32.9" + }, + "scripts": { + "start": "react-scripts start", + "dev": "react-scripts start", + 
"build": "react-scripts build", + "test": "react-scripts test", + "eject": "react-scripts eject" + }, + "babel": { + "presets": [ + "@babel/preset-react" + ] + }, + "browserslist": { + "production": [ + ">0.2%", + "not dead", + "not op_mini all" + ], + "development": [ + "last 1 chrome version", + "last 1 firefox version", + "last 1 safari version" + ] + }, + "devDependencies": { + "@babel/eslint-parser": "^7.15.8", + "@testing-library/jest-dom": "^5.11.10", + "@testing-library/react": "^14.0.0", + "@testing-library/user-event": "^12.8.3", + "pretty-quick": "^3.1.3", + "react-scripts": "^5.0.1", + "sass": "^1.42.1", + "typescript": "^4.8.4" + } +} diff --git a/packages/ui/public/favicon-16x16.png b/packages/ui/public/favicon-16x16.png new file mode 100644 index 0000000000000000000000000000000000000000..c056f6a9a6cbd680accff15f1a8308a08779ecf3 Binary files /dev/null and b/packages/ui/public/favicon-16x16.png differ diff --git a/packages/ui/public/favicon-32x32.png b/packages/ui/public/favicon-32x32.png new file mode 100644 index 0000000000000000000000000000000000000000..857abed066ef41d8cecce8f1c90a07bf850597d4 Binary files /dev/null and b/packages/ui/public/favicon-32x32.png differ diff --git a/packages/ui/public/favicon.ico b/packages/ui/public/favicon.ico new file mode 100644 index 0000000000000000000000000000000000000000..ee99d266498ee34c78c93497ce48df31c8315409 Binary files /dev/null and b/packages/ui/public/favicon.ico differ diff --git a/packages/ui/public/index.html b/packages/ui/public/index.html new file mode 100644 index 0000000000000000000000000000000000000000..270cc8058097c6d3b50f49befa703b496665807c --- /dev/null +++ b/packages/ui/public/index.html @@ -0,0 +1,62 @@ + + + + Flowise - LangchainJS UI + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+ + + diff --git a/packages/ui/src/App.js b/packages/ui/src/App.js new file mode 100644 index 0000000000000000000000000000000000000000..37b914bb1f511859063f678ba72a375ffc320a6e --- /dev/null +++ b/packages/ui/src/App.js @@ -0,0 +1,32 @@ +import { useSelector } from 'react-redux' + +import { ThemeProvider } from '@mui/material/styles' +import { CssBaseline, StyledEngineProvider } from '@mui/material' + +// routing +import Routes from 'routes' + +// defaultTheme +import themes from 'themes' + +// project imports +import NavigationScroll from 'layout/NavigationScroll' + +// ==============================|| APP ||============================== // + +const App = () => { + const customization = useSelector((state) => state.customization) + + return ( + + + + + + + + + ) +} + +export default App diff --git a/packages/ui/src/api/apikey.js b/packages/ui/src/api/apikey.js new file mode 100644 index 0000000000000000000000000000000000000000..aed0a2d5f238e521473b85c324929f8f92ed0638 --- /dev/null +++ b/packages/ui/src/api/apikey.js @@ -0,0 +1,16 @@ +import client from './client' + +const getAllAPIKeys = () => client.get('/apikey') + +const createNewAPI = (body) => client.post(`/apikey`, body) + +const updateAPI = (id, body) => client.put(`/apikey/${id}`, body) + +const deleteAPI = (id) => client.delete(`/apikey/${id}`) + +export default { + getAllAPIKeys, + createNewAPI, + updateAPI, + deleteAPI +} diff --git a/packages/ui/src/api/chatflows.js b/packages/ui/src/api/chatflows.js new file mode 100644 index 0000000000000000000000000000000000000000..1cd1ebb09a2a9f25884b38f2cc060da0056eabac --- /dev/null +++ b/packages/ui/src/api/chatflows.js @@ -0,0 +1,22 @@ +import client from './client' + +const getAllChatflows = () => client.get('/chatflows') + +const getSpecificChatflow = (id) => client.get(`/chatflows/${id}`) + +const createNewChatflow = (body) => client.post(`/chatflows`, body) + +const updateChatflow = (id, body) => client.put(`/chatflows/${id}`, body) + +const 
deleteChatflow = (id) => client.delete(`/chatflows/${id}`) + +const getIsChatflowStreaming = (id) => client.get(`/chatflows-streaming/${id}`) + +export default { + getAllChatflows, + getSpecificChatflow, + createNewChatflow, + updateChatflow, + deleteChatflow, + getIsChatflowStreaming +} diff --git a/packages/ui/src/api/chatmessage.js b/packages/ui/src/api/chatmessage.js new file mode 100644 index 0000000000000000000000000000000000000000..d93068e65e8baf7ba708b71675e89de6ef871c6a --- /dev/null +++ b/packages/ui/src/api/chatmessage.js @@ -0,0 +1,13 @@ +import client from './client' + +const getChatmessageFromChatflow = (id) => client.get(`/chatmessage/${id}`) + +const createNewChatmessage = (id, body) => client.post(`/chatmessage/${id}`, body) + +const deleteChatmessage = (id) => client.delete(`/chatmessage/${id}`) + +export default { + getChatmessageFromChatflow, + createNewChatmessage, + deleteChatmessage +} diff --git a/packages/ui/src/api/client.js b/packages/ui/src/api/client.js new file mode 100644 index 0000000000000000000000000000000000000000..8235bde4ca15e02f12df2f11bad79ab612fd6099 --- /dev/null +++ b/packages/ui/src/api/client.js @@ -0,0 +1,25 @@ +import axios from 'axios' +import { baseURL } from 'store/constant' + +const apiClient = axios.create({ + baseURL: `${baseURL}/api/v1`, + headers: { + 'Content-type': 'application/json' + } +}) + +apiClient.interceptors.request.use(function (config) { + const username = localStorage.getItem('username') + const password = localStorage.getItem('password') + + if (username && password) { + config.auth = { + username, + password + } + } + + return config +}) + +export default apiClient diff --git a/packages/ui/src/api/config.js b/packages/ui/src/api/config.js new file mode 100644 index 0000000000000000000000000000000000000000..0fb8297df1915077be4d1ffd6a32cb0a62086161 --- /dev/null +++ b/packages/ui/src/api/config.js @@ -0,0 +1,7 @@ +import client from './client' + +const getConfig = (id) => 
client.get(`/flow-config/${id}`) + +export default { + getConfig +} diff --git a/packages/ui/src/api/database.js b/packages/ui/src/api/database.js new file mode 100644 index 0000000000000000000000000000000000000000..f36fb72c77516ce568cea4f1a417ea3c03f6e4c0 --- /dev/null +++ b/packages/ui/src/api/database.js @@ -0,0 +1,9 @@ +import client from './client' + +const getExportDatabase = () => client.get('/database/export') +const createLoadDatabase = (body) => client.post('/database/load', body) + +export default { + getExportDatabase, + createLoadDatabase +} diff --git a/packages/ui/src/api/marketplaces.js b/packages/ui/src/api/marketplaces.js new file mode 100644 index 0000000000000000000000000000000000000000..6906fb4e4658552d0d72194bb2ab126837aeca19 --- /dev/null +++ b/packages/ui/src/api/marketplaces.js @@ -0,0 +1,7 @@ +import client from './client' + +const getAllMarketplaces = () => client.get('/marketplaces') + +export default { + getAllMarketplaces +} diff --git a/packages/ui/src/api/nodes.js b/packages/ui/src/api/nodes.js new file mode 100644 index 0000000000000000000000000000000000000000..7eb4c3518bd3c65bb62f95b6c811790b77417562 --- /dev/null +++ b/packages/ui/src/api/nodes.js @@ -0,0 +1,10 @@ +import client from './client' + +const getAllNodes = () => client.get('/nodes') + +const getSpecificNode = (name) => client.get(`/nodes/${name}`) + +export default { + getAllNodes, + getSpecificNode +} diff --git a/packages/ui/src/api/prediction.js b/packages/ui/src/api/prediction.js new file mode 100644 index 0000000000000000000000000000000000000000..d3512843c2a4ac7dd44d3bf8d720825e4fe2ab7e --- /dev/null +++ b/packages/ui/src/api/prediction.js @@ -0,0 +1,7 @@ +import client from './client' + +const sendMessageAndGetPrediction = (id, input) => client.post(`/internal-prediction/${id}`, input) + +export default { + sendMessageAndGetPrediction +} diff --git a/packages/ui/src/assets/images/api_empty.svg b/packages/ui/src/assets/images/api_empty.svg new file mode 100644 
index 0000000000000000000000000000000000000000..45aac7201c8975da0107af559ef99f05fc240b6d --- /dev/null +++ b/packages/ui/src/assets/images/api_empty.svg @@ -0,0 +1 @@ +two_factor_authentication \ No newline at end of file diff --git a/packages/ui/src/assets/images/cURL.svg b/packages/ui/src/assets/images/cURL.svg new file mode 100644 index 0000000000000000000000000000000000000000..7f3644ae23b72fca1072c4ec3527811fbe5c2fd2 --- /dev/null +++ b/packages/ui/src/assets/images/cURL.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/packages/ui/src/assets/images/embed.svg b/packages/ui/src/assets/images/embed.svg new file mode 100644 index 0000000000000000000000000000000000000000..d20ceab0cd2680423a2df0b825c1a7051a7a89ee --- /dev/null +++ b/packages/ui/src/assets/images/embed.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/packages/ui/src/assets/images/flowise_logo.png b/packages/ui/src/assets/images/flowise_logo.png new file mode 100644 index 0000000000000000000000000000000000000000..b0a07e532e6f8c4ad89ad147c6b49753092c77fd Binary files /dev/null and b/packages/ui/src/assets/images/flowise_logo.png differ diff --git a/packages/ui/src/assets/images/flowise_logo_dark.png b/packages/ui/src/assets/images/flowise_logo_dark.png new file mode 100644 index 0000000000000000000000000000000000000000..e53cfc0db5fa91e5f35d182e3de7d2be465b63cd Binary files /dev/null and b/packages/ui/src/assets/images/flowise_logo_dark.png differ diff --git a/packages/ui/src/assets/images/google-login-white.png b/packages/ui/src/assets/images/google-login-white.png new file mode 100644 index 0000000000000000000000000000000000000000..aabc4747823ebe72233f7c324ff7320dc26e1e0a Binary files /dev/null and b/packages/ui/src/assets/images/google-login-white.png differ diff --git a/packages/ui/src/assets/images/javascript.svg b/packages/ui/src/assets/images/javascript.svg new file mode 100644 index 0000000000000000000000000000000000000000..e7f9f2a3082cbd5dc15f42c5fd17550c4f52533d --- 
/dev/null +++ b/packages/ui/src/assets/images/javascript.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/packages/ui/src/assets/images/python.svg b/packages/ui/src/assets/images/python.svg new file mode 100644 index 0000000000000000000000000000000000000000..9cbbf9478df8bac1409f7c788bfd76e15ec8346a --- /dev/null +++ b/packages/ui/src/assets/images/python.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/packages/ui/src/assets/images/workflow_empty.svg b/packages/ui/src/assets/images/workflow_empty.svg new file mode 100644 index 0000000000000000000000000000000000000000..01677fda758fc61de2b41fde42391ffda7e6aca0 --- /dev/null +++ b/packages/ui/src/assets/images/workflow_empty.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/packages/ui/src/assets/scss/_themes-vars.module.scss b/packages/ui/src/assets/scss/_themes-vars.module.scss new file mode 100644 index 0000000000000000000000000000000000000000..374c36c920a8c865fb68d9906df2301d12da6ce5 --- /dev/null +++ b/packages/ui/src/assets/scss/_themes-vars.module.scss @@ -0,0 +1,157 @@ +// paper & background +$paper: #ffffff; + +// primary +$primaryLight: #e3f2fd; +$primaryMain: #2196f3; +$primaryDark: #1e88e5; +$primary200: #90caf9; +$primary800: #1565c0; + +// secondary +$secondaryLight: #ede7f6; +$secondaryMain: #673ab7; +$secondaryDark: #5e35b1; +$secondary200: #b39ddb; +$secondary800: #4527a0; + +// success Colors +$successLight: #cdf5d8; +$success200: #69f0ae; +$successMain: #00e676; +$successDark: #00c853; + +// error +$errorLight: #f3d2d2; +$errorMain: #f44336; +$errorDark: #c62828; + +// orange +$orangeLight: #fbe9e7; +$orangeMain: #ffab91; +$orangeDark: #d84315; + +// warning +$warningLight: #fff8e1; +$warningMain: #ffe57f; +$warningDark: #ffc107; + +// grey +$grey50: #fafafa; +$grey100: #f5f5f5; +$grey200: #eeeeee; +$grey300: #e0e0e0; +$grey500: #9e9e9e; +$grey600: #757575; +$grey700: #616161; +$grey900: #212121; + +// ==============================|| DARK THEME VARIANTS 
||============================== // + +// paper & background +$darkBackground: #191b1f; +$darkPaper: #191b1f; + +// dark 800 & 900 +$darkLevel1: #252525; // level 1 +$darkLevel2: #242424; // level 2 + +// primary dark +$darkPrimaryLight: #23262c; +$darkPrimaryMain: #23262c; +$darkPrimaryDark: #191b1f; +$darkPrimary200: #c9d4e9; +$darkPrimary800: #32353b; + +// secondary dark +$darkSecondaryLight: #454c59; +$darkSecondaryMain: #7c4dff; +$darkSecondaryDark: #ffffff; +$darkSecondary200: #32353b; +$darkSecondary800: #6200ea; + +// text variants +$darkTextTitle: #d7dcec; +$darkTextPrimary: #bdc8f0; +$darkTextSecondary: #8492c4; + +// ==============================|| JAVASCRIPT ||============================== // + +:export { + // paper & background + paper: $paper; + + // primary + primaryLight: $primaryLight; + primary200: $primary200; + primaryMain: $primaryMain; + primaryDark: $primaryDark; + primary800: $primary800; + + // secondary + secondaryLight: $secondaryLight; + secondary200: $secondary200; + secondaryMain: $secondaryMain; + secondaryDark: $secondaryDark; + secondary800: $secondary800; + + // success + successLight: $successLight; + success200: $success200; + successMain: $successMain; + successDark: $successDark; + + // error + errorLight: $errorLight; + errorMain: $errorMain; + errorDark: $errorDark; + + // orange + orangeLight: $orangeLight; + orangeMain: $orangeMain; + orangeDark: $orangeDark; + + // warning + warningLight: $warningLight; + warningMain: $warningMain; + warningDark: $warningDark; + + // grey + grey50: $grey50; + grey100: $grey100; + grey200: $grey200; + grey300: $grey300; + grey500: $grey500; + grey600: $grey600; + grey700: $grey700; + grey900: $grey900; + + // ==============================|| DARK THEME VARIANTS ||============================== // + + // paper & background + darkPaper: $darkPaper; + darkBackground: $darkBackground; + + // dark 800 & 900 + darkLevel1: $darkLevel1; + darkLevel2: $darkLevel2; + + // text variants + 
darkTextTitle: $darkTextTitle; + darkTextPrimary: $darkTextPrimary; + darkTextSecondary: $darkTextSecondary; + + // primary dark + darkPrimaryLight: $darkPrimaryLight; + darkPrimaryMain: $darkPrimaryMain; + darkPrimaryDark: $darkPrimaryDark; + darkPrimary200: $darkPrimary200; + darkPrimary800: $darkPrimary800; + + // secondary dark + darkSecondaryLight: $darkSecondaryLight; + darkSecondaryMain: $darkSecondaryMain; + darkSecondaryDark: $darkSecondaryDark; + darkSecondary200: $darkSecondary200; + darkSecondary800: $darkSecondary800; +} diff --git a/packages/ui/src/assets/scss/style.scss b/packages/ui/src/assets/scss/style.scss new file mode 100644 index 0000000000000000000000000000000000000000..a3a184de78122c57309c5400b78df01da61371df --- /dev/null +++ b/packages/ui/src/assets/scss/style.scss @@ -0,0 +1,122 @@ +// color variants +@import 'themes-vars.module.scss'; + +// third-party +@import '~react-perfect-scrollbar/dist/css/styles.css'; + +// ==============================|| LIGHT BOX ||============================== // +.fullscreen .react-images__blanket { + z-index: 1200; +} + +// ==============================|| PERFECT SCROLLBAR ||============================== // + +.scrollbar-container { + .ps__rail-y { + &:hover > .ps__thumb-y, + &:focus > .ps__thumb-y, + &.ps--clicking .ps__thumb-y { + background-color: $grey500; + width: 5px; + } + } + .ps__thumb-y { + background-color: $grey500; + border-radius: 6px; + width: 5px; + right: 0; + } +} + +.scrollbar-container.ps, +.scrollbar-container > .ps { + &.ps--active-y > .ps__rail-y { + width: 5px; + background-color: transparent !important; + z-index: 999; + &:hover, + &.ps--clicking { + width: 5px; + background-color: transparent; + } + } + &.ps--scrolling-y > .ps__rail-y, + &.ps--scrolling-x > .ps__rail-x { + opacity: 0.4; + background-color: transparent; + } +} + +// ==============================|| ANIMATION KEYFRAMES ||============================== // + +@keyframes wings { + 50% { + transform: translateY(-40px); 
+ } + 100% { + transform: translateY(0px); + } +} + +@keyframes blink { + 50% { + opacity: 0; + } + 100% { + opacity: 1; + } +} + +@keyframes bounce { + 0%, + 20%, + 53%, + to { + animation-timing-function: cubic-bezier(0.215, 0.61, 0.355, 1); + transform: translateZ(0); + } + 40%, + 43% { + animation-timing-function: cubic-bezier(0.755, 0.05, 0.855, 0.06); + transform: translate3d(0, -5px, 0); + } + 70% { + animation-timing-function: cubic-bezier(0.755, 0.05, 0.855, 0.06); + transform: translate3d(0, -7px, 0); + } + 80% { + transition-timing-function: cubic-bezier(0.215, 0.61, 0.355, 1); + transform: translateZ(0); + } + 90% { + transform: translate3d(0, -2px, 0); + } +} + +@keyframes slideY { + 0%, + 50%, + 100% { + transform: translateY(0px); + } + 25% { + transform: translateY(-10px); + } + 75% { + transform: translateY(10px); + } +} + +@keyframes slideX { + 0%, + 50%, + 100% { + transform: translateX(0px); + } + 25% { + transform: translateX(-10px); + } + 75% { + transform: translateX(10px); + } +} diff --git a/packages/ui/src/config.js b/packages/ui/src/config.js new file mode 100644 index 0000000000000000000000000000000000000000..34e5f157a177fe7fe956a865c63e41c9892c3523 --- /dev/null +++ b/packages/ui/src/config.js @@ -0,0 +1,9 @@ +const config = { + // basename: only at build time to set, and Don't add '/' at end off BASENAME for breadcrumbs, also Don't put only '/' use blank('') instead, + basename: '', + defaultPath: '/chatflows', + fontFamily: `'Roboto', sans-serif`, + borderRadius: 12 +} + +export default config diff --git a/packages/ui/src/hooks/useApi.js b/packages/ui/src/hooks/useApi.js new file mode 100644 index 0000000000000000000000000000000000000000..932f0a6e04bded51ce59cc8f7092745e98b974bd --- /dev/null +++ b/packages/ui/src/hooks/useApi.js @@ -0,0 +1,26 @@ +import { useState } from 'react' + +export default (apiFunc) => { + const [data, setData] = useState(null) + const [error, setError] = useState(null) + const [loading, setLoading] = 
useState(false) + + const request = async (...args) => { + setLoading(true) + try { + const result = await apiFunc(...args) + setData(result.data) + } catch (err) { + setError(err || 'Unexpected Error!') + } finally { + setLoading(false) + } + } + + return { + data, + error, + loading, + request + } +} diff --git a/packages/ui/src/hooks/useConfirm.js b/packages/ui/src/hooks/useConfirm.js new file mode 100644 index 0000000000000000000000000000000000000000..8e00d21521f0b01011f403f6ac9b7e4034f25fbf --- /dev/null +++ b/packages/ui/src/hooks/useConfirm.js @@ -0,0 +1,37 @@ +import { useContext } from 'react' +import ConfirmContext from 'store/context/ConfirmContext' +import { HIDE_CONFIRM, SHOW_CONFIRM } from 'store/actions' + +let resolveCallback +const useConfirm = () => { + const [confirmState, dispatch] = useContext(ConfirmContext) + + const closeConfirm = () => { + dispatch({ + type: HIDE_CONFIRM + }) + } + + const onConfirm = () => { + closeConfirm() + resolveCallback(true) + } + + const onCancel = () => { + closeConfirm() + resolveCallback(false) + } + const confirm = (confirmPayload) => { + dispatch({ + type: SHOW_CONFIRM, + payload: confirmPayload + }) + return new Promise((res) => { + resolveCallback = res + }) + } + + return { confirm, onConfirm, onCancel, confirmState } +} + +export default useConfirm diff --git a/packages/ui/src/hooks/useScriptRef.js b/packages/ui/src/hooks/useScriptRef.js new file mode 100644 index 0000000000000000000000000000000000000000..451c2c04b1e46b40e66d4b4df8e9cffe4cb37a3f --- /dev/null +++ b/packages/ui/src/hooks/useScriptRef.js @@ -0,0 +1,18 @@ +import { useEffect, useRef } from 'react' + +// ==============================|| ELEMENT REFERENCE HOOKS ||============================== // + +const useScriptRef = () => { + const scripted = useRef(true) + + useEffect( + () => () => { + scripted.current = false + }, + [] + ) + + return scripted +} + +export default useScriptRef diff --git a/packages/ui/src/index.js 
b/packages/ui/src/index.js new file mode 100644 index 0000000000000000000000000000000000000000..1269f06563e5aac9254db90446ce80748c0576b7 --- /dev/null +++ b/packages/ui/src/index.js @@ -0,0 +1,33 @@ +import React from 'react' +import App from './App' +import { store } from 'store' +import { createRoot } from 'react-dom/client' + +// style + assets +import 'assets/scss/style.scss' + +// third party +import { BrowserRouter } from 'react-router-dom' +import { Provider } from 'react-redux' +import { SnackbarProvider } from 'notistack' +import ConfirmContextProvider from 'store/context/ConfirmContextProvider' +import { ReactFlowContext } from 'store/context/ReactFlowContext' + +const container = document.getElementById('root') +const root = createRoot(container) + +root.render( + + + + + + + + + + + + + +) diff --git a/packages/ui/src/layout/MainLayout/Header/ProfileSection/index.css b/packages/ui/src/layout/MainLayout/Header/ProfileSection/index.css new file mode 100644 index 0000000000000000000000000000000000000000..f6be27ab51c72d926c4bde14b5c2714038a9aac3 --- /dev/null +++ b/packages/ui/src/layout/MainLayout/Header/ProfileSection/index.css @@ -0,0 +1,6 @@ +.ps__rail-x { + display: none !important; +} +.ps__thumb-x { + display: none !important; +} diff --git a/packages/ui/src/layout/MainLayout/Header/ProfileSection/index.js b/packages/ui/src/layout/MainLayout/Header/ProfileSection/index.js new file mode 100644 index 0000000000000000000000000000000000000000..41de3dd44e5c3cd5d46abc4ab71aac404dcb4e3a --- /dev/null +++ b/packages/ui/src/layout/MainLayout/Header/ProfileSection/index.js @@ -0,0 +1,264 @@ +import { useState, useRef, useEffect } from 'react' +import PropTypes from 'prop-types' +import { useSelector, useDispatch } from 'react-redux' +import { useNavigate } from 'react-router-dom' + +// material-ui +import { useTheme } from '@mui/material/styles' +import { + Box, + ButtonBase, + Avatar, + ClickAwayListener, + Divider, + List, + ListItemButton, + ListItemIcon, + 
ListItemText, + Paper, + Popper, + Typography +} from '@mui/material' + +// third-party +import PerfectScrollbar from 'react-perfect-scrollbar' + +// project imports +import MainCard from 'ui-component/cards/MainCard' +import Transitions from 'ui-component/extended/Transitions' +import { BackdropLoader } from 'ui-component/loading/BackdropLoader' +import AboutDialog from 'ui-component/dialog/AboutDialog' + +// assets +import { IconLogout, IconSettings, IconFileExport, IconFileDownload, IconInfoCircle } from '@tabler/icons' + +// API +import databaseApi from 'api/database' + +import { SET_MENU } from 'store/actions' + +import './index.css' + +// ==============================|| PROFILE MENU ||============================== // + +const ProfileSection = ({ username, handleLogout }) => { + const theme = useTheme() + const dispatch = useDispatch() + const navigate = useNavigate() + + const customization = useSelector((state) => state.customization) + + const [open, setOpen] = useState(false) + const [loading, setLoading] = useState(false) + const [aboutDialogOpen, setAboutDialogOpen] = useState(false) + + const anchorRef = useRef(null) + const uploadRef = useRef(null) + + const handleClose = (event) => { + if (anchorRef.current && anchorRef.current.contains(event.target)) { + return + } + setOpen(false) + } + + const handleToggle = () => { + setOpen((prevOpen) => !prevOpen) + } + + const handleExportDB = async () => { + setOpen(false) + try { + const response = await databaseApi.getExportDatabase() + const exportItems = response.data + let dataStr = JSON.stringify(exportItems) + let dataUri = 'data:application/json;charset=utf-8,' + encodeURIComponent(dataStr) + + let exportFileDefaultName = `DB.json` + + let linkElement = document.createElement('a') + linkElement.setAttribute('href', dataUri) + linkElement.setAttribute('download', exportFileDefaultName) + linkElement.click() + } catch (e) { + console.error(e) + } + } + + const handleFileUpload = (e) => { + if 
(!e.target.files) return + + const file = e.target.files[0] + const reader = new FileReader() + reader.onload = async (evt) => { + if (!evt?.target?.result) { + return + } + const { result } = evt.target + + if (result.includes(`"chatmessages":[`) && result.includes(`"chatflows":[`) && result.includes(`"apikeys":[`)) { + dispatch({ type: SET_MENU, opened: false }) + setLoading(true) + + try { + await databaseApi.createLoadDatabase(JSON.parse(result)) + setLoading(false) + navigate('/', { replace: true }) + navigate(0) + } catch (e) { + console.error(e) + setLoading(false) + } + } else { + alert('Incorrect Flowise Database Format') + } + } + reader.readAsText(file) + } + + const prevOpen = useRef(open) + useEffect(() => { + if (prevOpen.current === true && open === false) { + anchorRef.current.focus() + } + + prevOpen.current = open + }, [open]) + + return ( + <> + + + + + + + {({ TransitionProps }) => ( + + + + + {username && ( + + + {username} + + + )} + + + + + { + setOpen(false) + uploadRef.current.click() + }} + > + + + + Load Database} /> + + + + + + Export Database} /> + + { + setOpen(false) + setAboutDialogOpen(true) + }} + > + + + + About Flowise} /> + + {localStorage.getItem('username') && localStorage.getItem('password') && ( + + + + + Logout} /> + + )} + + + + + + + + )} + + handleFileUpload(e)} /> + + setAboutDialogOpen(false)} /> + + ) +} + +ProfileSection.propTypes = { + username: PropTypes.string, + handleLogout: PropTypes.func +} + +export default ProfileSection diff --git a/packages/ui/src/layout/MainLayout/Header/index.js b/packages/ui/src/layout/MainLayout/Header/index.js new file mode 100644 index 0000000000000000000000000000000000000000..9630bf9578b3f25cf853b5fc6d4e2ab4a3ff42f2 --- /dev/null +++ b/packages/ui/src/layout/MainLayout/Header/index.js @@ -0,0 +1,140 @@ +import PropTypes from 'prop-types' +import { useSelector, useDispatch } from 'react-redux' +import { useState } from 'react' +import { useNavigate } from 'react-router-dom' + +// 
material-ui +import { useTheme } from '@mui/material/styles' +import { Avatar, Box, ButtonBase, Switch } from '@mui/material' +import { styled } from '@mui/material/styles' + +// project imports +import LogoSection from '../LogoSection' +import ProfileSection from './ProfileSection' + +// assets +import { IconMenu2 } from '@tabler/icons' + +// store +import { SET_DARKMODE } from 'store/actions' + +// ==============================|| MAIN NAVBAR / HEADER ||============================== // + +const MaterialUISwitch = styled(Switch)(({ theme }) => ({ + width: 62, + height: 34, + padding: 7, + '& .MuiSwitch-switchBase': { + margin: 1, + padding: 0, + transform: 'translateX(6px)', + '&.Mui-checked': { + color: '#fff', + transform: 'translateX(22px)', + '& .MuiSwitch-thumb:before': { + backgroundImage: `url('data:image/svg+xml;utf8,')` + }, + '& + .MuiSwitch-track': { + opacity: 1, + backgroundColor: theme.palette.mode === 'dark' ? '#8796A5' : '#aab4be' + } + } + }, + '& .MuiSwitch-thumb': { + backgroundColor: theme.palette.mode === 'dark' ? '#003892' : '#001e3c', + width: 32, + height: 32, + '&:before': { + content: "''", + position: 'absolute', + width: '100%', + height: '100%', + left: 0, + top: 0, + backgroundRepeat: 'no-repeat', + backgroundPosition: 'center', + backgroundImage: `url('data:image/svg+xml;utf8,')` + } + }, + '& .MuiSwitch-track': { + opacity: 1, + backgroundColor: theme.palette.mode === 'dark' ? 
'#8796A5' : '#aab4be', + borderRadius: 20 / 2 + } +})) + +const Header = ({ handleLeftDrawerToggle }) => { + const theme = useTheme() + const navigate = useNavigate() + + const customization = useSelector((state) => state.customization) + + const [isDark, setIsDark] = useState(customization.isDarkMode) + const dispatch = useDispatch() + + const changeDarkMode = () => { + dispatch({ type: SET_DARKMODE, isDarkMode: !isDark }) + setIsDark((isDark) => !isDark) + localStorage.setItem('isDarkMode', !isDark) + } + + const signOutClicked = () => { + localStorage.removeItem('username') + localStorage.removeItem('password') + navigate('/', { replace: true }) + navigate(0) + } + + return ( + <> + {/* logo & toggler button */} + + + + + + + + + + + + + + + + ) +} + +Header.propTypes = { + handleLeftDrawerToggle: PropTypes.func +} + +export default Header diff --git a/packages/ui/src/layout/MainLayout/LogoSection/index.js b/packages/ui/src/layout/MainLayout/LogoSection/index.js new file mode 100644 index 0000000000000000000000000000000000000000..cf436396042a371ae5a984904be7d217d3486859 --- /dev/null +++ b/packages/ui/src/layout/MainLayout/LogoSection/index.js @@ -0,0 +1,18 @@ +import { Link } from 'react-router-dom' + +// material-ui +import { ButtonBase } from '@mui/material' + +// project imports +import config from 'config' +import Logo from 'ui-component/extended/Logo' + +// ==============================|| MAIN LOGO ||============================== // + +const LogoSection = () => ( + + + +) + +export default LogoSection diff --git a/packages/ui/src/layout/MainLayout/Sidebar/MenuList/NavCollapse/index.js b/packages/ui/src/layout/MainLayout/Sidebar/MenuList/NavCollapse/index.js new file mode 100644 index 0000000000000000000000000000000000000000..4fb11edf51a9472430a50834e98013393e04aad5 --- /dev/null +++ b/packages/ui/src/layout/MainLayout/Sidebar/MenuList/NavCollapse/index.js @@ -0,0 +1,124 @@ +import PropTypes from 'prop-types' +import { useState } from 'react' +import { 
useSelector } from 'react-redux' + +// material-ui +import { useTheme } from '@mui/material/styles' +import { Collapse, List, ListItemButton, ListItemIcon, ListItemText, Typography } from '@mui/material' + +// project imports +import NavItem from '../NavItem' + +// assets +import FiberManualRecordIcon from '@mui/icons-material/FiberManualRecord' +import { IconChevronDown, IconChevronUp } from '@tabler/icons' + +// ==============================|| SIDEBAR MENU LIST COLLAPSE ITEMS ||============================== // + +const NavCollapse = ({ menu, level }) => { + const theme = useTheme() + const customization = useSelector((state) => state.customization) + + const [open, setOpen] = useState(false) + const [selected, setSelected] = useState(null) + + const handleClick = () => { + setOpen(!open) + setSelected(!selected ? menu.id : null) + } + + // menu collapse & item + const menus = menu.children?.map((item) => { + switch (item.type) { + case 'collapse': + return + case 'item': + return + default: + return ( + + Menu Items Error + + ) + } + }) + + const Icon = menu.icon + const menuIcon = menu.icon ? ( + + ) : ( + 0 ? 'inherit' : 'medium'} + /> + ) + + return ( + <> + 1 ? 'transparent !important' : 'inherit', + py: level > 1 ? 1 : 1.25, + pl: `${level * 24}px` + }} + selected={selected === menu.id} + onClick={handleClick} + > + {menuIcon} + + {menu.title} + + } + secondary={ + menu.caption && ( + + {menu.caption} + + ) + } + /> + {open ? 
( + + ) : ( + + )} + + + + {menus} + + + + ) +} + +NavCollapse.propTypes = { + menu: PropTypes.object, + level: PropTypes.number +} + +export default NavCollapse diff --git a/packages/ui/src/layout/MainLayout/Sidebar/MenuList/NavGroup/index.js b/packages/ui/src/layout/MainLayout/Sidebar/MenuList/NavGroup/index.js new file mode 100644 index 0000000000000000000000000000000000000000..1f33210eff58619a2c75a7dbb7436a41eccf4669 --- /dev/null +++ b/packages/ui/src/layout/MainLayout/Sidebar/MenuList/NavGroup/index.js @@ -0,0 +1,61 @@ +import PropTypes from 'prop-types' + +// material-ui +import { useTheme } from '@mui/material/styles' +import { Divider, List, Typography } from '@mui/material' + +// project imports +import NavItem from '../NavItem' +import NavCollapse from '../NavCollapse' + +// ==============================|| SIDEBAR MENU LIST GROUP ||============================== // + +const NavGroup = ({ item }) => { + const theme = useTheme() + + // menu list collapse & items + const items = item.children?.map((menu) => { + switch (menu.type) { + case 'collapse': + return + case 'item': + return + default: + return ( + + Menu Items Error + + ) + } + }) + + return ( + <> + + {item.title} + {item.caption && ( + + {item.caption} + + )} + + ) + } + > + {items} + + + {/* group divider */} + + + ) +} + +NavGroup.propTypes = { + item: PropTypes.object +} + +export default NavGroup diff --git a/packages/ui/src/layout/MainLayout/Sidebar/MenuList/NavItem/index.js b/packages/ui/src/layout/MainLayout/Sidebar/MenuList/NavItem/index.js new file mode 100644 index 0000000000000000000000000000000000000000..ebf2536d3bf3de71031c3568dfd211a8c262befc --- /dev/null +++ b/packages/ui/src/layout/MainLayout/Sidebar/MenuList/NavItem/index.js @@ -0,0 +1,150 @@ +import PropTypes from 'prop-types' +import { forwardRef, useEffect } from 'react' +import { Link } from 'react-router-dom' +import { useDispatch, useSelector } from 'react-redux' + +// material-ui +import { useTheme } from 
'@mui/material/styles' +import { Avatar, Chip, ListItemButton, ListItemIcon, ListItemText, Typography, useMediaQuery } from '@mui/material' + +// project imports +import { MENU_OPEN, SET_MENU } from 'store/actions' +import config from 'config' + +// assets +import FiberManualRecordIcon from '@mui/icons-material/FiberManualRecord' + +// ==============================|| SIDEBAR MENU LIST ITEMS ||============================== // + +const NavItem = ({ item, level, navType, onClick, onUploadFile }) => { + const theme = useTheme() + const dispatch = useDispatch() + const customization = useSelector((state) => state.customization) + const matchesSM = useMediaQuery(theme.breakpoints.down('lg')) + + const Icon = item.icon + const itemIcon = item?.icon ? ( + + ) : ( + id === item?.id) > -1 ? 8 : 6, + height: customization.isOpen.findIndex((id) => id === item?.id) > -1 ? 8 : 6 + }} + fontSize={level > 0 ? 'inherit' : 'medium'} + /> + ) + + let itemTarget = '_self' + if (item.target) { + itemTarget = '_blank' + } + + let listItemProps = { + component: forwardRef(function ListItemPropsComponent(props, ref) { + return + }) + } + if (item?.external) { + listItemProps = { component: 'a', href: item.url, target: itemTarget } + } + if (item?.id === 'loadChatflow') { + listItemProps.component = 'label' + } + + const handleFileUpload = (e) => { + if (!e.target.files) return + + const file = e.target.files[0] + + const reader = new FileReader() + reader.onload = (evt) => { + if (!evt?.target?.result) { + return + } + const { result } = evt.target + onUploadFile(result) + } + reader.readAsText(file) + } + + const itemHandler = (id) => { + if (navType === 'SETTINGS' && id !== 'loadChatflow') { + onClick(id) + } else { + dispatch({ type: MENU_OPEN, id }) + if (matchesSM) dispatch({ type: SET_MENU, opened: false }) + } + } + + // active menu item on page load + useEffect(() => { + if (navType === 'MENU') { + const currentIndex = document.location.pathname + .toString() + .split('/') + 
.findIndex((id) => id === item.id) + if (currentIndex > -1) { + dispatch({ type: MENU_OPEN, id: item.id }) + } + if (!document.location.pathname.toString().split('/')[1]) { + itemHandler('chatflows') + } + } + + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [navType]) + + return ( + 1 ? 'transparent !important' : 'inherit', + py: level > 1 ? 1 : 1.25, + pl: `${level * 24}px` + }} + selected={customization.isOpen.findIndex((id) => id === item.id) > -1} + onClick={() => itemHandler(item.id)} + > + {item.id === 'loadChatflow' && handleFileUpload(e)} />} + {itemIcon} + id === item.id) > -1 ? 'h5' : 'body1'} color='inherit'> + {item.title} + + } + secondary={ + item.caption && ( + + {item.caption} + + ) + } + /> + {item.chip && ( + {item.chip.avatar}} + /> + )} + + ) +} + +NavItem.propTypes = { + item: PropTypes.object, + level: PropTypes.number, + navType: PropTypes.string, + onClick: PropTypes.func, + onUploadFile: PropTypes.func +} + +export default NavItem diff --git a/packages/ui/src/layout/MainLayout/Sidebar/MenuList/index.js b/packages/ui/src/layout/MainLayout/Sidebar/MenuList/index.js new file mode 100644 index 0000000000000000000000000000000000000000..dbcf3deefac4ec44b0dbc130e7a4fa38672d7cd7 --- /dev/null +++ b/packages/ui/src/layout/MainLayout/Sidebar/MenuList/index.js @@ -0,0 +1,27 @@ +// material-ui +import { Typography } from '@mui/material' + +// project imports +import NavGroup from './NavGroup' +import menuItem from 'menu-items' + +// ==============================|| SIDEBAR MENU LIST ||============================== // + +const MenuList = () => { + const navItems = menuItem.items.map((item) => { + switch (item.type) { + case 'group': + return + default: + return ( + + Menu Items Error + + ) + } + }) + + return <>{navItems} +} + +export default MenuList diff --git a/packages/ui/src/layout/MainLayout/Sidebar/index.js b/packages/ui/src/layout/MainLayout/Sidebar/index.js new file mode 100644 index 
0000000000000000000000000000000000000000..81c01009fcfc09e36bb4821f8fe0eb64222674dc --- /dev/null +++ b/packages/ui/src/layout/MainLayout/Sidebar/index.js @@ -0,0 +1,85 @@ +import PropTypes from 'prop-types' + +// material-ui +import { useTheme } from '@mui/material/styles' +import { Box, Drawer, useMediaQuery } from '@mui/material' + +// third-party +import PerfectScrollbar from 'react-perfect-scrollbar' +import { BrowserView, MobileView } from 'react-device-detect' + +// project imports +import MenuList from './MenuList' +import LogoSection from '../LogoSection' +import { drawerWidth } from 'store/constant' + +// ==============================|| SIDEBAR DRAWER ||============================== // + +const Sidebar = ({ drawerOpen, drawerToggle, window }) => { + const theme = useTheme() + const matchUpMd = useMediaQuery(theme.breakpoints.up('md')) + + const drawer = ( + <> + + + + + + + + + + + + + + + + + ) + + const container = window !== undefined ? () => window.document.body : undefined + + return ( + + + {drawer} + + + ) +} + +Sidebar.propTypes = { + drawerOpen: PropTypes.bool, + drawerToggle: PropTypes.func, + window: PropTypes.object +} + +export default Sidebar diff --git a/packages/ui/src/layout/MainLayout/index.js b/packages/ui/src/layout/MainLayout/index.js new file mode 100644 index 0000000000000000000000000000000000000000..71ead115ad9be2d3e71b0c29754d0e470636c448 --- /dev/null +++ b/packages/ui/src/layout/MainLayout/index.js @@ -0,0 +1,107 @@ +import { useEffect } from 'react' +import { useDispatch, useSelector } from 'react-redux' +import { Outlet } from 'react-router-dom' + +// material-ui +import { styled, useTheme } from '@mui/material/styles' +import { AppBar, Box, CssBaseline, Toolbar, useMediaQuery } from '@mui/material' + +// project imports +import Header from './Header' +import Sidebar from './Sidebar' +import { drawerWidth } from 'store/constant' +import { SET_MENU } from 'store/actions' + +// styles +const Main = styled('main', { 
shouldForwardProp: (prop) => prop !== 'open' })(({ theme, open }) => ({ + ...theme.typography.mainContent, + ...(!open && { + borderBottomLeftRadius: 0, + borderBottomRightRadius: 0, + transition: theme.transitions.create('margin', { + easing: theme.transitions.easing.sharp, + duration: theme.transitions.duration.leavingScreen + }), + [theme.breakpoints.up('md')]: { + marginLeft: -(drawerWidth - 20), + width: `calc(100% - ${drawerWidth}px)` + }, + [theme.breakpoints.down('md')]: { + marginLeft: '20px', + width: `calc(100% - ${drawerWidth}px)`, + padding: '16px' + }, + [theme.breakpoints.down('sm')]: { + marginLeft: '10px', + width: `calc(100% - ${drawerWidth}px)`, + padding: '16px', + marginRight: '10px' + } + }), + ...(open && { + transition: theme.transitions.create('margin', { + easing: theme.transitions.easing.easeOut, + duration: theme.transitions.duration.enteringScreen + }), + marginLeft: 0, + borderBottomLeftRadius: 0, + borderBottomRightRadius: 0, + width: `calc(100% - ${drawerWidth}px)`, + [theme.breakpoints.down('md')]: { + marginLeft: '20px' + }, + [theme.breakpoints.down('sm')]: { + marginLeft: '10px' + } + }) +})) + +// ==============================|| MAIN LAYOUT ||============================== // + +const MainLayout = () => { + const theme = useTheme() + const matchDownMd = useMediaQuery(theme.breakpoints.down('lg')) + + // Handle left drawer + const leftDrawerOpened = useSelector((state) => state.customization.opened) + const dispatch = useDispatch() + const handleLeftDrawerToggle = () => { + dispatch({ type: SET_MENU, opened: !leftDrawerOpened }) + } + + useEffect(() => { + setTimeout(() => dispatch({ type: SET_MENU, opened: !matchDownMd }), 0) + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [matchDownMd]) + + return ( + + + {/* header */} + + +
+ + + + {/* drawer */} + + + {/* main content */} +
+ +
+ + ) +} + +export default MainLayout diff --git a/packages/ui/src/layout/MinimalLayout/index.js b/packages/ui/src/layout/MinimalLayout/index.js new file mode 100644 index 0000000000000000000000000000000000000000..0982616fdff01486f9ca146f41766fbc1d5bc4d4 --- /dev/null +++ b/packages/ui/src/layout/MinimalLayout/index.js @@ -0,0 +1,11 @@ +import { Outlet } from 'react-router-dom' + +// ==============================|| MINIMAL LAYOUT ||============================== // + +const MinimalLayout = () => ( + <> + + +) + +export default MinimalLayout diff --git a/packages/ui/src/layout/NavMotion.js b/packages/ui/src/layout/NavMotion.js new file mode 100644 index 0000000000000000000000000000000000000000..c467b89e4ae79213ebbb07487726f49d6bccad87 --- /dev/null +++ b/packages/ui/src/layout/NavMotion.js @@ -0,0 +1,39 @@ +import PropTypes from 'prop-types' +import { motion } from 'framer-motion' + +// ==============================|| ANIMATION FOR CONTENT ||============================== // + +const NavMotion = ({ children }) => { + const motionVariants = { + initial: { + opacity: 0, + scale: 0.99 + }, + in: { + opacity: 1, + scale: 1 + }, + out: { + opacity: 0, + scale: 1.01 + } + } + + const motionTransition = { + type: 'tween', + ease: 'anticipate', + duration: 0.4 + } + + return ( + + {children} + + ) +} + +NavMotion.propTypes = { + children: PropTypes.node +} + +export default NavMotion diff --git a/packages/ui/src/layout/NavigationScroll.js b/packages/ui/src/layout/NavigationScroll.js new file mode 100644 index 0000000000000000000000000000000000000000..d2354b9f29578dacb994acf0b77b0d900e80de62 --- /dev/null +++ b/packages/ui/src/layout/NavigationScroll.js @@ -0,0 +1,26 @@ +import PropTypes from 'prop-types' +import { useEffect } from 'react' +import { useLocation } from 'react-router-dom' + +// ==============================|| NAVIGATION SCROLL TO TOP ||============================== // + +const NavigationScroll = ({ children }) => { + const location = useLocation() + const 
{ pathname } = location + + useEffect(() => { + window.scrollTo({ + top: 0, + left: 0, + behavior: 'smooth' + }) + }, [pathname]) + + return children || null +} + +NavigationScroll.propTypes = { + children: PropTypes.node +} + +export default NavigationScroll diff --git a/packages/ui/src/menu-items/dashboard.js b/packages/ui/src/menu-items/dashboard.js new file mode 100644 index 0000000000000000000000000000000000000000..f1cd5062ee821ee4475bc80355e855547719952a --- /dev/null +++ b/packages/ui/src/menu-items/dashboard.js @@ -0,0 +1,41 @@ +// assets +import { IconHierarchy, IconBuildingStore, IconKey } from '@tabler/icons' + +// constant +const icons = { IconHierarchy, IconBuildingStore, IconKey } + +// ==============================|| DASHBOARD MENU ITEMS ||============================== // + +const dashboard = { + id: 'dashboard', + title: '', + type: 'group', + children: [ + { + id: 'chatflows', + title: 'Chatflows', + type: 'item', + url: '/chatflows', + icon: icons.IconHierarchy, + breadcrumbs: true + }, + { + id: 'marketplaces', + title: 'Marketplaces', + type: 'item', + url: '/marketplaces', + icon: icons.IconBuildingStore, + breadcrumbs: true + }, + { + id: 'apikey', + title: 'API Keys', + type: 'item', + url: '/apikey', + icon: icons.IconKey, + breadcrumbs: true + } + ] +} + +export default dashboard diff --git a/packages/ui/src/menu-items/index.js b/packages/ui/src/menu-items/index.js new file mode 100644 index 0000000000000000000000000000000000000000..bad835afbb6f208346132962a10b72e02c2d002f --- /dev/null +++ b/packages/ui/src/menu-items/index.js @@ -0,0 +1,9 @@ +import dashboard from './dashboard' + +// ==============================|| MENU ITEMS ||============================== // + +const menuItems = { + items: [dashboard] +} + +export default menuItems diff --git a/packages/ui/src/menu-items/settings.js b/packages/ui/src/menu-items/settings.js new file mode 100644 index 0000000000000000000000000000000000000000..77b9ebf5d69b30e608b111df5664bace590c11ed 
--- /dev/null +++ b/packages/ui/src/menu-items/settings.js @@ -0,0 +1,45 @@ +// assets +import { IconTrash, IconFileUpload, IconFileExport, IconCopy } from '@tabler/icons' + +// constant +const icons = { IconTrash, IconFileUpload, IconFileExport, IconCopy } + +// ==============================|| SETTINGS MENU ITEMS ||============================== // + +const settings = { + id: 'settings', + title: '', + type: 'group', + children: [ + { + id: 'duplicateChatflow', + title: 'Duplicate Chatflow', + type: 'item', + url: '', + icon: icons.IconCopy + }, + { + id: 'loadChatflow', + title: 'Load Chatflow', + type: 'item', + url: '', + icon: icons.IconFileUpload + }, + { + id: 'exportChatflow', + title: 'Export Chatflow', + type: 'item', + url: '', + icon: icons.IconFileExport + }, + { + id: 'deleteChatflow', + title: 'Delete Chatflow', + type: 'item', + url: '', + icon: icons.IconTrash + } + ] +} + +export default settings diff --git a/packages/ui/src/routes/CanvasRoutes.js b/packages/ui/src/routes/CanvasRoutes.js new file mode 100644 index 0000000000000000000000000000000000000000..e37f25d2c3691cdd27cdb668fc41ab9dd9d46160 --- /dev/null +++ b/packages/ui/src/routes/CanvasRoutes.js @@ -0,0 +1,32 @@ +import { lazy } from 'react' + +// project imports +import Loadable from 'ui-component/loading/Loadable' +import MinimalLayout from 'layout/MinimalLayout' + +// canvas routing +const Canvas = Loadable(lazy(() => import('views/canvas'))) +const MarketplaceCanvas = Loadable(lazy(() => import('views/marketplaces/MarketplaceCanvas'))) + +// ==============================|| CANVAS ROUTING ||============================== // + +const CanvasRoutes = { + path: '/', + element: , + children: [ + { + path: '/canvas', + element: + }, + { + path: '/canvas/:id', + element: + }, + { + path: '/marketplace/:id', + element: + } + ] +} + +export default CanvasRoutes diff --git a/packages/ui/src/routes/MainRoutes.js b/packages/ui/src/routes/MainRoutes.js new file mode 100644 index 
0000000000000000000000000000000000000000..5353e41a87233b9157c1cbe5883e276560c0b18c --- /dev/null +++ b/packages/ui/src/routes/MainRoutes.js @@ -0,0 +1,41 @@ +import { lazy } from 'react' + +// project imports +import MainLayout from 'layout/MainLayout' +import Loadable from 'ui-component/loading/Loadable' + +// chatflows routing +const Chatflows = Loadable(lazy(() => import('views/chatflows'))) + +// marketplaces routing +const Marketplaces = Loadable(lazy(() => import('views/marketplaces'))) + +// apikey routing +const APIKey = Loadable(lazy(() => import('views/apikey'))) + +// ==============================|| MAIN ROUTING ||============================== // + +const MainRoutes = { + path: '/', + element: , + children: [ + { + path: '/', + element: + }, + { + path: '/chatflows', + element: + }, + { + path: '/marketplaces', + element: + }, + { + path: '/apikey', + element: + } + ] +} + +export default MainRoutes diff --git a/packages/ui/src/routes/index.js b/packages/ui/src/routes/index.js new file mode 100644 index 0000000000000000000000000000000000000000..15fe4dcab63477ab8452bff5b09a5280a78a05ed --- /dev/null +++ b/packages/ui/src/routes/index.js @@ -0,0 +1,12 @@ +import { useRoutes } from 'react-router-dom' + +// routes +import MainRoutes from './MainRoutes' +import CanvasRoutes from './CanvasRoutes' +import config from 'config' + +// ==============================|| ROUTING RENDER ||============================== // + +export default function ThemeRoutes() { + return useRoutes([MainRoutes, CanvasRoutes], config.basename) +} diff --git a/packages/ui/src/serviceWorker.js b/packages/ui/src/serviceWorker.js new file mode 100644 index 0000000000000000000000000000000000000000..9a44c6656f9227115c21576ba0f676a75faf9c25 --- /dev/null +++ b/packages/ui/src/serviceWorker.js @@ -0,0 +1,132 @@ +// This optional code is used to register a service worker. +// register() is not called by default. 
// ---- packages/ui/src/serviceWorker.js (CRA service-worker boilerplate) ----

// This lets the app load faster on subsequent visits in production, and gives
// it offline capabilities. However, it also means that developers (and users)
// will only see deployed updates on subsequent visits to a page, after all the
// existing tabs open on the page have been closed, since previously cached
// resources are updated in the background.

// To learn more about the benefits of this model and instructions on how to
// opt-in, read https://bit.ly/CRA-PWA

// True when the page is served from localhost (hostname, IPv6 loopback, or
// any 127.0.0.0/8 address) — used to add extra dev-only checks below.
const isLocalhost = Boolean(
    window.location.hostname === 'localhost' ||
        // [::1] is the IPv6 localhost address.
        window.location.hostname === '[::1]' ||
        // 127.0.0.0/8 are considered localhost for IPv4.
        window.location.hostname.match(/^127(?:\.(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)){3}$/)
)

// Register the service worker at swUrl and wire up the onUpdate/onSuccess
// callbacks from `config` once installation finishes.
function registerValidSW(swUrl, config) {
    navigator.serviceWorker
        .register(swUrl)
        .then((registration) => {
            registration.onupdatefound = () => {
                const installingWorker = registration.installing
                if (installingWorker == null) {
                    return
                }
                installingWorker.onstatechange = () => {
                    if (installingWorker.state === 'installed') {
                        if (navigator.serviceWorker.controller) {
                            // At this point, the updated precached content has been fetched,
                            // but the previous service worker will still serve the older
                            // content until all client tabs are closed.
                            console.info(
                                'New content is available and will be used when all tabs for this page are closed. See https://bit.ly/CRA-PWA.'
                            )

                            // Execute callback
                            if (config && config.onUpdate) {
                                config.onUpdate(registration)
                            }
                        } else {
                            // At this point, everything has been precached.
                            // It's the perfect time to display a
                            // "Content is cached for offline use." message.
                            console.info('Content is cached for offline use.')

                            // Execute callback
                            if (config && config.onSuccess) {
                                config.onSuccess(registration)
                            }
                        }
                    }
                }
            }
        })
        .catch((error) => {
            console.error('Error during service worker registration:', error)
        })
}

// Dev-only sanity check: fetch swUrl and make sure it really is a JS file
// (not an HTML 404 from a different app); unregister + reload if not.
function checkValidServiceWorker(swUrl, config) {
    // Check if the service worker can be found. If it can't reload the page.
    fetch(swUrl, {
        headers: { 'Service-Worker': 'script' }
    })
        .then((response) => {
            // Ensure service worker exists, and that we really are getting a JS file.
            const contentType = response.headers.get('content-type')
            if (response.status === 404 || (contentType != null && contentType.indexOf('javascript') === -1)) {
                // No service worker found. Probably a different app. Reload the page.
                navigator.serviceWorker.ready.then((registration) => {
                    registration.unregister().then(() => {
                        window.location.reload()
                    })
                })
            } else {
                // Service worker found. Proceed as normal.
                registerValidSW(swUrl, config)
            }
        })
        .catch(() => {
            console.info('No internet connection found. App is running in offline mode.')
        })
}

// Entry point: registers /service-worker.js in production builds only, and
// only when PUBLIC_URL shares the page origin (SW scope requirement).
export function register(config) {
    if (process.env.NODE_ENV === 'production' && 'serviceWorker' in navigator) {
        // The URL constructor is available in all browsers that support SW.
        const publicUrl = new URL(process.env.PUBLIC_URL, window.location.href)
        if (publicUrl.origin !== window.location.origin) {
            // Our service worker won't work if PUBLIC_URL is on a different origin
            // from what our page is served on. This might happen if a CDN is used to
            // serve assets; see https://github.com/facebook/create-react-app/issues/2374
            return
        }

        window.addEventListener('load', () => {
            const swUrl = `${process.env.PUBLIC_URL}/service-worker.js`

            if (isLocalhost) {
                // This is running on localhost. Let's check if a service worker still exists or not.
                checkValidServiceWorker(swUrl, config)

                // Add some additional logging to localhost, pointing developers to the
                // service worker/PWA documentation.
                navigator.serviceWorker.ready.then(() => {
                    console.info(
                        'This web app is being served cache-first by a service worker. To learn more, visit https://bit.ly/CRA-PWA'
                    )
                })
            } else {
                // Is not localhost. Just register service worker
                registerValidSW(swUrl, config)
            }
        })
    }
}

// Best-effort unregister; errors are logged, never thrown.
export function unregister() {
    if ('serviceWorker' in navigator) {
        navigator.serviceWorker.ready
            .then((registration) => {
                registration.unregister()
            })
            .catch((error) => {
                console.error(error.message)
            })
    }
}

// ---- packages/ui/src/store/actions.js (action type constants) ----
// action - customization reducer
export const SET_MENU = '@customization/SET_MENU'
export const MENU_TOGGLE = '@customization/MENU_TOGGLE'
export const MENU_OPEN = '@customization/MENU_OPEN'
export const SET_FONT_FAMILY = '@customization/SET_FONT_FAMILY'
export const SET_BORDER_RADIUS = '@customization/SET_BORDER_RADIUS'
// NOTE(review): trailing space inside this literal — harmless only while every
// consumer dispatches via this constant; worth fixing in a dedicated change.
export const SET_LAYOUT = '@customization/SET_LAYOUT '
export const SET_DARKMODE = '@customization/SET_DARKMODE'

// action - canvas reducer
export const SET_DIRTY = '@canvas/SET_DIRTY'
export const REMOVE_DIRTY = '@canvas/REMOVE_DIRTY'
export const SET_CHATFLOW = '@canvas/SET_CHATFLOW'

// action - notifier reducer
export const ENQUEUE_SNACKBAR = 'ENQUEUE_SNACKBAR'
export const CLOSE_SNACKBAR = 'CLOSE_SNACKBAR'
export const REMOVE_SNACKBAR = 'REMOVE_SNACKBAR'

// action - dialog reducer
export const SHOW_CONFIRM = 'SHOW_CONFIRM'
export const HIDE_CONFIRM = 'HIDE_CONFIRM'
type: ENQUEUE_SNACKBAR, + notification: { + ...notification, + key: key || new Date().getTime() + Math.random() + } + } +} + +export const closeSnackbar = (key) => ({ + type: CLOSE_SNACKBAR, + dismissAll: !key, // dismiss all if no key has been defined + key +}) + +export const removeSnackbar = (key) => ({ + type: REMOVE_SNACKBAR, + key +}) diff --git a/packages/ui/src/store/constant.js b/packages/ui/src/store/constant.js new file mode 100644 index 0000000000000000000000000000000000000000..f4729e16120f9b219056a92d2f9e278403e1c20e --- /dev/null +++ b/packages/ui/src/store/constant.js @@ -0,0 +1,7 @@ +// constant +export const gridSpacing = 3 +export const drawerWidth = 260 +export const appDrawerWidth = 320 +export const maxScroll = 100000 +export const baseURL = process.env.NODE_ENV === 'production' ? window.location.origin : window.location.origin.replace(':8080', ':7860') +export const uiBaseURL = window.location.origin diff --git a/packages/ui/src/store/context/ConfirmContext.js b/packages/ui/src/store/context/ConfirmContext.js new file mode 100644 index 0000000000000000000000000000000000000000..0ca68b10148bcdff00c599b92cf9c93fb6e39ad7 --- /dev/null +++ b/packages/ui/src/store/context/ConfirmContext.js @@ -0,0 +1,5 @@ +import React from 'react' + +const ConfirmContext = React.createContext() + +export default ConfirmContext diff --git a/packages/ui/src/store/context/ConfirmContextProvider.js b/packages/ui/src/store/context/ConfirmContextProvider.js new file mode 100644 index 0000000000000000000000000000000000000000..c7f40cbb74562bc6854d17253d6479136307afaa --- /dev/null +++ b/packages/ui/src/store/context/ConfirmContextProvider.js @@ -0,0 +1,16 @@ +import { useReducer } from 'react' +import PropTypes from 'prop-types' +import alertReducer, { initialState } from '../reducers/dialogReducer' +import ConfirmContext from './ConfirmContext' + +const ConfirmContextProvider = ({ children }) => { + const [state, dispatch] = useReducer(alertReducer, initialState) + + 
return {children} +} + +ConfirmContextProvider.propTypes = { + children: PropTypes.any +} + +export default ConfirmContextProvider diff --git a/packages/ui/src/store/context/ReactFlowContext.js b/packages/ui/src/store/context/ReactFlowContext.js new file mode 100644 index 0000000000000000000000000000000000000000..4c35d7020684045f89bc17d7f111ab471c9ba591 --- /dev/null +++ b/packages/ui/src/store/context/ReactFlowContext.js @@ -0,0 +1,126 @@ +import { createContext, useState } from 'react' +import PropTypes from 'prop-types' +import { getUniqueNodeId } from 'utils/genericHelper' +import { cloneDeep } from 'lodash' + +const initialValue = { + reactFlowInstance: null, + setReactFlowInstance: () => {}, + duplicateNode: () => {}, + deleteNode: () => {}, + deleteEdge: () => {} +} + +export const flowContext = createContext(initialValue) + +export const ReactFlowContext = ({ children }) => { + const [reactFlowInstance, setReactFlowInstance] = useState(null) + + const deleteNode = (nodeid) => { + deleteConnectedInput(nodeid, 'node') + reactFlowInstance.setNodes(reactFlowInstance.getNodes().filter((n) => n.id !== nodeid)) + reactFlowInstance.setEdges(reactFlowInstance.getEdges().filter((ns) => ns.source !== nodeid && ns.target !== nodeid)) + } + + const deleteEdge = (edgeid) => { + deleteConnectedInput(edgeid, 'edge') + reactFlowInstance.setEdges(reactFlowInstance.getEdges().filter((edge) => edge.id !== edgeid)) + } + + const deleteConnectedInput = (id, type) => { + const connectedEdges = + type === 'node' + ? 
reactFlowInstance.getEdges().filter((edge) => edge.source === id) + : reactFlowInstance.getEdges().filter((edge) => edge.id === id) + + for (const edge of connectedEdges) { + const targetNodeId = edge.target + const sourceNodeId = edge.source + const targetInput = edge.targetHandle.split('-')[2] + + reactFlowInstance.setNodes((nds) => + nds.map((node) => { + if (node.id === targetNodeId) { + let value + const inputAnchor = node.data.inputAnchors.find((ancr) => ancr.name === targetInput) + const inputParam = node.data.inputParams.find((param) => param.name === targetInput) + + if (inputAnchor && inputAnchor.list) { + const values = node.data.inputs[targetInput] || [] + value = values.filter((item) => !item.includes(sourceNodeId)) + } else if (inputParam && inputParam.acceptVariable) { + value = node.data.inputs[targetInput].replace(`{{${sourceNodeId}.data.instance}}`, '') || '' + } else { + value = '' + } + node.data = { + ...node.data, + inputs: { + ...node.data.inputs, + [targetInput]: value + } + } + } + return node + }) + ) + } + } + + const duplicateNode = (id) => { + const nodes = reactFlowInstance.getNodes() + const originalNode = nodes.find((n) => n.id === id) + if (originalNode) { + const newNodeId = getUniqueNodeId(originalNode.data, nodes) + const clonedNode = cloneDeep(originalNode) + + const duplicatedNode = { + ...clonedNode, + id: newNodeId, + position: { + x: clonedNode.position.x + 400, + y: clonedNode.position.y + }, + positionAbsolute: { + x: clonedNode.positionAbsolute.x + 400, + y: clonedNode.positionAbsolute.y + }, + data: { + ...clonedNode.data, + id: newNodeId + }, + selected: false + } + + const dataKeys = ['inputParams', 'inputAnchors', 'outputAnchors'] + + for (const key of dataKeys) { + for (const item of duplicatedNode.data[key]) { + if (item.id) { + item.id = item.id.replace(id, newNodeId) + } + } + } + + reactFlowInstance.setNodes([...nodes, duplicatedNode]) + } + } + + return ( + + {children} + + ) +} + +ReactFlowContext.propTypes = { 
// ---- packages/ui/src/store/index.js ----
import { createStore } from 'redux'
import reducer from './reducer'

// ==============================|| REDUX - MAIN STORE ||============================== //

// NOTE(review): `createStore` is the legacy redux API (no middleware/devtools
// configured here); kept as-is.
const store = createStore(reducer)
const persister = 'Free'

export { store, persister }

// ---- packages/ui/src/store/reducer.js ----
import { combineReducers } from 'redux'

// reducer import
import customizationReducer from './reducers/customizationReducer'
import canvasReducer from './reducers/canvasReducer'
import notifierReducer from './reducers/notifierReducer'
import dialogReducer from './reducers/dialogReducer'

// ==============================|| COMBINE REDUCER ||============================== //

// Root reducer: one slice per UI concern.
const reducer = combineReducers({
    customization: customizationReducer,
    canvas: canvasReducer,
    notifier: notifierReducer,
    dialog: dialogReducer
})

export default reducer

// ---- packages/ui/src/store/reducers/canvasReducer.js ----
// action - state management
import * as actionTypes from '../actions'

export const initialState = {
    isDirty: false, // unsaved-changes flag for the current canvas
    chatflow: null // chatflow currently loaded in the canvas
}

// ==============================|| CANVAS REDUCER ||============================== //

const canvasReducer = (state = initialState, action) => {
    switch (action.type) {
        case actionTypes.SET_DIRTY:
            return {
                ...state,
                isDirty: true
            }
        case actionTypes.REMOVE_DIRTY:
            return {
                ...state,
                isDirty: false
            }
        case actionTypes.SET_CHATFLOW:
            return {
                ...state,
                chatflow: action.chatflow
            }
        default:
            return state
    }
}

export default canvasReducer
// ---- packages/ui/src/store/reducers/customizationReducer.js ----
// project imports
import config from 'config'

// action - state management
import * as actionTypes from '../actions'

// Layout/theme preferences; the two booleans are persisted in localStorage
// by their respective toggles and rehydrated here on load.
export const initialState = {
    isOpen: [], // for active default menu
    fontFamily: config.fontFamily,
    borderRadius: config.borderRadius,
    opened: true,
    isHorizontal: localStorage.getItem('isHorizontal') === 'true' ? true : false,
    isDarkMode: localStorage.getItem('isDarkMode') === 'true' ? true : false
}

// ==============================|| CUSTOMIZATION REDUCER ||============================== //

const customizationReducer = (state = initialState, action) => {
    let id
    switch (action.type) {
        case actionTypes.MENU_OPEN:
            // only one menu entry is active at a time
            id = action.id
            return {
                ...state,
                isOpen: [id]
            }
        case actionTypes.SET_MENU:
            return {
                ...state,
                opened: action.opened
            }
        case actionTypes.SET_FONT_FAMILY:
            return {
                ...state,
                fontFamily: action.fontFamily
            }
        case actionTypes.SET_BORDER_RADIUS:
            return {
                ...state,
                borderRadius: action.borderRadius
            }
        case actionTypes.SET_LAYOUT:
            return {
                ...state,
                isHorizontal: action.isHorizontal
            }
        case actionTypes.SET_DARKMODE:
            return {
                ...state,
                isDarkMode: action.isDarkMode
            }
        default:
            return state
    }
}

export default customizationReducer

// ---- packages/ui/src/store/reducers/dialogReducer.js ----
import { SHOW_CONFIRM, HIDE_CONFIRM } from '../actions'

export const initialState = {
    show: false,
    title: '',
    description: '',
    confirmButtonName: 'OK',
    cancelButtonName: 'Cancel'
}

// Confirm-dialog state: SHOW_CONFIRM replaces the whole slice with the
// payload's texts; HIDE_CONFIRM resets to the hidden defaults.
const alertReducer = (state = initialState, action) => {
    switch (action.type) {
        case SHOW_CONFIRM:
            return {
                show: true,
                title: action.payload.title,
                description: action.payload.description,
                confirmButtonName: action.payload.confirmButtonName,
                cancelButtonName: action.payload.cancelButtonName
            }
        case HIDE_CONFIRM:
            return initialState
        default:
            return state
    }
}

export default alertReducer
// ---- packages/ui/src/store/reducers/notifierReducer.js ----
import { ENQUEUE_SNACKBAR, CLOSE_SNACKBAR, REMOVE_SNACKBAR } from '../actions'

export const initialState = {
    notifications: []
}

// Snackbar queue consumed by the notifier component: enqueue appends,
// close only flags `dismissed` (removal happens later via REMOVE_SNACKBAR).
const notifierReducer = (state = initialState, action) => {
    switch (action.type) {
        case ENQUEUE_SNACKBAR:
            return {
                ...state,
                notifications: [
                    ...state.notifications,
                    {
                        key: action.key,
                        ...action.notification
                    }
                ]
            }

        case CLOSE_SNACKBAR:
            return {
                ...state,
                notifications: state.notifications.map((notification) =>
                    action.dismissAll || notification.key === action.key ? { ...notification, dismissed: true } : { ...notification }
                )
            }

        case REMOVE_SNACKBAR:
            return {
                ...state,
                notifications: state.notifications.filter((notification) => notification.key !== action.key)
            }

        default:
            return state
    }
}

export default notifierReducer

// ---- packages/ui/src/themes/compStyleOverride.js ----
// Per-component MUI style overrides derived from the computed theme object
// (see themes/index.js). Dark-mode variants key off theme.customization.
export default function componentStyleOverrides(theme) {
    const bgColor = theme.colors?.grey50
    return {
        MuiCssBaseline: {
            styleOverrides: {
                // themed scrollbars (Firefox via scrollbar*, WebKit via ::-webkit-*)
                body: {
                    scrollbarWidth: 'thin',
                    scrollbarColor: theme?.customization?.isDarkMode
                        ? `${theme.colors?.grey500} ${theme.colors?.darkPrimaryMain}`
                        : `${theme.colors?.grey300} ${theme.paper}`,
                    '&::-webkit-scrollbar, & *::-webkit-scrollbar': {
                        width: 12,
                        height: 12,
                        backgroundColor: theme?.customization?.isDarkMode ? theme.colors?.darkPrimaryMain : theme.paper
                    },
                    '&::-webkit-scrollbar-thumb, & *::-webkit-scrollbar-thumb': {
                        borderRadius: 8,
                        backgroundColor: theme?.customization?.isDarkMode ? theme.colors?.grey500 : theme.colors?.grey300,
                        minHeight: 24,
                        border: `3px solid ${theme?.customization?.isDarkMode ? theme.colors?.darkPrimaryMain : theme.paper}`
                    },
                    '&::-webkit-scrollbar-thumb:focus, & *::-webkit-scrollbar-thumb:focus': {
                        backgroundColor: theme?.customization?.isDarkMode ? theme.colors?.darkPrimary200 : theme.colors?.grey500
                    },
                    '&::-webkit-scrollbar-thumb:active, & *::-webkit-scrollbar-thumb:active': {
                        backgroundColor: theme?.customization?.isDarkMode ? theme.colors?.darkPrimary200 : theme.colors?.grey500
                    },
                    '&::-webkit-scrollbar-thumb:hover, & *::-webkit-scrollbar-thumb:hover': {
                        backgroundColor: theme?.customization?.isDarkMode ? theme.colors?.darkPrimary200 : theme.colors?.grey500
                    },
                    '&::-webkit-scrollbar-corner, & *::-webkit-scrollbar-corner': {
                        backgroundColor: theme?.customization?.isDarkMode ? theme.colors?.darkPrimaryMain : theme.paper
                    }
                }
            }
        },
        MuiButton: {
            styleOverrides: {
                root: {
                    fontWeight: 500,
                    borderRadius: '4px'
                }
            }
        },
        MuiSvgIcon: {
            styleOverrides: {
                root: {
                    color: theme?.customization?.isDarkMode ? theme.colors?.paper : 'inherit',
                    background: theme?.customization?.isDarkMode ? theme.colors?.darkPrimaryLight : 'inherit'
                }
            }
        },
        MuiPaper: {
            defaultProps: {
                elevation: 0
            },
            styleOverrides: {
                root: {
                    backgroundImage: 'none'
                },
                rounded: {
                    borderRadius: `${theme?.customization?.borderRadius}px`
                }
            }
        },
        MuiCardHeader: {
            styleOverrides: {
                root: {
                    color: theme.colors?.textDark,
                    padding: '24px'
                },
                title: {
                    fontSize: '1.125rem'
                }
            }
        },
        MuiCardContent: {
            styleOverrides: {
                root: {
                    padding: '24px'
                }
            }
        },
        MuiCardActions: {
            styleOverrides: {
                root: {
                    padding: '24px'
                }
            }
        },
        MuiListItemButton: {
            styleOverrides: {
                root: {
                    color: theme.darkTextPrimary,
                    paddingTop: '10px',
                    paddingBottom: '10px',
                    '&.Mui-selected': {
                        color: theme.menuSelected,
                        backgroundColor: theme.menuSelectedBack,
                        '&:hover': {
                            backgroundColor: theme.menuSelectedBack
                        },
                        '& .MuiListItemIcon-root': {
                            color: theme.menuSelected
                        }
                    },
                    '&:hover': {
                        backgroundColor: theme.menuSelectedBack,
                        color: theme.menuSelected,
                        '& .MuiListItemIcon-root': {
                            color: theme.menuSelected
                        }
                    }
                }
            }
        },
        MuiListItemIcon: {
            styleOverrides: {
                root: {
                    color: theme.darkTextPrimary,
                    minWidth: '36px'
                }
            }
        },
        MuiListItemText: {
            styleOverrides: {
                primary: {
                    color: theme.textDark
                }
            }
        },
        MuiInputBase: {
            styleOverrides: {
                input: {
                    color: theme.textDark,
                    '&::placeholder': {
                        color: theme.darkTextSecondary,
                        fontSize: '0.875rem'
                    }
                }
            }
        },
        MuiOutlinedInput: {
            styleOverrides: {
                root: {
                    background: theme?.customization?.isDarkMode ? theme.colors?.darkPrimary800 : bgColor,
                    borderRadius: `${theme?.customization?.borderRadius}px`,
                    '& .MuiOutlinedInput-notchedOutline': {
                        borderColor: theme.colors?.grey400
                    },
                    '&:hover $notchedOutline': {
                        borderColor: theme.colors?.primaryLight
                    },
                    '&.MuiInputBase-multiline': {
                        padding: 1
                    }
                },
                input: {
                    fontWeight: 500,
                    background: theme?.customization?.isDarkMode ? theme.colors?.darkPrimary800 : bgColor,
                    padding: '15.5px 14px',
                    borderRadius: `${theme?.customization?.borderRadius}px`,
                    '&.MuiInputBase-inputSizeSmall': {
                        padding: '10px 14px',
                        '&.MuiInputBase-inputAdornedStart': {
                            paddingLeft: 0
                        }
                    }
                },
                inputAdornedStart: {
                    paddingLeft: 4
                },
                notchedOutline: {
                    borderRadius: `${theme?.customization?.borderRadius}px`
                }
            }
        },
        MuiSlider: {
            styleOverrides: {
                root: {
                    '&.Mui-disabled': {
                        color: theme.colors?.grey300
                    }
                },
                mark: {
                    backgroundColor: theme.paper,
                    width: '4px'
                },
                valueLabel: {
                    color: theme?.colors?.primaryLight
                }
            }
        },
        MuiDivider: {
            styleOverrides: {
                root: {
                    borderColor: theme.divider,
                    opacity: 1
                }
            }
        },
        MuiAvatar: {
            styleOverrides: {
                root: {
                    color: theme.colors?.primaryDark,
                    background: theme.colors?.primary200
                }
            }
        },
        MuiChip: {
            styleOverrides: {
                root: {
                    '&.MuiChip-deletable .MuiChip-deleteIcon': {
                        color: 'inherit'
                    }
                }
            }
        },
        MuiTooltip: {
            styleOverrides: {
                tooltip: {
                    color: theme?.customization?.isDarkMode ? theme.colors?.paper : theme.paper,
                    background: theme.colors?.grey700
                }
            }
        },
        MuiAutocomplete: {
            styleOverrides: {
                option: {
                    '&:hover': {
                        background: theme?.customization?.isDarkMode ? '#233345 !important' : ''
                    }
                }
            }
        }
    }
}
/**
 * Build the MUI palette section from the computed theme-option object
 * (see themes/index.js). Dark mode swaps in the dark* color variants.
 * @param {JsonObject} theme Theme customization object
 */
export default function themePalette(theme) {
    const { customization } = theme
    const c = theme.colors ?? {}
    const dark = customization.isDarkMode

    // pick the dark-mode value when dark mode is active, otherwise the light one
    const pick = (darkValue, lightValue) => (dark ? darkValue : lightValue)

    return {
        mode: customization?.navType,
        common: {
            black: c.darkPaper,
            dark: c.darkPrimaryMain
        },
        primary: {
            light: pick(c.darkPrimaryLight, c.primaryLight),
            main: c.primaryMain,
            dark: pick(c.darkPrimaryDark, c.primaryDark),
            200: pick(c.darkPrimary200, c.primary200),
            800: pick(c.darkPrimary800, c.primary800)
        },
        secondary: {
            light: pick(c.darkSecondaryLight, c.secondaryLight),
            main: pick(c.darkSecondaryMain, c.secondaryMain),
            dark: pick(c.darkSecondaryDark, c.secondaryDark),
            200: c.secondary200,
            800: c.secondary800
        },
        error: {
            light: c.errorLight,
            main: c.errorMain,
            dark: c.errorDark
        },
        orange: {
            light: c.orangeLight,
            main: c.orangeMain,
            dark: c.orangeDark
        },
        warning: {
            light: c.warningLight,
            main: c.warningMain,
            dark: c.warningDark
        },
        success: {
            light: c.successLight,
            200: c.success200,
            main: c.successMain,
            dark: c.successDark
        },
        // grey mixes raw palette values with already-resolved text colors
        grey: {
            50: c.grey50,
            100: c.grey100,
            200: c.grey200,
            300: c.grey300,
            500: theme.darkTextSecondary,
            600: theme.heading,
            700: theme.darkTextPrimary,
            900: theme.textDark
        },
        dark: {
            light: c.darkTextPrimary,
            main: c.darkLevel1,
            dark: c.darkLevel2,
            800: c.darkBackground,
            900: c.darkPaper
        },
        text: {
            primary: theme.darkTextPrimary,
            secondary: theme.darkTextSecondary,
            dark: theme.textDark,
            hint: c.grey100
        },
        background: {
            paper: theme.paper,
            default: theme.backgroundDefault
        },
        card: {
            main: pick(c.darkPrimaryMain, c.paper),
            light: pick(c.darkPrimary200, c.paper),
            hover: pick(c.darkPrimary800, c.paper)
        },
        asyncSelect: {
            main: pick(c.darkPrimary800, c.grey50)
        },
        canvasHeader: {
            deployLight: c.primaryLight,
            deployDark: c.primaryDark,
            saveLight: c.secondaryLight,
            saveDark: c.secondaryDark,
            settingsLight: c.grey300,
            settingsDark: c.grey700
        },
        codeEditor: {
            main: pick(c.darkPrimary800, c.primaryLight)
        }
    }
}
left: 0, + color: theme.grey500, + '&[data-shrink="false"]': { + top: 5 + } + }, + '& > div > input': { + padding: '30.5px 14px 11.5px !important' + }, + '& legend': { + display: 'none' + }, + '& fieldset': { + top: 0 + } + }, + mainContent: { + backgroundColor: theme.background, + width: '100%', + minHeight: 'calc(100vh - 75px)', + flexGrow: 1, + padding: '20px', + marginTop: '75px', + marginRight: '20px', + borderRadius: `${theme?.customization?.borderRadius}px` + }, + menuCaption: { + fontSize: '0.875rem', + fontWeight: 500, + color: theme.heading, + padding: '6px', + textTransform: 'capitalize', + marginTop: '10px' + }, + subMenuCaption: { + fontSize: '0.6875rem', + fontWeight: 500, + color: theme.darkTextSecondary, + textTransform: 'capitalize' + }, + commonAvatar: { + cursor: 'pointer', + borderRadius: '8px' + }, + smallAvatar: { + width: '22px', + height: '22px', + fontSize: '1rem' + }, + mediumAvatar: { + width: '34px', + height: '34px', + fontSize: '1.2rem' + }, + largeAvatar: { + width: '44px', + height: '44px', + fontSize: '1.5rem' + } + } +} diff --git a/packages/ui/src/ui-component/button/AnimateButton.js b/packages/ui/src/ui-component/button/AnimateButton.js new file mode 100644 index 0000000000000000000000000000000000000000..ce2d3fb4eae3fbdc27548b9e6a63af2a28eee0ac --- /dev/null +++ b/packages/ui/src/ui-component/button/AnimateButton.js @@ -0,0 +1,97 @@ +import PropTypes from 'prop-types' +import { forwardRef } from 'react' +// third-party +import { motion, useCycle } from 'framer-motion' + +// ==============================|| ANIMATION BUTTON ||============================== // + +const AnimateButton = forwardRef(function AnimateButton({ children, type, direction, offset, scale }, ref) { + let offset1 + let offset2 + switch (direction) { + case 'up': + case 'left': + offset1 = offset + offset2 = 0 + break + case 'right': + case 'down': + default: + offset1 = 0 + offset2 = offset + break + } + + const [x, cycleX] = useCycle(offset1, offset2) + const 
[y, cycleY] = useCycle(offset1, offset2) + + switch (type) { + case 'rotate': + return ( + + {children} + + ) + case 'slide': + if (direction === 'up' || direction === 'down') { + return ( + cycleY()} + onHoverStart={() => cycleY()} + > + {children} + + ) + } + return ( + cycleX()} onHoverStart={() => cycleX()}> + {children} + + ) + + case 'scale': + default: + if (typeof scale === 'number') { + scale = { + hover: scale, + tap: scale + } + } + return ( + + {children} + + ) + } +}) + +AnimateButton.propTypes = { + children: PropTypes.node, + offset: PropTypes.number, + type: PropTypes.oneOf(['slide', 'scale', 'rotate']), + direction: PropTypes.oneOf(['up', 'down', 'left', 'right']), + scale: PropTypes.oneOfType([PropTypes.number, PropTypes.object]) +} + +AnimateButton.defaultProps = { + type: 'scale', + offset: 10, + direction: 'right', + scale: { + hover: 1, + tap: 0.9 + } +} + +export default AnimateButton diff --git a/packages/ui/src/ui-component/button/StyledButton.js b/packages/ui/src/ui-component/button/StyledButton.js new file mode 100644 index 0000000000000000000000000000000000000000..6e0c707864e5165ef9f1f46f048ebc3350f20b1e --- /dev/null +++ b/packages/ui/src/ui-component/button/StyledButton.js @@ -0,0 +1,11 @@ +import { styled } from '@mui/material/styles' +import { Button } from '@mui/material' + +export const StyledButton = styled(Button)(({ theme, color = 'primary' }) => ({ + color: 'white', + backgroundColor: theme.palette[color].main, + '&:hover': { + backgroundColor: theme.palette[color].main, + backgroundImage: `linear-gradient(rgb(0 0 0/10%) 0 0)` + } +})) diff --git a/packages/ui/src/ui-component/button/StyledFab.js b/packages/ui/src/ui-component/button/StyledFab.js new file mode 100644 index 0000000000000000000000000000000000000000..d1f5ac4767ee4038c3d369b6948b3aa5324f01f9 --- /dev/null +++ b/packages/ui/src/ui-component/button/StyledFab.js @@ -0,0 +1,11 @@ +import { styled } from '@mui/material/styles' +import { Fab } from '@mui/material' + 
+export const StyledFab = styled(Fab)(({ theme, color = 'primary' }) => ({ + color: 'white', + backgroundColor: theme.palette[color].main, + '&:hover': { + backgroundColor: theme.palette[color].main, + backgroundImage: `linear-gradient(rgb(0 0 0/10%) 0 0)` + } +})) diff --git a/packages/ui/src/ui-component/cards/ItemCard.js b/packages/ui/src/ui-component/cards/ItemCard.js new file mode 100644 index 0000000000000000000000000000000000000000..506947ce6991d02fb2e689d1a9949e91befaad3f --- /dev/null +++ b/packages/ui/src/ui-component/cards/ItemCard.js @@ -0,0 +1,116 @@ +import PropTypes from 'prop-types' + +// material-ui +import { styled, useTheme } from '@mui/material/styles' +import { Box, Grid, Chip, Typography } from '@mui/material' + +// project imports +import MainCard from 'ui-component/cards/MainCard' +import SkeletonChatflowCard from 'ui-component/cards/Skeleton/ChatflowCard' + +const CardWrapper = styled(MainCard)(({ theme }) => ({ + background: theme.palette.card.main, + color: theme.darkTextPrimary, + overflow: 'auto', + position: 'relative', + boxShadow: '0 2px 14px 0 rgb(32 40 45 / 8%)', + cursor: 'pointer', + '&:hover': { + background: theme.palette.card.hover, + boxShadow: '0 2px 14px 0 rgb(32 40 45 / 20%)' + }, + maxHeight: '300px', + maxWidth: '300px', + overflowWrap: 'break-word', + whiteSpace: 'pre-line' +})) + +// ===========================|| CONTRACT CARD ||=========================== // + +const ItemCard = ({ isLoading, data, images, onClick }) => { + const theme = useTheme() + + const chipSX = { + height: 24, + padding: '0 6px' + } + + const activeChatflowSX = { + ...chipSX, + color: 'white', + backgroundColor: theme.palette.success.dark + } + + return ( + <> + {isLoading ? ( + + ) : ( + + + +
+ + {data.name} + +
+ {data.description && ( + + {data.description} + + )} + + {data.deployed && ( + + + + )} + + {images && ( +
+ {images.map((img) => ( +
+ +
+ ))} +
+ )} +
+
+
+ )} + + ) +} + +ItemCard.propTypes = { + isLoading: PropTypes.bool, + data: PropTypes.object, + images: PropTypes.array, + onClick: PropTypes.func +} + +export default ItemCard diff --git a/packages/ui/src/ui-component/cards/MainCard.js b/packages/ui/src/ui-component/cards/MainCard.js new file mode 100644 index 0000000000000000000000000000000000000000..302b15dae532358024f2db656fb31c5be51b8c3a --- /dev/null +++ b/packages/ui/src/ui-component/cards/MainCard.js @@ -0,0 +1,79 @@ +import PropTypes from 'prop-types' +import { forwardRef } from 'react' + +// material-ui +import { useTheme } from '@mui/material/styles' +import { Card, CardContent, CardHeader, Divider, Typography } from '@mui/material' + +// constant +const headerSX = { + '& .MuiCardHeader-action': { mr: 0 } +} + +// ==============================|| CUSTOM MAIN CARD ||============================== // + +const MainCard = forwardRef(function MainCard( + { + border = true, + boxShadow, + children, + content = true, + contentClass = '', + contentSX = {}, + darkTitle, + secondary, + shadow, + sx = {}, + title, + ...others + }, + ref +) { + const theme = useTheme() + + return ( + + {/* card header and action */} + {!darkTitle && title && } + {darkTitle && title && {title}} action={secondary} />} + + {/* content & header divider */} + {title && } + + {/* card content */} + {content && ( + + {children} + + )} + {!content && children} + + ) +}) + +MainCard.propTypes = { + border: PropTypes.bool, + boxShadow: PropTypes.bool, + children: PropTypes.node, + content: PropTypes.bool, + contentClass: PropTypes.string, + contentSX: PropTypes.object, + darkTitle: PropTypes.bool, + secondary: PropTypes.oneOfType([PropTypes.node, PropTypes.string, PropTypes.object]), + shadow: PropTypes.string, + sx: PropTypes.object, + title: PropTypes.oneOfType([PropTypes.node, PropTypes.string, PropTypes.object]) +} + +export default MainCard diff --git a/packages/ui/src/ui-component/cards/Skeleton/ChatflowCard.js 
b/packages/ui/src/ui-component/cards/Skeleton/ChatflowCard.js new file mode 100644 index 0000000000000000000000000000000000000000..955fb09b1a4e1051b268a409134c41b4570d007c --- /dev/null +++ b/packages/ui/src/ui-component/cards/Skeleton/ChatflowCard.js @@ -0,0 +1,32 @@ +// material-ui +import { Card, CardContent, Grid } from '@mui/material' +import Skeleton from '@mui/material/Skeleton' + +// ==============================|| SKELETON - BRIDGE CARD ||============================== // + +const ChatflowCard = () => ( + + + + + + + + + + + + + + + + + + + + + + +) + +export default ChatflowCard diff --git a/packages/ui/src/ui-component/checkbox/Checkbox.js b/packages/ui/src/ui-component/checkbox/Checkbox.js new file mode 100644 index 0000000000000000000000000000000000000000..9c16a43a98ab0060a693709a5d110b4b9c49580a --- /dev/null +++ b/packages/ui/src/ui-component/checkbox/Checkbox.js @@ -0,0 +1,34 @@ +import { useState } from 'react' +import PropTypes from 'prop-types' +import { FormControlLabel, Checkbox } from '@mui/material' + +export const CheckboxInput = ({ value, label, onChange, disabled = false }) => { + const [myValue, setMyValue] = useState(value) + + return ( + <> + { + setMyValue(event.target.checked) + onChange(event.target.checked) + }} + /> + } + label={label} + /> + + ) +} + +CheckboxInput.propTypes = { + value: PropTypes.bool, + label: PropTypes.string, + onChange: PropTypes.func, + disabled: PropTypes.bool +} diff --git a/packages/ui/src/ui-component/dialog/APICodeDialog.js b/packages/ui/src/ui-component/dialog/APICodeDialog.js new file mode 100644 index 0000000000000000000000000000000000000000..e2e7438d77ec1a12906f0b51201f44434e704d0d --- /dev/null +++ b/packages/ui/src/ui-component/dialog/APICodeDialog.js @@ -0,0 +1,683 @@ +import { createPortal } from 'react-dom' +import { useNavigate } from 'react-router-dom' +import { useState, useEffect } from 'react' +import { useDispatch } from 'react-redux' +import PropTypes from 'prop-types' + +import { Tabs, 
Tab, Dialog, DialogContent, DialogTitle, Box } from '@mui/material' +import { CopyBlock, atomOneDark } from 'react-code-blocks' + +// Project import +import { Dropdown } from 'ui-component/dropdown/Dropdown' + +// Const +import { baseURL } from 'store/constant' +import { SET_CHATFLOW } from 'store/actions' + +// Images +import pythonSVG from 'assets/images/python.svg' +import javascriptSVG from 'assets/images/javascript.svg' +import cURLSVG from 'assets/images/cURL.svg' +import EmbedSVG from 'assets/images/embed.svg' + +// API +import apiKeyApi from 'api/apikey' +import chatflowsApi from 'api/chatflows' +import configApi from 'api/config' + +// Hooks +import useApi from 'hooks/useApi' +import { CheckboxInput } from 'ui-component/checkbox/Checkbox' +import { TableViewOnly } from 'ui-component/table/Table' + +function TabPanel(props) { + const { children, value, index, ...other } = props + return ( + + ) +} + +TabPanel.propTypes = { + children: PropTypes.node, + index: PropTypes.number.isRequired, + value: PropTypes.number.isRequired +} + +function a11yProps(index) { + return { + id: `attachment-tab-${index}`, + 'aria-controls': `attachment-tabpanel-${index}` + } +} + +const unshiftFiles = (configData) => { + const filesConfig = configData.find((config) => config.name === 'files') + if (filesConfig) { + configData = configData.filter((config) => config.name !== 'files') + configData.unshift(filesConfig) + } + return configData +} + +const getConfigExamplesForJS = (configData, bodyType) => { + let finalStr = '' + configData = unshiftFiles(configData) + const loop = Math.min(configData.length, 4) + for (let i = 0; i < loop; i += 1) { + const config = configData[i] + let exampleVal = `"example"` + if (config.type === 'string') exampleVal = `"example"` + else if (config.type === 'boolean') exampleVal = `true` + else if (config.type === 'number') exampleVal = `1` + else if (config.name === 'files') exampleVal = `input.files[0]` + finalStr += bodyType === 'json' ? 
`\n "${config.name}": ${exampleVal},` : `formData.append("${config.name}", ${exampleVal})\n` + if (i === loop - 1 && bodyType !== 'json') `formData.append("question", "Hey, how are you?")\n` + } + return finalStr +} + +const getConfigExamplesForPython = (configData, bodyType) => { + let finalStr = '' + configData = unshiftFiles(configData) + const loop = Math.min(configData.length, 4) + for (let i = 0; i < loop; i += 1) { + const config = configData[i] + let exampleVal = `"example"` + if (config.type === 'string') exampleVal = `"example"` + else if (config.type === 'boolean') exampleVal = `true` + else if (config.type === 'number') exampleVal = `1` + else if (config.name === 'files') exampleVal = `('example${config.type}', open('example${config.type}', 'rb'))` + finalStr += bodyType === 'json' ? `\n "${config.name}": ${exampleVal},` : `\n "${config.name}": ${exampleVal},` + if (i === loop - 1 && bodyType !== 'json') finalStr += `\n "question": "Hey, how are you?"\n` + } + return finalStr +} + +const getConfigExamplesForCurl = (configData, bodyType) => { + let finalStr = '' + configData = unshiftFiles(configData) + const loop = Math.min(configData.length, 4) + for (let i = 0; i < loop; i += 1) { + const config = configData[i] + let exampleVal = `example` + if (config.type === 'string') exampleVal = bodyType === 'json' ? `"example"` : `example` + else if (config.type === 'boolean') exampleVal = `true` + else if (config.type === 'number') exampleVal = `1` + else if (config.name === 'files') exampleVal = `@/home/user1/Desktop/example${config.type}` + finalStr += bodyType === 'json' ? `"${config.name}": ${exampleVal}` : `\n -F "${config.name}=${exampleVal}"` + if (i === loop - 1) finalStr += bodyType === 'json' ? ` }` : ` \\\n -F "question=Hey, how are you?"` + else finalStr += bodyType === 'json' ? 
`, ` : ` \\` + } + return finalStr +} + +const embedCode = (chatflowid) => { + return `` +} + +const embedCodeCustomization = (chatflowid) => { + return `` +} + +const APICodeDialog = ({ show, dialogProps, onCancel }) => { + const portalElement = document.getElementById('portal') + const navigate = useNavigate() + const dispatch = useDispatch() + const codes = ['Embed', 'Python', 'JavaScript', 'cURL'] + const [value, setValue] = useState(0) + const [keyOptions, setKeyOptions] = useState([]) + const [apiKeys, setAPIKeys] = useState([]) + const [chatflowApiKeyId, setChatflowApiKeyId] = useState('') + const [selectedApiKey, setSelectedApiKey] = useState({}) + const [checkboxVal, setCheckbox] = useState(false) + const [embedChatCheckboxVal, setEmbedChatCheckbox] = useState(false) + + const getAllAPIKeysApi = useApi(apiKeyApi.getAllAPIKeys) + const updateChatflowApi = useApi(chatflowsApi.updateChatflow) + const getConfigApi = useApi(configApi.getConfig) + + const onCheckBoxChanged = (newVal) => { + setCheckbox(newVal) + if (newVal) { + getConfigApi.request(dialogProps.chatflowid) + } + } + + const onCheckBoxEmbedChatChanged = (newVal) => { + setEmbedChatCheckbox(newVal) + } + + const onApiKeySelected = (keyValue) => { + if (keyValue === 'addnewkey') { + navigate('/apikey') + return + } + setChatflowApiKeyId(keyValue) + setSelectedApiKey(apiKeys.find((key) => key.id === keyValue)) + const updateBody = { + apikeyid: keyValue + } + updateChatflowApi.request(dialogProps.chatflowid, updateBody) + } + + useEffect(() => { + if (updateChatflowApi.data) { + dispatch({ type: SET_CHATFLOW, chatflow: updateChatflowApi.data }) + } + }, [updateChatflowApi.data, dispatch]) + + const handleChange = (event, newValue) => { + setValue(newValue) + } + + const getCode = (codeLang) => { + if (codeLang === 'Python') { + return `import requests + +API_URL = "${baseURL}/api/v1/prediction/${dialogProps.chatflowid}" + +def query(payload): + response = requests.post(API_URL, json=payload) + return 
response.json() + +output = query({ + "question": "Hey, how are you?", +}) +` + } else if (codeLang === 'JavaScript') { + return `async function query(data) { + const response = await fetch( + "${baseURL}/api/v1/prediction/${dialogProps.chatflowid}", + { + method: "POST", + body: data + } + ); + const result = await response.json(); + return result; +} + +query({"question": "Hey, how are you?"}).then((response) => { + console.log(response); +}); +` + } else if (codeLang === 'cURL') { + return `curl ${baseURL}/api/v1/prediction/${dialogProps.chatflowid} \\ + -X POST \\ + -d '{"question": "Hey, how are you?"}'` + } else if (codeLang === 'Embed') { + return embedCode(dialogProps.chatflowid) + } + return '' + } + + const getCodeWithAuthorization = (codeLang) => { + if (codeLang === 'Python') { + return `import requests + +API_URL = "${baseURL}/api/v1/prediction/${dialogProps.chatflowid}" +headers = {"Authorization": "Bearer ${selectedApiKey?.apiKey}"} + +def query(payload): + response = requests.post(API_URL, headers=headers, json=payload) + return response.json() + +output = query({ + "question": "Hey, how are you?", +}) +` + } else if (codeLang === 'JavaScript') { + return `async function query(data) { + const response = await fetch( + "${baseURL}/api/v1/prediction/${dialogProps.chatflowid}", + { + headers: { Authorization: "Bearer ${selectedApiKey?.apiKey}" }, + method: "POST", + body: data + } + ); + const result = await response.json(); + return result; +} + +query({"question": "Hey, how are you?"}).then((response) => { + console.log(response); +}); +` + } else if (codeLang === 'cURL') { + return `curl ${baseURL}/api/v1/prediction/${dialogProps.chatflowid} \\ + -X POST \\ + -d '{"question": "Hey, how are you?"}' \\ + -H "Authorization: Bearer ${selectedApiKey?.apiKey}"` + } else if (codeLang === 'Embed') { + return embedCode(dialogProps.chatflowid) + } + return '' + } + + const getLang = (codeLang) => { + if (codeLang === 'Python') { + return 'python' + } else if 
(codeLang === 'JavaScript' || codeLang === 'Embed') { + return 'javascript' + } else if (codeLang === 'cURL') { + return 'bash' + } + return 'python' + } + + const getSVG = (codeLang) => { + if (codeLang === 'Python') { + return pythonSVG + } else if (codeLang === 'JavaScript') { + return javascriptSVG + } else if (codeLang === 'Embed') { + return EmbedSVG + } else if (codeLang === 'cURL') { + return cURLSVG + } + return pythonSVG + } + + // ----------------------------CONFIG FORM DATA --------------------------// + + const getConfigCodeWithFormData = (codeLang, configData) => { + if (codeLang === 'Python') { + return `import requests + +API_URL = "${baseURL}/api/v1/prediction/${dialogProps.chatflowid}" + +# use form data to upload files +form_data = {${getConfigExamplesForPython(configData, 'formData')}} + +def query(form_data): + response = requests.post(API_URL, files=form_data) + return response.json() + +output = query(form_data) +` + } else if (codeLang === 'JavaScript') { + return `// use FormData to upload files +let formData = new FormData(); +${getConfigExamplesForJS(configData, 'formData')} +async function query(formData) { + const response = await fetch( + "${baseURL}/api/v1/prediction/${dialogProps.chatflowid}", + { + method: "POST", + body: formData + } + ); + const result = await response.json(); + return result; +} + +query(formData).then((response) => { + console.log(response); +}); +` + } else if (codeLang === 'cURL') { + return `curl ${baseURL}/api/v1/prediction/${dialogProps.chatflowid} \\ + -X POST \\${getConfigExamplesForCurl(configData, 'formData')}` + } + return '' + } + + // ----------------------------CONFIG FORM DATA with AUTH--------------------------// + + const getConfigCodeWithFormDataWithAuth = (codeLang, configData) => { + if (codeLang === 'Python') { + return `import requests + +API_URL = "${baseURL}/api/v1/prediction/${dialogProps.chatflowid}" +headers = {"Authorization": "Bearer ${selectedApiKey?.apiKey}"} + +# use form data to 
upload files +form_data = {${getConfigExamplesForPython(configData, 'formData')}} + +def query(form_data): + response = requests.post(API_URL, headers=headers, files=form_data) + return response.json() + +output = query(form_data) +` + } else if (codeLang === 'JavaScript') { + return `// use FormData to upload files +let formData = new FormData(); +${getConfigExamplesForJS(configData, 'formData')} +async function query(formData) { + const response = await fetch( + "${baseURL}/api/v1/prediction/${dialogProps.chatflowid}", + { + headers: { Authorization: "Bearer ${selectedApiKey?.apiKey}" }, + method: "POST", + body: formData + } + ); + const result = await response.json(); + return result; +} + +query(formData).then((response) => { + console.log(response); +}); +` + } else if (codeLang === 'cURL') { + return `curl ${baseURL}/api/v1/prediction/${dialogProps.chatflowid} \\ + -X POST \\${getConfigExamplesForCurl(configData, 'formData')} \\ + -H "Authorization: Bearer ${selectedApiKey?.apiKey}"` + } + return '' + } + + // ----------------------------CONFIG JSON--------------------------// + + const getConfigCode = (codeLang, configData) => { + if (codeLang === 'Python') { + return `import requests + +API_URL = "${baseURL}/api/v1/prediction/${dialogProps.chatflowid}" + +def query(payload): + response = requests.post(API_URL, json=payload) + return response.json() + +output = query({ + "question": "Hey, how are you?", + "overrideConfig": {${getConfigExamplesForPython(configData, 'json')} + } +}) +` + } else if (codeLang === 'JavaScript') { + return `async function query(data) { + const response = await fetch( + "${baseURL}/api/v1/prediction/${dialogProps.chatflowid}", + { + method: "POST", + body: data + } + ); + const result = await response.json(); + return result; +} + +query({ + "question": "Hey, how are you?", + "overrideConfig": {${getConfigExamplesForJS(configData, 'json')} + } +}).then((response) => { + console.log(response); +}); +` + } else if (codeLang === 
'cURL') { + return `curl ${baseURL}/api/v1/prediction/${dialogProps.chatflowid} \\ + -X POST \\ + -d '{"question": "Hey, how are you?", "overrideConfig": {${getConfigExamplesForCurl(configData, 'json')}}'` + } + return '' + } + + // ----------------------------CONFIG JSON with AUTH--------------------------// + + const getConfigCodeWithAuthorization = (codeLang, configData) => { + if (codeLang === 'Python') { + return `import requests + +API_URL = "${baseURL}/api/v1/prediction/${dialogProps.chatflowid}" +headers = {"Authorization": "Bearer ${selectedApiKey?.apiKey}"} + +def query(payload): + response = requests.post(API_URL, headers=headers, json=payload) + return response.json() + +output = query({ + "question": "Hey, how are you?", + "overrideConfig": {${getConfigExamplesForPython(configData, 'json')} + } +}) +` + } else if (codeLang === 'JavaScript') { + return `async function query(data) { + const response = await fetch( + "${baseURL}/api/v1/prediction/${dialogProps.chatflowid}", + { + headers: { Authorization: "Bearer ${selectedApiKey?.apiKey}" }, + method: "POST", + body: data + } + ); + const result = await response.json(); + return result; +} + +query({ + "question": "Hey, how are you?", + "overrideConfig": {${getConfigExamplesForJS(configData, 'json')} + } +}).then((response) => { + console.log(response); +}); +` + } else if (codeLang === 'cURL') { + return `curl ${baseURL}/api/v1/prediction/${dialogProps.chatflowid} \\ + -X POST \\ + -d '{"question": "Hey, how are you?", "overrideConfig": {${getConfigExamplesForCurl(configData, 'json')}}' \\ + -H "Authorization: Bearer ${selectedApiKey?.apiKey}"` + } + return '' + } + + useEffect(() => { + if (getAllAPIKeysApi.data) { + const options = [ + { + label: 'No Authorization', + name: '' + } + ] + for (const key of getAllAPIKeysApi.data) { + options.push({ + label: key.keyName, + name: key.id + }) + } + options.push({ + label: '- Add New Key -', + name: 'addnewkey' + }) + setKeyOptions(options) + 
setAPIKeys(getAllAPIKeysApi.data) + + if (dialogProps.chatflowApiKeyId) { + setChatflowApiKeyId(dialogProps.chatflowApiKeyId) + setSelectedApiKey(getAllAPIKeysApi.data.find((key) => key.id === dialogProps.chatflowApiKeyId)) + } + } + }, [dialogProps, getAllAPIKeysApi.data]) + + useEffect(() => { + if (show) { + getAllAPIKeysApi.request() + } + + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [show]) + + const component = show ? ( + + + {dialogProps.title} + + +
+
+ + {codes.map((codeLang, index) => ( + + } + iconPosition='start' + key={index} + label={codeLang} + {...a11yProps(index)} + > + ))} + +
+ {value !== 0 && ( +
+ onApiKeySelected(newValue)} + value={dialogProps.chatflowApiKeyId ?? chatflowApiKeyId ?? 'Choose an API key'} + /> +
+ )} +
+
+ {codes.map((codeLang, index) => ( + + {value === 0 && ( + <> + + Paste this anywhere in the {``} tag of your html file. +

+ You can also specify a  + + version + + : {`https://cdn.jsdelivr.net/npm/flowise-embed@/dist/web.js`} +

+
+
+ + )} + + {value !== 0 && } + {value !== 0 && checkboxVal && getConfigApi.data && getConfigApi.data.length > 0 && ( + <> + + + + )} + {value === 0 && ( + + )} + {value === 0 && embedChatCheckboxVal && ( + + )} +
+ ))} +
+
+ ) : null + + return createPortal(component, portalElement) +} + +APICodeDialog.propTypes = { + show: PropTypes.bool, + dialogProps: PropTypes.object, + onCancel: PropTypes.func +} + +export default APICodeDialog diff --git a/packages/ui/src/ui-component/dialog/AboutDialog.js b/packages/ui/src/ui-component/dialog/AboutDialog.js new file mode 100644 index 0000000000000000000000000000000000000000..54c077d1895b321325a07a7acea1ef1432ab234d --- /dev/null +++ b/packages/ui/src/ui-component/dialog/AboutDialog.js @@ -0,0 +1,85 @@ +import { createPortal } from 'react-dom' +import { useState, useEffect } from 'react' +import PropTypes from 'prop-types' +import { Dialog, DialogContent, DialogTitle, TableContainer, Table, TableHead, TableRow, TableCell, TableBody, Paper } from '@mui/material' +import moment from 'moment' +import axios from 'axios' + +const fetchLatestVer = async ({ api }) => { + let apiReturn = await axios + .get(api) + .then(async function (response) { + return response.data + }) + .catch(function (error) { + console.error(error) + }) + return apiReturn +} + +const AboutDialog = ({ show, onCancel }) => { + const portalElement = document.getElementById('portal') + + const [data, setData] = useState({}) + + useEffect(() => { + if (show) { + const fetchData = async (api) => { + let response = await fetchLatestVer({ api }) + setData(response) + } + + fetchData('https://api.github.com/repos/FlowiseAI/Flowise/releases/latest') + } + + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [show]) + + const component = show ? ( + + + Flowise Version + + + {data && ( + + + + + Latest Version + Published At + + + + + + + {data.name} + + + {moment(data.published_at).fromNow()} + + +
+
+ )} +
+
+ ) : null + + return createPortal(component, portalElement) +} + +AboutDialog.propTypes = { + show: PropTypes.bool, + onCancel: PropTypes.func +} + +export default AboutDialog diff --git a/packages/ui/src/ui-component/dialog/AdditionalParamsDialog.js b/packages/ui/src/ui-component/dialog/AdditionalParamsDialog.js new file mode 100644 index 0000000000000000000000000000000000000000..66a1eaf642d4045e38a4756c83fa13ebc5d748ca --- /dev/null +++ b/packages/ui/src/ui-component/dialog/AdditionalParamsDialog.js @@ -0,0 +1,64 @@ +import { createPortal } from 'react-dom' +import { useState, useEffect } from 'react' +import PropTypes from 'prop-types' +import { Dialog, DialogContent } from '@mui/material' +import PerfectScrollbar from 'react-perfect-scrollbar' +import NodeInputHandler from 'views/canvas/NodeInputHandler' + +const AdditionalParamsDialog = ({ show, dialogProps, onCancel }) => { + const portalElement = document.getElementById('portal') + + const [inputParams, setInputParams] = useState([]) + const [data, setData] = useState({}) + + useEffect(() => { + if (dialogProps.inputParams) setInputParams(dialogProps.inputParams) + if (dialogProps.data) setData(dialogProps.data) + + return () => { + setInputParams([]) + setData({}) + } + }, [dialogProps]) + + const component = show ? 
( + + + + {inputParams.map((inputParam, index) => ( + + ))} + + + + ) : null + + return createPortal(component, portalElement) +} + +AdditionalParamsDialog.propTypes = { + show: PropTypes.bool, + dialogProps: PropTypes.object, + onCancel: PropTypes.func +} + +export default AdditionalParamsDialog diff --git a/packages/ui/src/ui-component/dialog/ConfirmDialog.js b/packages/ui/src/ui-component/dialog/ConfirmDialog.js new file mode 100644 index 0000000000000000000000000000000000000000..6f8712f5bf850e3a269315d2a5edb0e4eb49d66b --- /dev/null +++ b/packages/ui/src/ui-component/dialog/ConfirmDialog.js @@ -0,0 +1,37 @@ +import { createPortal } from 'react-dom' +import { Button, Dialog, DialogActions, DialogContent, DialogTitle } from '@mui/material' +import useConfirm from 'hooks/useConfirm' +import { StyledButton } from 'ui-component/button/StyledButton' + +const ConfirmDialog = () => { + const { onConfirm, onCancel, confirmState } = useConfirm() + const portalElement = document.getElementById('portal') + + const component = confirmState.show ? 
( + + + {confirmState.title} + + + {confirmState.description} + + + + + {confirmState.confirmButtonName} + + + + ) : null + + return createPortal(component, portalElement) +} + +export default ConfirmDialog diff --git a/packages/ui/src/ui-component/dialog/EditPromptValuesDialog.css b/packages/ui/src/ui-component/dialog/EditPromptValuesDialog.css new file mode 100644 index 0000000000000000000000000000000000000000..d0e2ba2612ced6659dfa660f0518f58f41592356 --- /dev/null +++ b/packages/ui/src/ui-component/dialog/EditPromptValuesDialog.css @@ -0,0 +1,6 @@ +.editor__textarea { + outline: 0; +} +.editor__textarea::placeholder { + color: rgba(120, 120, 120, 0.5); +} diff --git a/packages/ui/src/ui-component/dialog/EditPromptValuesDialog.js b/packages/ui/src/ui-component/dialog/EditPromptValuesDialog.js new file mode 100644 index 0000000000000000000000000000000000000000..199b130675a1afcd9110f20bc3cd13b8ba690fa8 --- /dev/null +++ b/packages/ui/src/ui-component/dialog/EditPromptValuesDialog.js @@ -0,0 +1,256 @@ +import { createPortal } from 'react-dom' +import { useState, useEffect } from 'react' +import { useSelector } from 'react-redux' +import PropTypes from 'prop-types' +import { + Button, + Dialog, + DialogActions, + DialogContent, + Box, + List, + ListItemButton, + ListItem, + ListItemAvatar, + ListItemText, + Typography, + Stack +} from '@mui/material' +import { useTheme } from '@mui/material/styles' +import PerfectScrollbar from 'react-perfect-scrollbar' +import { StyledButton } from 'ui-component/button/StyledButton' +import { DarkCodeEditor } from 'ui-component/editor/DarkCodeEditor' +import { LightCodeEditor } from 'ui-component/editor/LightCodeEditor' + +import './EditPromptValuesDialog.css' +import { baseURL } from 'store/constant' + +const EditPromptValuesDialog = ({ show, dialogProps, onCancel, onConfirm }) => { + const portalElement = document.getElementById('portal') + + const theme = useTheme() + const customization = useSelector((state) => 
state.customization) + const languageType = 'json' + + const [inputValue, setInputValue] = useState('') + const [inputParam, setInputParam] = useState(null) + const [textCursorPosition, setTextCursorPosition] = useState({}) + + useEffect(() => { + if (dialogProps.value) setInputValue(dialogProps.value) + if (dialogProps.inputParam) setInputParam(dialogProps.inputParam) + + return () => { + setInputValue('') + setInputParam(null) + setTextCursorPosition({}) + } + }, [dialogProps]) + + const onMouseUp = (e) => { + if (e.target && e.target.selectionEnd && e.target.value) { + const cursorPosition = e.target.selectionEnd + const textBeforeCursorPosition = e.target.value.substring(0, cursorPosition) + const textAfterCursorPosition = e.target.value.substring(cursorPosition, e.target.value.length) + const body = { + textBeforeCursorPosition, + textAfterCursorPosition + } + setTextCursorPosition(body) + } else { + setTextCursorPosition({}) + } + } + + const onSelectOutputResponseClick = (node, isUserQuestion = false) => { + let variablePath = isUserQuestion ? `question` : `${node.id}.data.instance` + if (textCursorPosition) { + let newInput = '' + if (textCursorPosition.textBeforeCursorPosition === undefined && textCursorPosition.textAfterCursorPosition === undefined) + newInput = `${inputValue}${`{{${variablePath}}}`}` + else newInput = `${textCursorPosition.textBeforeCursorPosition}{{${variablePath}}}${textCursorPosition.textAfterCursorPosition}` + setInputValue(newInput) + } + } + + const component = show ? ( + + +
+ {inputParam && inputParam.type === 'string' && ( +
+ + {inputParam.label} + + + {customization.isDarkMode ? ( + setInputValue(code)} + placeholder={inputParam.placeholder} + type={languageType} + onMouseUp={(e) => onMouseUp(e)} + onBlur={(e) => onMouseUp(e)} + style={{ + fontSize: '0.875rem', + minHeight: 'calc(100vh - 220px)', + width: '100%' + }} + /> + ) : ( + setInputValue(code)} + placeholder={inputParam.placeholder} + type={languageType} + onMouseUp={(e) => onMouseUp(e)} + onBlur={(e) => onMouseUp(e)} + style={{ + fontSize: '0.875rem', + minHeight: 'calc(100vh - 220px)', + width: '100%' + }} + /> + )} + +
+ )} + {!dialogProps.disabled && inputParam && inputParam.acceptVariable && ( +
+ + Select Variable + + + + + onSelectOutputResponseClick(null, true)} + > + + +
+ AI +
+
+ +
+
+ {dialogProps.availableNodesForVariable && + dialogProps.availableNodesForVariable.length > 0 && + dialogProps.availableNodesForVariable.map((node, index) => { + const selectedOutputAnchor = node.data.outputAnchors[0].options.find( + (ancr) => ancr.name === node.data.outputs['output'] + ) + return ( + onSelectOutputResponseClick(node)} + > + + +
+ {node.data.name} +
+
+ +
+
+ ) + })} +
+
+
+
+ )} +
+
+ + + onConfirm(inputValue, inputParam.name)}> + {dialogProps.confirmButtonName} + + +
+ ) : null + + return createPortal(component, portalElement) +} + +EditPromptValuesDialog.propTypes = { + show: PropTypes.bool, + dialogProps: PropTypes.object, + onCancel: PropTypes.func, + onConfirm: PropTypes.func +} + +export default EditPromptValuesDialog diff --git a/packages/ui/src/ui-component/dialog/LoginDialog.js b/packages/ui/src/ui-component/dialog/LoginDialog.js new file mode 100644 index 0000000000000000000000000000000000000000..926a6467540e33b541785c1b19a8951fb6994eaf --- /dev/null +++ b/packages/ui/src/ui-component/dialog/LoginDialog.js @@ -0,0 +1,70 @@ +import { createPortal } from 'react-dom' +import { useState } from 'react' +import PropTypes from 'prop-types' + +import { Dialog, DialogActions, DialogContent, Typography, DialogTitle } from '@mui/material' +import { StyledButton } from 'ui-component/button/StyledButton' +import { Input } from 'ui-component/input/Input' + +const LoginDialog = ({ show, dialogProps, onConfirm }) => { + const portalElement = document.getElementById('portal') + const usernameInput = { + label: 'Username', + name: 'username', + type: 'string', + placeholder: 'john doe' + } + const passwordInput = { + label: 'Password', + name: 'password', + type: 'password' + } + const [usernameVal, setUsernameVal] = useState('') + const [passwordVal, setPasswordVal] = useState('') + + const component = show ? ( + { + if (e.key === 'Enter') { + onConfirm(usernameVal, passwordVal) + } + }} + open={show} + fullWidth + maxWidth='xs' + aria-labelledby='alert-dialog-title' + aria-describedby='alert-dialog-description' + > + + {dialogProps.title} + + + Username + setUsernameVal(newValue)} + value={usernameVal} + showDialog={false} + /> +
+ Password + setPasswordVal(newValue)} value={passwordVal} /> +
+ + onConfirm(usernameVal, passwordVal)}> + {dialogProps.confirmButtonName} + + +
+ ) : null + + return createPortal(component, portalElement) +} + +LoginDialog.propTypes = { + show: PropTypes.bool, + dialogProps: PropTypes.object, + onConfirm: PropTypes.func +} + +export default LoginDialog diff --git a/packages/ui/src/ui-component/dialog/SaveChatflowDialog.js b/packages/ui/src/ui-component/dialog/SaveChatflowDialog.js new file mode 100644 index 0000000000000000000000000000000000000000..24a7f2cfb596846184ff506d78f669d1d43ba89f --- /dev/null +++ b/packages/ui/src/ui-component/dialog/SaveChatflowDialog.js @@ -0,0 +1,61 @@ +import { createPortal } from 'react-dom' +import { useState, useEffect } from 'react' +import PropTypes from 'prop-types' + +import { Button, Dialog, DialogActions, DialogContent, OutlinedInput, DialogTitle } from '@mui/material' +import { StyledButton } from 'ui-component/button/StyledButton' + +const SaveChatflowDialog = ({ show, dialogProps, onCancel, onConfirm }) => { + const portalElement = document.getElementById('portal') + + const [chatflowName, setChatflowName] = useState('') + const [isReadyToSave, setIsReadyToSave] = useState(false) + + useEffect(() => { + if (chatflowName) setIsReadyToSave(true) + else setIsReadyToSave(false) + }, [chatflowName]) + + const component = show ? 
( + + + {dialogProps.title} + + + setChatflowName(e.target.value)} + /> + + + + onConfirm(chatflowName)}> + {dialogProps.confirmButtonName} + + + + ) : null + + return createPortal(component, portalElement) +} + +SaveChatflowDialog.propTypes = { + show: PropTypes.bool, + dialogProps: PropTypes.object, + onCancel: PropTypes.func, + onConfirm: PropTypes.func +} + +export default SaveChatflowDialog diff --git a/packages/ui/src/ui-component/dialog/SourceDocDialog.js b/packages/ui/src/ui-component/dialog/SourceDocDialog.js new file mode 100644 index 0000000000000000000000000000000000000000..a088a6c49e8a5b998d9d6380540e7cebca3483e2 --- /dev/null +++ b/packages/ui/src/ui-component/dialog/SourceDocDialog.js @@ -0,0 +1,57 @@ +import { createPortal } from 'react-dom' +import { useState, useEffect } from 'react' +import { useSelector } from 'react-redux' +import PropTypes from 'prop-types' +import { Dialog, DialogContent, DialogTitle } from '@mui/material' +import ReactJson from 'react-json-view' + +const SourceDocDialog = ({ show, dialogProps, onCancel }) => { + const portalElement = document.getElementById('portal') + const customization = useSelector((state) => state.customization) + + const [data, setData] = useState({}) + + useEffect(() => { + if (dialogProps.data) setData(dialogProps.data) + + return () => { + setData({}) + } + }, [dialogProps]) + + const component = show ? 
( + + + Source Document + + + + + + ) : null + + return createPortal(component, portalElement) +} + +SourceDocDialog.propTypes = { + show: PropTypes.bool, + dialogProps: PropTypes.object, + onCancel: PropTypes.func +} + +export default SourceDocDialog diff --git a/packages/ui/src/ui-component/dropdown/Dropdown.js b/packages/ui/src/ui-component/dropdown/Dropdown.js new file mode 100644 index 0000000000000000000000000000000000000000..12d10bef549d7b4c893303c3f9c870f6a8377a5c --- /dev/null +++ b/packages/ui/src/ui-component/dropdown/Dropdown.js @@ -0,0 +1,65 @@ +import { useState } from 'react' +import { useSelector } from 'react-redux' + +import { Popper, FormControl, TextField, Box, Typography } from '@mui/material' +import Autocomplete, { autocompleteClasses } from '@mui/material/Autocomplete' +import { styled } from '@mui/material/styles' +import PropTypes from 'prop-types' + +const StyledPopper = styled(Popper)({ + boxShadow: '0px 8px 10px -5px rgb(0 0 0 / 20%), 0px 16px 24px 2px rgb(0 0 0 / 14%), 0px 6px 30px 5px rgb(0 0 0 / 12%)', + borderRadius: '10px', + [`& .${autocompleteClasses.listbox}`]: { + boxSizing: 'border-box', + '& ul': { + padding: 10, + margin: 10 + } + } +}) + +export const Dropdown = ({ name, value, options, onSelect, disabled = false, disableClearable = false }) => { + const customization = useSelector((state) => state.customization) + const findMatchingOptions = (options = [], value) => options.find((option) => option.name === value) + const getDefaultOptionValue = () => '' + let [internalValue, setInternalValue] = useState(value ?? 'choose an option') + + return ( + + { + const value = selection ? selection.name : '' + setInternalValue(value) + onSelect(value) + }} + PopperComponent={StyledPopper} + renderInput={(params) => } + renderOption={(props, option) => ( + +
+ {option.label} + {option.description && ( + {option.description} + )} +
+
+ )} + /> +
+ ) +} + +Dropdown.propTypes = { + name: PropTypes.string, + value: PropTypes.string, + options: PropTypes.array, + onSelect: PropTypes.func, + disabled: PropTypes.bool, + disableClearable: PropTypes.bool +} diff --git a/packages/ui/src/ui-component/editor/DarkCodeEditor.js b/packages/ui/src/ui-component/editor/DarkCodeEditor.js new file mode 100644 index 0000000000000000000000000000000000000000..3925f4a665db8a0e09efad2f7e39a1d0731249be --- /dev/null +++ b/packages/ui/src/ui-component/editor/DarkCodeEditor.js @@ -0,0 +1,42 @@ +import Editor from 'react-simple-code-editor' +import { highlight, languages } from 'prismjs/components/prism-core' +import 'prismjs/components/prism-clike' +import 'prismjs/components/prism-javascript' +import 'prismjs/components/prism-json' +import 'prismjs/components/prism-markup' +import './prism-dark.css' +import PropTypes from 'prop-types' +import { useTheme } from '@mui/material/styles' + +export const DarkCodeEditor = ({ value, placeholder, disabled = false, type, style, onValueChange, onMouseUp, onBlur }) => { + const theme = useTheme() + + return ( + highlight(code, type === 'json' ? 
languages.json : languages.js)} + padding={10} + onValueChange={onValueChange} + onMouseUp={onMouseUp} + onBlur={onBlur} + style={{ + ...style, + background: theme.palette.codeEditor.main + }} + textareaClassName='editor__textarea' + /> + ) +} + +DarkCodeEditor.propTypes = { + value: PropTypes.string, + placeholder: PropTypes.string, + disabled: PropTypes.bool, + type: PropTypes.string, + style: PropTypes.object, + onValueChange: PropTypes.func, + onMouseUp: PropTypes.func, + onBlur: PropTypes.func +} diff --git a/packages/ui/src/ui-component/editor/LightCodeEditor.js b/packages/ui/src/ui-component/editor/LightCodeEditor.js new file mode 100644 index 0000000000000000000000000000000000000000..86f7057df087324e2e5ff02d7f803a4c70005122 --- /dev/null +++ b/packages/ui/src/ui-component/editor/LightCodeEditor.js @@ -0,0 +1,42 @@ +import Editor from 'react-simple-code-editor' +import { highlight, languages } from 'prismjs/components/prism-core' +import 'prismjs/components/prism-clike' +import 'prismjs/components/prism-javascript' +import 'prismjs/components/prism-json' +import 'prismjs/components/prism-markup' +import './prism-light.css' +import PropTypes from 'prop-types' +import { useTheme } from '@mui/material/styles' + +export const LightCodeEditor = ({ value, placeholder, disabled = false, type, style, onValueChange, onMouseUp, onBlur }) => { + const theme = useTheme() + + return ( + highlight(code, type === 'json' ? 
languages.json : languages.js)} + padding={10} + onValueChange={onValueChange} + onMouseUp={onMouseUp} + onBlur={onBlur} + style={{ + ...style, + background: theme.palette.card.main + }} + textareaClassName='editor__textarea' + /> + ) +} + +LightCodeEditor.propTypes = { + value: PropTypes.string, + placeholder: PropTypes.string, + disabled: PropTypes.bool, + type: PropTypes.string, + style: PropTypes.object, + onValueChange: PropTypes.func, + onMouseUp: PropTypes.func, + onBlur: PropTypes.func +} diff --git a/packages/ui/src/ui-component/editor/prism-dark.css b/packages/ui/src/ui-component/editor/prism-dark.css new file mode 100644 index 0000000000000000000000000000000000000000..c4bfb41329068c5bb929ae0affdee4c6c2a4ff4f --- /dev/null +++ b/packages/ui/src/ui-component/editor/prism-dark.css @@ -0,0 +1,275 @@ +pre[class*='language-'], +code[class*='language-'] { + color: #d4d4d4; + font-size: 13px; + text-shadow: none; + font-family: Menlo, Monaco, Consolas, 'Andale Mono', 'Ubuntu Mono', 'Courier New', monospace; + direction: ltr; + text-align: left; + white-space: pre; + word-spacing: normal; + word-break: normal; + line-height: 1.5; + -moz-tab-size: 4; + -o-tab-size: 4; + tab-size: 4; + -webkit-hyphens: none; + -moz-hyphens: none; + -ms-hyphens: none; + hyphens: none; +} + +pre[class*='language-']::selection, +code[class*='language-']::selection, +pre[class*='language-'] *::selection, +code[class*='language-'] *::selection { + text-shadow: none; + background: #264f78; +} + +@media print { + pre[class*='language-'], + code[class*='language-'] { + text-shadow: none; + } +} + +pre[class*='language-'] { + padding: 1em; + margin: 0.5em 0; + overflow: auto; + background: #1e1e1e; +} + +:not(pre) > code[class*='language-'] { + padding: 0.1em 0.3em; + border-radius: 0.3em; + color: #db4c69; + background: #1e1e1e; +} +/********************************************************* +* Tokens +*/ +.namespace { + opacity: 0.7; +} + +.token.doctype .token.doctype-tag { + color: 
#569cd6; +} + +.token.doctype .token.name { + color: #9cdcfe; +} + +.token.comment, +.token.prolog { + color: #6a9955; +} + +.token.punctuation, +.language-html .language-css .token.punctuation, +.language-html .language-javascript .token.punctuation { + color: #d4d4d4; +} + +.token.property, +.token.tag, +.token.boolean, +.token.number, +.token.constant, +.token.symbol, +.token.inserted, +.token.unit { + color: #b5cea8; +} + +.token.selector, +.token.attr-name, +.token.string, +.token.char, +.token.builtin, +.token.deleted { + color: #ce9178; +} + +.language-css .token.string.url { + text-decoration: underline; +} + +.token.operator, +.token.entity { + color: #d4d4d4; +} + +.token.operator.arrow { + color: #569cd6; +} + +.token.atrule { + color: #ce9178; +} + +.token.atrule .token.rule { + color: #c586c0; +} + +.token.atrule .token.url { + color: #9cdcfe; +} + +.token.atrule .token.url .token.function { + color: #dcdcaa; +} + +.token.atrule .token.url .token.punctuation { + color: #d4d4d4; +} + +.token.keyword { + color: #569cd6; +} + +.token.keyword.module, +.token.keyword.control-flow { + color: #c586c0; +} + +.token.function, +.token.function .token.maybe-class-name { + color: #dcdcaa; +} + +.token.regex { + color: #d16969; +} + +.token.important { + color: #569cd6; +} + +.token.italic { + font-style: italic; +} + +.token.constant { + color: #9cdcfe; +} + +.token.class-name, +.token.maybe-class-name { + color: #4ec9b0; +} + +.token.console { + color: #9cdcfe; +} + +.token.parameter { + color: #9cdcfe; +} + +.token.interpolation { + color: #9cdcfe; +} + +.token.punctuation.interpolation-punctuation { + color: #569cd6; +} + +.token.boolean { + color: #569cd6; +} + +.token.property, +.token.variable, +.token.imports .token.maybe-class-name, +.token.exports .token.maybe-class-name { + color: #9cdcfe; +} + +.token.selector { + color: #d7ba7d; +} + +.token.escape { + color: #d7ba7d; +} + +.token.tag { + color: #569cd6; +} + +.token.tag .token.punctuation { + color: 
#808080; +} + +.token.cdata { + color: #808080; +} + +.token.attr-name { + color: #9cdcfe; +} + +.token.attr-value, +.token.attr-value .token.punctuation { + color: #ce9178; +} + +.token.attr-value .token.punctuation.attr-equals { + color: #d4d4d4; +} + +.token.entity { + color: #569cd6; +} + +.token.namespace { + color: #4ec9b0; +} +/********************************************************* +* Language Specific +*/ + +pre[class*='language-javascript'], +code[class*='language-javascript'], +pre[class*='language-jsx'], +code[class*='language-jsx'], +pre[class*='language-typescript'], +code[class*='language-typescript'], +pre[class*='language-tsx'], +code[class*='language-tsx'] { + color: #9cdcfe; +} + +pre[class*='language-css'], +code[class*='language-css'] { + color: #ce9178; +} + +pre[class*='language-html'], +code[class*='language-html'] { + color: #d4d4d4; +} + +.language-regex .token.anchor { + color: #dcdcaa; +} + +.language-html .token.punctuation { + color: #808080; +} +/********************************************************* +* Line highlighting +*/ +pre[class*='language-'] > code[class*='language-'] { + position: relative; + z-index: 1; +} + +.line-highlight.line-highlight { + background: #f7ebc6; + box-shadow: inset 5px 0 0 #f7d87c; + z-index: 0; +} diff --git a/packages/ui/src/ui-component/editor/prism-light.css b/packages/ui/src/ui-component/editor/prism-light.css new file mode 100644 index 0000000000000000000000000000000000000000..95d6d6eba985dce42c8f2580fdc404d2a7245d36 --- /dev/null +++ b/packages/ui/src/ui-component/editor/prism-light.css @@ -0,0 +1,207 @@ +code[class*='language-'], +pre[class*='language-'] { + text-align: left; + white-space: pre; + word-spacing: normal; + word-break: normal; + word-wrap: normal; + color: #90a4ae; + background: #fafafa; + font-family: Roboto Mono, monospace; + font-size: 1em; + line-height: 1.5em; + + -moz-tab-size: 4; + -o-tab-size: 4; + tab-size: 4; + + -webkit-hyphens: none; + -moz-hyphens: none; + 
-ms-hyphens: none; + hyphens: none; +} + +code[class*='language-']::-moz-selection, +pre[class*='language-']::-moz-selection, +code[class*='language-'] ::-moz-selection, +pre[class*='language-'] ::-moz-selection { + background: #cceae7; + color: #263238; +} + +code[class*='language-']::selection, +pre[class*='language-']::selection, +code[class*='language-'] ::selection, +pre[class*='language-'] ::selection { + background: #cceae7; + color: #263238; +} + +:not(pre) > code[class*='language-'] { + white-space: normal; + border-radius: 0.2em; + padding: 0.1em; +} + +pre[class*='language-'] { + overflow: auto; + position: relative; + margin: 0.5em 0; + padding: 1.25em 1em; +} + +.language-css > code, +.language-sass > code, +.language-scss > code { + color: #f76d47; +} + +[class*='language-'] .namespace { + opacity: 0.7; +} + +.token.atrule { + color: #7c4dff; +} + +.token.attr-name { + color: #39adb5; +} + +.token.attr-value { + color: #f6a434; +} + +.token.attribute { + color: #f6a434; +} + +.token.boolean { + color: #7c4dff; +} + +.token.builtin { + color: #39adb5; +} + +.token.cdata { + color: #39adb5; +} + +.token.char { + color: #39adb5; +} + +.token.class { + color: #39adb5; +} + +.token.class-name { + color: #6182b8; +} + +.token.comment { + color: #aabfc9; +} + +.token.constant { + color: #7c4dff; +} + +.token.deleted { + color: #e53935; +} + +.token.doctype { + color: #aabfc9; +} + +.token.entity { + color: #e53935; +} + +.token.function { + color: #7c4dff; +} + +.token.hexcode { + color: #f76d47; +} + +.token.id { + color: #7c4dff; + font-weight: bold; +} + +.token.important { + color: #7c4dff; + font-weight: bold; +} + +.token.inserted { + color: #39adb5; +} + +.token.keyword { + color: #7c4dff; +} + +.token.number { + color: #f76d47; +} + +.token.operator { + color: #39adb5; +} + +.token.prolog { + color: #aabfc9; +} + +.token.property { + color: #39adb5; +} + +.token.pseudo-class { + color: #f6a434; +} + +.token.pseudo-element { + color: #f6a434; +} + 
+.token.punctuation { + color: #39adb5; +} + +.token.regex { + color: #6182b8; +} + +.token.selector { + color: #e53935; +} + +.token.string { + color: #f6a434; +} + +.token.symbol { + color: #7c4dff; +} + +.token.tag { + color: #e53935; +} + +.token.unit { + color: #f76d47; +} + +.token.url { + color: #e53935; +} + +.token.variable { + color: #e53935; +} diff --git a/packages/ui/src/ui-component/extended/Avatar.js b/packages/ui/src/ui-component/extended/Avatar.js new file mode 100644 index 0000000000000000000000000000000000000000..197a4b30f4102202ba8b2c720a19409422a2d7c1 --- /dev/null +++ b/packages/ui/src/ui-component/extended/Avatar.js @@ -0,0 +1,72 @@ +import PropTypes from 'prop-types' + +// material-ui +import { useTheme } from '@mui/material/styles' +import MuiAvatar from '@mui/material/Avatar' + +// ==============================|| AVATAR ||============================== // + +const Avatar = ({ color, outline, size, sx, ...others }) => { + const theme = useTheme() + + const colorSX = color && !outline && { color: theme.palette.background.paper, bgcolor: `${color}.main` } + const outlineSX = outline && { + color: color ? `${color}.main` : `primary.main`, + bgcolor: theme.palette.background.paper, + border: '2px solid', + borderColor: color ? 
`${color}.main` : `primary.main` + } + let sizeSX = {} + switch (size) { + case 'badge': + sizeSX = { + width: theme.spacing(3.5), + height: theme.spacing(3.5) + } + break + case 'xs': + sizeSX = { + width: theme.spacing(4.25), + height: theme.spacing(4.25) + } + break + case 'sm': + sizeSX = { + width: theme.spacing(5), + height: theme.spacing(5) + } + break + case 'lg': + sizeSX = { + width: theme.spacing(9), + height: theme.spacing(9) + } + break + case 'xl': + sizeSX = { + width: theme.spacing(10.25), + height: theme.spacing(10.25) + } + break + case 'md': + sizeSX = { + width: theme.spacing(7.5), + height: theme.spacing(7.5) + } + break + default: + sizeSX = {} + } + + return +} + +Avatar.propTypes = { + className: PropTypes.string, + color: PropTypes.string, + outline: PropTypes.bool, + size: PropTypes.string, + sx: PropTypes.object +} + +export default Avatar diff --git a/packages/ui/src/ui-component/extended/Breadcrumbs.js b/packages/ui/src/ui-component/extended/Breadcrumbs.js new file mode 100644 index 0000000000000000000000000000000000000000..1fca5d9be977c8465b9c74fab7e84be3ba6940d5 --- /dev/null +++ b/packages/ui/src/ui-component/extended/Breadcrumbs.js @@ -0,0 +1,184 @@ +import PropTypes from 'prop-types' +import { useEffect, useState } from 'react' +import { Link } from 'react-router-dom' + +// material-ui +import { useTheme } from '@mui/material/styles' +import { Box, Card, Divider, Grid, Typography } from '@mui/material' +import MuiBreadcrumbs from '@mui/material/Breadcrumbs' + +// project imports +import config from 'config' +import { gridSpacing } from 'store/constant' + +// assets +import { IconTallymark1 } from '@tabler/icons' +import AccountTreeTwoToneIcon from '@mui/icons-material/AccountTreeTwoTone' +import HomeIcon from '@mui/icons-material/Home' +import HomeTwoToneIcon from '@mui/icons-material/HomeTwoTone' + +const linkSX = { + display: 'flex', + color: 'grey.900', + textDecoration: 'none', + alignContent: 'center', + alignItems: 'center' 
+} + +// ==============================|| BREADCRUMBS ||============================== // + +const Breadcrumbs = ({ card, divider, icon, icons, maxItems, navigation, rightAlign, separator, title, titleBottom, ...others }) => { + const theme = useTheme() + + const iconStyle = { + marginRight: theme.spacing(0.75), + marginTop: `-${theme.spacing(0.25)}`, + width: '1rem', + height: '1rem', + color: theme.palette.secondary.main + } + + const [main, setMain] = useState() + const [item, setItem] = useState() + + // set active item state + const getCollapse = (menu) => { + if (menu.children) { + menu.children.filter((collapse) => { + if (collapse.type && collapse.type === 'collapse') { + getCollapse(collapse) + } else if (collapse.type && collapse.type === 'item') { + if (document.location.pathname === config.basename + collapse.url) { + setMain(menu) + setItem(collapse) + } + } + return false + }) + } + } + + useEffect(() => { + navigation?.items?.map((menu) => { + if (menu.type && menu.type === 'group') { + getCollapse(menu) + } + return false + }) + }) + + // item separator + const SeparatorIcon = separator + const separatorIcon = separator ? : + + let mainContent + let itemContent + let breadcrumbContent = + let itemTitle = '' + let CollapseIcon + let ItemIcon + + // collapse item + if (main && main.type === 'collapse') { + CollapseIcon = main.icon ? main.icon : AccountTreeTwoToneIcon + mainContent = ( + + {icons && } + {main.title} + + ) + } + + // items + if (item && item.type === 'item') { + itemTitle = item.title + + ItemIcon = item.icon ? 
item.icon : AccountTreeTwoToneIcon + itemContent = ( + + {icons && } + {itemTitle} + + ) + + // main + if (item.breadcrumbs !== false) { + breadcrumbContent = ( + + + + {title && !titleBottom && ( + + + {item.title} + + + )} + + + + {icons && } + {icon && } + {!icon && 'Dashboard'} + + {mainContent} + {itemContent} + + + {title && titleBottom && ( + + + {item.title} + + + )} + + + {card === false && divider !== false && } + + ) + } + } + + return breadcrumbContent +} + +Breadcrumbs.propTypes = { + card: PropTypes.bool, + divider: PropTypes.bool, + icon: PropTypes.bool, + icons: PropTypes.bool, + maxItems: PropTypes.number, + navigation: PropTypes.object, + rightAlign: PropTypes.bool, + separator: PropTypes.oneOfType([PropTypes.func, PropTypes.object]), + title: PropTypes.bool, + titleBottom: PropTypes.bool +} + +export default Breadcrumbs diff --git a/packages/ui/src/ui-component/extended/Logo.js b/packages/ui/src/ui-component/extended/Logo.js new file mode 100644 index 0000000000000000000000000000000000000000..e3a323c21b18106d3ce1ff533e64413378a5dc2d --- /dev/null +++ b/packages/ui/src/ui-component/extended/Logo.js @@ -0,0 +1,22 @@ +import logo from 'assets/images/flowise_logo.png' +import logoDark from 'assets/images/flowise_logo_dark.png' + +import { useSelector } from 'react-redux' + +// ==============================|| LOGO ||============================== // + +const Logo = () => { + const customization = useSelector((state) => state.customization) + + return ( +
+ Flowise +
+ ) +} + +export default Logo diff --git a/packages/ui/src/ui-component/extended/Transitions.js b/packages/ui/src/ui-component/extended/Transitions.js new file mode 100644 index 0000000000000000000000000000000000000000..4942dee993f64fa0086379ad59ad9838380d1642 --- /dev/null +++ b/packages/ui/src/ui-component/extended/Transitions.js @@ -0,0 +1,107 @@ +import PropTypes from 'prop-types' +import { forwardRef } from 'react' + +// material-ui +import { Collapse, Fade, Box, Grow, Slide, Zoom } from '@mui/material' + +// ==============================|| TRANSITIONS ||============================== // + +const Transitions = forwardRef(function Transitions({ children, position, type, direction, ...others }, ref) { + let positionSX = { + transformOrigin: '0 0 0' + } + + switch (position) { + case 'top-right': + positionSX = { + transformOrigin: 'top right' + } + break + case 'top': + positionSX = { + transformOrigin: 'top' + } + break + case 'bottom-left': + positionSX = { + transformOrigin: 'bottom left' + } + break + case 'bottom-right': + positionSX = { + transformOrigin: 'bottom right' + } + break + case 'bottom': + positionSX = { + transformOrigin: 'bottom' + } + break + case 'top-left': + default: + positionSX = { + transformOrigin: '0 0 0' + } + break + } + + return ( + + {type === 'grow' && ( + + {children} + + )} + {type === 'collapse' && ( + + {children} + + )} + {type === 'fade' && ( + + {children} + + )} + {type === 'slide' && ( + + {children} + + )} + {type === 'zoom' && ( + + {children} + + )} + + ) +}) + +Transitions.propTypes = { + children: PropTypes.node, + type: PropTypes.oneOf(['grow', 'fade', 'collapse', 'slide', 'zoom']), + position: PropTypes.oneOf(['top-left', 'top-right', 'top', 'bottom-left', 'bottom-right', 'bottom']), + direction: PropTypes.oneOf(['up', 'down', 'left', 'right']) +} + +Transitions.defaultProps = { + type: 'grow', + position: 'top-left', + direction: 'up' +} + +export default Transitions diff --git 
a/packages/ui/src/ui-component/file/File.js b/packages/ui/src/ui-component/file/File.js new file mode 100644 index 0000000000000000000000000000000000000000..93e1bb91f09184461bef182479ea66c1f4b79049 --- /dev/null +++ b/packages/ui/src/ui-component/file/File.js @@ -0,0 +1,88 @@ +import { useState } from 'react' +import PropTypes from 'prop-types' +import { useTheme } from '@mui/material/styles' +import { FormControl, Button } from '@mui/material' +import { IconUpload } from '@tabler/icons' +import { getFileName } from 'utils/genericHelper' + +export const File = ({ value, fileType, onChange, disabled = false }) => { + const theme = useTheme() + + const [myValue, setMyValue] = useState(value ?? '') + + const handleFileUpload = async (e) => { + if (!e.target.files) return + + if (e.target.files.length === 1) { + const file = e.target.files[0] + const { name } = file + + const reader = new FileReader() + reader.onload = (evt) => { + if (!evt?.target?.result) { + return + } + const { result } = evt.target + + const value = result + `,filename:${name}` + + setMyValue(value) + onChange(value) + } + reader.readAsDataURL(file) + } else if (e.target.files.length > 0) { + let files = Array.from(e.target.files).map((file) => { + const reader = new FileReader() + const { name } = file + + return new Promise((resolve) => { + reader.onload = (evt) => { + if (!evt?.target?.result) { + return + } + const { result } = evt.target + const value = result + `,filename:${name}` + resolve(value) + } + reader.readAsDataURL(file) + }) + }) + + const res = await Promise.all(files) + setMyValue(JSON.stringify(res)) + onChange(JSON.stringify(res)) + } + } + + return ( + + + {myValue ? 
getFileName(myValue) : 'Choose a file to upload'} + + + + ) +} + +File.propTypes = { + value: PropTypes.string, + fileType: PropTypes.string, + onChange: PropTypes.func, + disabled: PropTypes.bool +} diff --git a/packages/ui/src/ui-component/input/Input.js b/packages/ui/src/ui-component/input/Input.js new file mode 100644 index 0000000000000000000000000000000000000000..1861bf6559c2e6316ffd41170b8f857a935ddc48 --- /dev/null +++ b/packages/ui/src/ui-component/input/Input.js @@ -0,0 +1,70 @@ +import { useState } from 'react' +import PropTypes from 'prop-types' +import { FormControl, OutlinedInput } from '@mui/material' +import EditPromptValuesDialog from 'ui-component/dialog/EditPromptValuesDialog' + +export const Input = ({ inputParam, value, onChange, disabled = false, showDialog, dialogProps, onDialogCancel, onDialogConfirm }) => { + const [myValue, setMyValue] = useState(value ?? '') + + const getInputType = (type) => { + switch (type) { + case 'string': + return 'text' + case 'password': + return 'password' + case 'number': + return 'number' + default: + return 'text' + } + } + + return ( + <> + + { + setMyValue(e.target.value) + onChange(e.target.value) + }} + inputProps={{ + style: { + height: inputParam.rows ? 
'90px' : 'inherit' + } + }} + /> + + {showDialog && ( + { + setMyValue(newValue) + onDialogConfirm(newValue, inputParamName) + }} + > + )} + + ) +} + +Input.propTypes = { + inputParam: PropTypes.object, + value: PropTypes.string, + onChange: PropTypes.func, + disabled: PropTypes.bool, + showDialog: PropTypes.bool, + dialogProps: PropTypes.object, + onDialogCancel: PropTypes.func, + onDialogConfirm: PropTypes.func +} diff --git a/packages/ui/src/ui-component/json/JsonEditor.js b/packages/ui/src/ui-component/json/JsonEditor.js new file mode 100644 index 0000000000000000000000000000000000000000..06442df27c234c5e36338c095b4258c35c3b852b --- /dev/null +++ b/packages/ui/src/ui-component/json/JsonEditor.js @@ -0,0 +1,64 @@ +import { useState } from 'react' +import PropTypes from 'prop-types' +import { FormControl } from '@mui/material' +import ReactJson from 'react-json-view' + +export const JsonEditorInput = ({ value, onChange, disabled = false, isDarkMode = false }) => { + const [myValue, setMyValue] = useState(value ? 
JSON.parse(value) : {}) + + const onClipboardCopy = (e) => { + const src = e.src + if (Array.isArray(src) || typeof src === 'object') { + navigator.clipboard.writeText(JSON.stringify(src, null, ' ')) + } else { + navigator.clipboard.writeText(src) + } + } + + return ( + <> + + {disabled && ( + onClipboardCopy(e)} + quotesOnKeys={false} + displayDataTypes={false} + /> + )} + {!disabled && ( + onClipboardCopy(e)} + onEdit={(edit) => { + setMyValue(edit.updated_src) + onChange(JSON.stringify(edit.updated_src)) + }} + onAdd={() => { + //console.log(add) + }} + onDelete={(deleteobj) => { + setMyValue(deleteobj.updated_src) + onChange(JSON.stringify(deleteobj.updated_src)) + }} + /> + )} + + + ) +} + +JsonEditorInput.propTypes = { + value: PropTypes.string, + onChange: PropTypes.func, + disabled: PropTypes.bool, + isDarkMode: PropTypes.bool +} diff --git a/packages/ui/src/ui-component/loading/BackdropLoader.js b/packages/ui/src/ui-component/loading/BackdropLoader.js new file mode 100644 index 0000000000000000000000000000000000000000..d88f618f644e44cc10b2aadcf1243609a98447b4 --- /dev/null +++ b/packages/ui/src/ui-component/loading/BackdropLoader.js @@ -0,0 +1,16 @@ +import PropTypes from 'prop-types' +import { Backdrop, CircularProgress } from '@mui/material' + +export const BackdropLoader = ({ open }) => { + return ( +
+ theme.zIndex.drawer + 1 }} open={open}> + + +
+ ) +} + +BackdropLoader.propTypes = { + open: PropTypes.bool +} diff --git a/packages/ui/src/ui-component/loading/Loadable.js b/packages/ui/src/ui-component/loading/Loadable.js new file mode 100644 index 0000000000000000000000000000000000000000..462d6b8f236cbc9355fb3ce09472a64a7ae5468c --- /dev/null +++ b/packages/ui/src/ui-component/loading/Loadable.js @@ -0,0 +1,17 @@ +import { Suspense } from 'react' + +// project imports +import Loader from './Loader' + +// ==============================|| LOADABLE - LAZY LOADING ||============================== // + +const Loadable = (Component) => + function WithLoader(props) { + return ( + }> + + + ) + } + +export default Loadable diff --git a/packages/ui/src/ui-component/loading/Loader.js b/packages/ui/src/ui-component/loading/Loader.js new file mode 100644 index 0000000000000000000000000000000000000000..6c29446afc1af64b927362bdcef07645cc34ed65 --- /dev/null +++ b/packages/ui/src/ui-component/loading/Loader.js @@ -0,0 +1,21 @@ +// material-ui +import LinearProgress from '@mui/material/LinearProgress' +import { styled } from '@mui/material/styles' + +// styles +const LoaderWrapper = styled('div')({ + position: 'fixed', + top: 0, + left: 0, + zIndex: 1301, + width: '100%' +}) + +// ==============================|| LOADER ||============================== // +const Loader = () => ( + + + +) + +export default Loader diff --git a/packages/ui/src/ui-component/markdown/CodeBlock.js b/packages/ui/src/ui-component/markdown/CodeBlock.js new file mode 100644 index 0000000000000000000000000000000000000000..77caa346cd5a0c7aca65a0534c7f1b399b2eb8dd --- /dev/null +++ b/packages/ui/src/ui-component/markdown/CodeBlock.js @@ -0,0 +1,123 @@ +import { IconClipboard, IconDownload } from '@tabler/icons' +import { memo, useState } from 'react' +import { Prism as SyntaxHighlighter } from 'react-syntax-highlighter' +import { oneDark } from 'react-syntax-highlighter/dist/esm/styles/prism' +import PropTypes from 'prop-types' +import { Box, 
IconButton, Popover, Typography } from '@mui/material' +import { useTheme } from '@mui/material/styles' + +const programmingLanguages = { + javascript: '.js', + python: '.py', + java: '.java', + c: '.c', + cpp: '.cpp', + 'c++': '.cpp', + 'c#': '.cs', + ruby: '.rb', + php: '.php', + swift: '.swift', + 'objective-c': '.m', + kotlin: '.kt', + typescript: '.ts', + go: '.go', + perl: '.pl', + rust: '.rs', + scala: '.scala', + haskell: '.hs', + lua: '.lua', + shell: '.sh', + sql: '.sql', + html: '.html', + css: '.css' +} + +export const CodeBlock = memo(({ language, chatflowid, isDialog, value }) => { + const theme = useTheme() + const [anchorEl, setAnchorEl] = useState(null) + const openPopOver = Boolean(anchorEl) + + const handleClosePopOver = () => { + setAnchorEl(null) + } + + const copyToClipboard = (event) => { + if (!navigator.clipboard || !navigator.clipboard.writeText) { + return + } + + navigator.clipboard.writeText(value) + setAnchorEl(event.currentTarget) + setTimeout(() => { + handleClosePopOver() + }, 1500) + } + + const downloadAsFile = () => { + const fileExtension = programmingLanguages[language] || '.file' + const suggestedFileName = `file-${chatflowid}${fileExtension}` + const fileName = suggestedFileName + + if (!fileName) { + // user pressed cancel on prompt + return + } + + const blob = new Blob([value], { type: 'text/plain' }) + const url = URL.createObjectURL(blob) + const link = document.createElement('a') + link.download = fileName + link.href = url + link.style.display = 'none' + document.body.appendChild(link) + link.click() + document.body.removeChild(link) + URL.revokeObjectURL(url) + } + + return ( +
+ +
+ {language} +
+ + + + + + Copied! + + + + + +
+
+ + + {value} + +
+ ) +}) +CodeBlock.displayName = 'CodeBlock' + +CodeBlock.propTypes = { + language: PropTypes.string, + chatflowid: PropTypes.string, + isDialog: PropTypes.bool, + value: PropTypes.string +} diff --git a/packages/ui/src/ui-component/markdown/MemoizedReactMarkdown.js b/packages/ui/src/ui-component/markdown/MemoizedReactMarkdown.js new file mode 100644 index 0000000000000000000000000000000000000000..f9770a9f3f2cc40cfab01dd0787fdebc680b00f3 --- /dev/null +++ b/packages/ui/src/ui-component/markdown/MemoizedReactMarkdown.js @@ -0,0 +1,4 @@ +import { memo } from 'react' +import ReactMarkdown from 'react-markdown' + +export const MemoizedReactMarkdown = memo(ReactMarkdown, (prevProps, nextProps) => prevProps.children === nextProps.children) diff --git a/packages/ui/src/ui-component/switch/Switch.js b/packages/ui/src/ui-component/switch/Switch.js new file mode 100644 index 0000000000000000000000000000000000000000..04ea17048eb5545399794ded40bee3ddfec6dc93 --- /dev/null +++ b/packages/ui/src/ui-component/switch/Switch.js @@ -0,0 +1,28 @@ +import { useState } from 'react' +import PropTypes from 'prop-types' +import { FormControl, Switch } from '@mui/material' + +export const SwitchInput = ({ value, onChange, disabled = false }) => { + const [myValue, setMyValue] = useState(!!value ?? 
false) + + return ( + <> + + { + setMyValue(event.target.checked) + onChange(event.target.checked) + }} + /> + + + ) +} + +SwitchInput.propTypes = { + value: PropTypes.string, + onChange: PropTypes.func, + disabled: PropTypes.bool +} diff --git a/packages/ui/src/ui-component/table/Table.js b/packages/ui/src/ui-component/table/Table.js new file mode 100644 index 0000000000000000000000000000000000000000..a6ab312e184aca344677ec0e13402d015a56ae09 --- /dev/null +++ b/packages/ui/src/ui-component/table/Table.js @@ -0,0 +1,34 @@ +import PropTypes from 'prop-types' +import { TableContainer, Table, TableHead, TableCell, TableRow, TableBody, Paper } from '@mui/material' + +export const TableViewOnly = ({ columns, rows }) => { + return ( + <> + + + + + {columns.map((col, index) => ( + {col.charAt(0).toUpperCase() + col.slice(1)} + ))} + + + + {rows.map((row, index) => ( + + {Object.keys(row).map((key, index) => ( + {row[key]} + ))} + + ))} + +
+
+ + ) +} + +TableViewOnly.propTypes = { + rows: PropTypes.array, + columns: PropTypes.array +} diff --git a/packages/ui/src/ui-component/tooltip/TooltipWithParser.js b/packages/ui/src/ui-component/tooltip/TooltipWithParser.js new file mode 100644 index 0000000000000000000000000000000000000000..7362b55c088e2010ed836b475afc0c561dcd6a4e --- /dev/null +++ b/packages/ui/src/ui-component/tooltip/TooltipWithParser.js @@ -0,0 +1,30 @@ +import { Info } from '@mui/icons-material' +import { IconButton, Tooltip } from '@mui/material' +import parser from 'html-react-parser' +import PropTypes from 'prop-types' +import { useSelector } from 'react-redux' + +export const TooltipWithParser = ({ title, style }) => { + const customization = useSelector((state) => state.customization) + + return ( + + + + + + ) +} + +TooltipWithParser.propTypes = { + title: PropTypes.node, + style: PropTypes.any +} diff --git a/packages/ui/src/utils/genericHelper.js b/packages/ui/src/utils/genericHelper.js new file mode 100644 index 0000000000000000000000000000000000000000..fac832259eedbd402ebc8df2a2be9672a75446e6 --- /dev/null +++ b/packages/ui/src/utils/genericHelper.js @@ -0,0 +1,336 @@ +import moment from 'moment' + +export const getUniqueNodeId = (nodeData, nodes) => { + // Get amount of same nodes + let totalSameNodes = 0 + for (let i = 0; i < nodes.length; i += 1) { + const node = nodes[i] + if (node.data.name === nodeData.name) { + totalSameNodes += 1 + } + } + + // Get unique id + let nodeId = `${nodeData.name}_${totalSameNodes}` + for (let i = 0; i < nodes.length; i += 1) { + const node = nodes[i] + if (node.id === nodeId) { + totalSameNodes += 1 + nodeId = `${nodeData.name}_${totalSameNodes}` + } + } + return nodeId +} + +export const initializeDefaultNodeData = (nodeParams) => { + const initialValues = {} + + for (let i = 0; i < nodeParams.length; i += 1) { + const input = nodeParams[i] + initialValues[input.name] = input.default || '' + } + + return initialValues +} + +export const 
initNode = (nodeData, newNodeId) => { + const inputAnchors = [] + const inputParams = [] + const incoming = nodeData.inputs ? nodeData.inputs.length : 0 + const outgoing = 1 + + const whitelistTypes = ['options', 'string', 'number', 'boolean', 'password', 'json', 'code', 'date', 'file', 'folder'] + + for (let i = 0; i < incoming; i += 1) { + const newInput = { + ...nodeData.inputs[i], + id: `${newNodeId}-input-${nodeData.inputs[i].name}-${nodeData.inputs[i].type}` + } + if (whitelistTypes.includes(nodeData.inputs[i].type)) { + inputParams.push(newInput) + } else { + inputAnchors.push(newInput) + } + } + + const outputAnchors = [] + for (let i = 0; i < outgoing; i += 1) { + if (nodeData.outputs && nodeData.outputs.length) { + const options = [] + for (let j = 0; j < nodeData.outputs.length; j += 1) { + let baseClasses = '' + let type = '' + + const outputBaseClasses = nodeData.outputs[j].baseClasses ?? [] + if (outputBaseClasses.length > 1) { + baseClasses = outputBaseClasses.join('|') + type = outputBaseClasses.join(' | ') + } else if (outputBaseClasses.length === 1) { + baseClasses = outputBaseClasses[0] + type = outputBaseClasses[0] + } + + const newOutputOption = { + id: `${newNodeId}-output-${nodeData.outputs[j].name}-${baseClasses}`, + name: nodeData.outputs[j].name, + label: nodeData.outputs[j].label, + type + } + options.push(newOutputOption) + } + const newOutput = { + name: 'output', + label: 'Output', + type: 'options', + options, + default: nodeData.outputs[0].name + } + outputAnchors.push(newOutput) + } else { + const newOutput = { + id: `${newNodeId}-output-${nodeData.name}-${nodeData.baseClasses.join('|')}`, + name: nodeData.name, + label: nodeData.type, + type: nodeData.baseClasses.join(' | ') + } + outputAnchors.push(newOutput) + } + } + + /* Initial + inputs = [ + { + label: 'field_label_1', + name: 'string' + }, + { + label: 'field_label_2', + name: 'CustomType' + } + ] + + => Convert to inputs, inputParams, inputAnchors + + => inputs = { 'field': 
'defaultvalue' } // Turn into inputs object with default values + + => // For inputs that are part of whitelistTypes + inputParams = [ + { + label: 'field_label_1', + name: 'string' + } + ] + + => // For inputs that are not part of whitelistTypes + inputAnchors = [ + { + label: 'field_label_2', + name: 'CustomType' + } + ] + */ + if (nodeData.inputs) { + nodeData.inputAnchors = inputAnchors + nodeData.inputParams = inputParams + nodeData.inputs = initializeDefaultNodeData(nodeData.inputs) + } else { + nodeData.inputAnchors = [] + nodeData.inputParams = [] + nodeData.inputs = {} + } + + if (nodeData.outputs) { + nodeData.outputs = initializeDefaultNodeData(outputAnchors) + } else { + nodeData.outputs = {} + } + + nodeData.outputAnchors = outputAnchors + nodeData.id = newNodeId + + return nodeData +} + +export const getEdgeLabelName = (source) => { + const sourceSplit = source.split('-') + if (sourceSplit.length && sourceSplit[0].includes('ifElse')) { + const outputAnchorsIndex = sourceSplit[sourceSplit.length - 1] + return outputAnchorsIndex === '0' ? 
'true' : 'false' + } + return '' +} + +export const isValidConnection = (connection, reactFlowInstance) => { + const sourceHandle = connection.sourceHandle + const targetHandle = connection.targetHandle + const target = connection.target + + //sourceHandle: "llmChain_0-output-llmChain-BaseChain" + //targetHandle: "mrlkAgentLLM_0-input-model-BaseLanguageModel" + + const sourceTypes = sourceHandle.split('-')[sourceHandle.split('-').length - 1].split('|') + const targetTypes = targetHandle.split('-')[targetHandle.split('-').length - 1].split('|') + + if (targetTypes.some((t) => sourceTypes.includes(t))) { + let targetNode = reactFlowInstance.getNode(target) + + if (!targetNode) { + if (!reactFlowInstance.getEdges().find((e) => e.targetHandle === targetHandle)) { + return true + } + } else { + const targetNodeInputAnchor = + targetNode.data.inputAnchors.find((ancr) => ancr.id === targetHandle) || + targetNode.data.inputParams.find((ancr) => ancr.id === targetHandle) + if ( + (targetNodeInputAnchor && + !targetNodeInputAnchor?.list && + !reactFlowInstance.getEdges().find((e) => e.targetHandle === targetHandle)) || + targetNodeInputAnchor?.list + ) { + return true + } + } + } + return false +} + +export const convertDateStringToDateObject = (dateString) => { + if (dateString === undefined || !dateString) return undefined + + const date = moment(dateString) + if (!date.isValid) return undefined + + // Sat Sep 24 2022 07:30:14 + return new Date(date.year(), date.month(), date.date(), date.hours(), date.minutes()) +} + +export const getFileName = (fileBase64) => { + let fileNames = [] + if (fileBase64.startsWith('[') && fileBase64.endsWith(']')) { + const files = JSON.parse(fileBase64) + for (const file of files) { + const splitDataURI = file.split(',') + const filename = splitDataURI[splitDataURI.length - 1].split(':')[1] + fileNames.push(filename) + } + return fileNames.join(', ') + } else { + const splitDataURI = fileBase64.split(',') + const filename = 
splitDataURI[splitDataURI.length - 1].split(':')[1] + return filename + } +} + +export const getFolderName = (base64ArrayStr) => { + try { + const base64Array = JSON.parse(base64ArrayStr) + const filenames = [] + for (let i = 0; i < base64Array.length; i += 1) { + const fileBase64 = base64Array[i] + const splitDataURI = fileBase64.split(',') + const filename = splitDataURI[splitDataURI.length - 1].split(':')[1] + filenames.push(filename) + } + return filenames.length ? filenames.join(',') : '' + } catch (e) { + return '' + } +} + +export const generateExportFlowData = (flowData) => { + const nodes = flowData.nodes + const edges = flowData.edges + + for (let i = 0; i < nodes.length; i += 1) { + nodes[i].selected = false + const node = nodes[i] + + const newNodeData = { + id: node.data.id, + label: node.data.label, + name: node.data.name, + type: node.data.type, + baseClasses: node.data.baseClasses, + category: node.data.category, + description: node.data.description, + inputParams: node.data.inputParams, + inputAnchors: node.data.inputAnchors, + inputs: {}, + outputAnchors: node.data.outputAnchors, + outputs: node.data.outputs, + selected: false + } + + // Remove password, file & folder + if (node.data.inputs && Object.keys(node.data.inputs).length) { + const nodeDataInputs = {} + for (const input in node.data.inputs) { + const inputParam = node.data.inputParams.find((inp) => inp.name === input) + if (inputParam && inputParam.type === 'password') continue + if (inputParam && inputParam.type === 'file') continue + if (inputParam && inputParam.type === 'folder') continue + nodeDataInputs[input] = node.data.inputs[input] + } + newNodeData.inputs = nodeDataInputs + } + + nodes[i].data = newNodeData + } + const exportJson = { + nodes, + edges + } + return exportJson +} + +export const getAvailableNodesForVariable = (nodes, edges, target, targetHandle) => { + // example edge id = 
"llmChain_0-llmChain_0-output-outputPrediction-string-llmChain_1-llmChain_1-input-promptValues-string" + // {source} -{sourceHandle} -{target} -{targetHandle} + const parentNodes = [] + const inputEdges = edges.filter((edg) => edg.target === target && edg.targetHandle === targetHandle) + if (inputEdges && inputEdges.length) { + for (let j = 0; j < inputEdges.length; j += 1) { + const node = nodes.find((nd) => nd.id === inputEdges[j].source) + parentNodes.push(node) + } + } + return parentNodes +} + +export const rearrangeToolsOrdering = (newValues, sourceNodeId) => { + // RequestsGet and RequestsPost have to be in order before other tools + newValues.push(`{{${sourceNodeId}.data.instance}}`) + + const sortKey = (item) => { + if (item.includes('requestsGet') || item.includes('readFile')) { + return 0 + } else if (item.includes('requestsPost') || item.includes('writeFile')) { + return 1 + } else { + return 2 + } + } + + newValues.sort((a, b) => sortKey(a) - sortKey(b)) +} + +export const throttle = (func, limit) => { + let lastFunc + let lastRan + + return (...args) => { + if (!lastRan) { + func(...args) + lastRan = Date.now() + } else { + clearTimeout(lastFunc) + lastFunc = setTimeout(() => { + if (Date.now() - lastRan >= limit) { + func(...args) + lastRan = Date.now() + } + }, limit - (Date.now() - lastRan)) + } + } +} diff --git a/packages/ui/src/utils/useNotifier.js b/packages/ui/src/utils/useNotifier.js new file mode 100644 index 0000000000000000000000000000000000000000..2ea8bf7f806d51deac12e35c0fd73244824a2398 --- /dev/null +++ b/packages/ui/src/utils/useNotifier.js @@ -0,0 +1,56 @@ +import React from 'react' +import { useDispatch, useSelector } from 'react-redux' +import { useSnackbar } from 'notistack' +import { removeSnackbar } from 'store/actions' + +let displayed = [] + +const useNotifier = () => { + const dispatch = useDispatch() + const notifier = useSelector((state) => state.notifier) + const { notifications } = notifier + + const { enqueueSnackbar, 
closeSnackbar } = useSnackbar() + + const storeDisplayed = (id) => { + displayed = [...displayed, id] + } + + const removeDisplayed = (id) => { + displayed = [...displayed.filter((key) => id !== key)] + } + + React.useEffect(() => { + notifications.forEach(({ key, message, options = {}, dismissed = false }) => { + if (dismissed) { + // dismiss snackbar using notistack + closeSnackbar(key) + return + } + + // do nothing if snackbar is already displayed + if (displayed.includes(key)) return + + // display snackbar using notistack + enqueueSnackbar(message, { + key, + ...options, + onClose: (event, reason, myKey) => { + if (options.onClose) { + options.onClose(event, reason, myKey) + } + }, + onExited: (event, myKey) => { + // remove this snackbar from redux store + dispatch(removeSnackbar(myKey)) + removeDisplayed(myKey) + } + }) + + // keep track of snackbars that we've displayed + storeDisplayed(key) + }) + }, [notifications, closeSnackbar, enqueueSnackbar, dispatch]) +} + +export default useNotifier diff --git a/packages/ui/src/utils/usePrompt.js b/packages/ui/src/utils/usePrompt.js new file mode 100644 index 0000000000000000000000000000000000000000..4108556cf301119b8907aeddfc8c9ce4fee860e7 --- /dev/null +++ b/packages/ui/src/utils/usePrompt.js @@ -0,0 +1,37 @@ +import { useCallback, useContext, useEffect } from 'react' +import { UNSAFE_NavigationContext as NavigationContext } from 'react-router-dom' + +// https://stackoverflow.com/questions/71572678/react-router-v-6-useprompt-typescript + +export function useBlocker(blocker, when = true) { + const { navigator } = useContext(NavigationContext) + + useEffect(() => { + if (!when) return + + const unblock = navigator.block((tx) => { + const autoUnblockingTx = { + ...tx, + retry() { + unblock() + tx.retry() + } + } + + blocker(autoUnblockingTx) + }) + + return unblock + }, [navigator, blocker, when]) +} + +export function usePrompt(message, when = true) { + const blocker = useCallback( + (tx) => { + if 
(window.confirm(message)) tx.retry() + }, + [message] + ) + + useBlocker(blocker, when) +} diff --git a/packages/ui/src/views/apikey/APIKeyDialog.js b/packages/ui/src/views/apikey/APIKeyDialog.js new file mode 100644 index 0000000000000000000000000000000000000000..69d729875fb8ce11d1cacbbfce75d64a5cffba01 --- /dev/null +++ b/packages/ui/src/views/apikey/APIKeyDialog.js @@ -0,0 +1,231 @@ +import { createPortal } from 'react-dom' +import PropTypes from 'prop-types' +import { useState, useEffect } from 'react' +import { useDispatch } from 'react-redux' +import { enqueueSnackbar as enqueueSnackbarAction, closeSnackbar as closeSnackbarAction } from 'store/actions' + +import { + Box, + Typography, + Button, + Dialog, + DialogActions, + DialogContent, + DialogTitle, + Stack, + IconButton, + OutlinedInput, + Popover +} from '@mui/material' +import { useTheme } from '@mui/material/styles' +import { StyledButton } from 'ui-component/button/StyledButton' + +// Icons +import { IconX, IconCopy } from '@tabler/icons' + +// API +import apikeyApi from 'api/apikey' + +// utils +import useNotifier from 'utils/useNotifier' + +const APIKeyDialog = ({ show, dialogProps, onCancel, onConfirm }) => { + const portalElement = document.getElementById('portal') + + const theme = useTheme() + const dispatch = useDispatch() + + // ==============================|| Snackbar ||============================== // + + useNotifier() + + const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) + const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args)) + + const [keyName, setKeyName] = useState('') + const [anchorEl, setAnchorEl] = useState(null) + const openPopOver = Boolean(anchorEl) + + useEffect(() => { + if (dialogProps.type === 'EDIT' && dialogProps.key) { + setKeyName(dialogProps.key.keyName) + } else if (dialogProps.type === 'ADD') { + setKeyName('') + } + }, [dialogProps]) + + const handleClosePopOver = () => { + setAnchorEl(null) + } + + const addNewKey = 
async () => { + try { + const createResp = await apikeyApi.createNewAPI({ keyName }) + if (createResp.data) { + enqueueSnackbar({ + message: 'New API key added', + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + onConfirm() + } + } catch (error) { + const errorData = error.response.data || `${error.response.status}: ${error.response.statusText}` + enqueueSnackbar({ + message: `Failed to add new API key: ${errorData}`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + onCancel() + } + } + + const saveKey = async () => { + try { + const saveResp = await apikeyApi.updateAPI(dialogProps.key.id, { keyName }) + if (saveResp.data) { + enqueueSnackbar({ + message: 'API Key saved', + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + onConfirm() + } + } catch (error) { + const errorData = error.response.data || `${error.response.status}: ${error.response.statusText}` + enqueueSnackbar({ + message: `Failed to save API key: ${errorData}`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + onCancel() + } + } + + const component = show ? ( + + + {dialogProps.title} + + + {dialogProps.type === 'EDIT' && ( + + API Key + + + {dialogProps.key.apiKey} + + { + navigator.clipboard.writeText(dialogProps.key.apiKey) + setAnchorEl(event.currentTarget) + setTimeout(() => { + handleClosePopOver() + }, 1500) + }} + > + + + + + Copied! + + + + + )} + + + + Key Name + + setKeyName(e.target.value)} + /> + + + + (dialogProps.type === 'ADD' ? 
addNewKey() : saveKey())}> + {dialogProps.confirmButtonName} + + + + ) : null + + return createPortal(component, portalElement) +} + +APIKeyDialog.propTypes = { + show: PropTypes.bool, + dialogProps: PropTypes.object, + onCancel: PropTypes.func, + onConfirm: PropTypes.func +} + +export default APIKeyDialog diff --git a/packages/ui/src/views/apikey/index.js b/packages/ui/src/views/apikey/index.js new file mode 100644 index 0000000000000000000000000000000000000000..a2b2e639f098520d4baea5b68cb5e4a32670973d --- /dev/null +++ b/packages/ui/src/views/apikey/index.js @@ -0,0 +1,279 @@ +import { useEffect, useState } from 'react' +import { useDispatch, useSelector } from 'react-redux' +import { enqueueSnackbar as enqueueSnackbarAction, closeSnackbar as closeSnackbarAction } from 'store/actions' + +// material-ui +import { + Button, + Box, + Stack, + Table, + TableBody, + TableCell, + TableContainer, + TableHead, + TableRow, + Paper, + IconButton, + Popover, + Typography +} from '@mui/material' +import { useTheme } from '@mui/material/styles' + +// project imports +import MainCard from 'ui-component/cards/MainCard' +import { StyledButton } from 'ui-component/button/StyledButton' +import APIKeyDialog from './APIKeyDialog' +import ConfirmDialog from 'ui-component/dialog/ConfirmDialog' + +// API +import apiKeyApi from 'api/apikey' + +// Hooks +import useApi from 'hooks/useApi' +import useConfirm from 'hooks/useConfirm' + +// utils +import useNotifier from 'utils/useNotifier' + +// Icons +import { IconTrash, IconEdit, IconCopy, IconX, IconPlus, IconEye, IconEyeOff } from '@tabler/icons' +import APIEmptySVG from 'assets/images/api_empty.svg' + +// ==============================|| APIKey ||============================== // + +const APIKey = () => { + const theme = useTheme() + const customization = useSelector((state) => state.customization) + + const dispatch = useDispatch() + useNotifier() + + const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) + const 
closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args)) + + const [showDialog, setShowDialog] = useState(false) + const [dialogProps, setDialogProps] = useState({}) + const [apiKeys, setAPIKeys] = useState([]) + const [anchorEl, setAnchorEl] = useState(null) + const [showApiKeys, setShowApiKeys] = useState([]) + const openPopOver = Boolean(anchorEl) + + const { confirm } = useConfirm() + + const getAllAPIKeysApi = useApi(apiKeyApi.getAllAPIKeys) + + const onShowApiKeyClick = (apikey) => { + const index = showApiKeys.indexOf(apikey) + if (index > -1) { + //showApiKeys.splice(index, 1) + const newShowApiKeys = showApiKeys.filter(function (item) { + return item !== apikey + }) + setShowApiKeys(newShowApiKeys) + } else { + setShowApiKeys((prevkeys) => [...prevkeys, apikey]) + } + } + + const handleClosePopOver = () => { + setAnchorEl(null) + } + + const addNew = () => { + const dialogProp = { + title: 'Add New API Key', + type: 'ADD', + cancelButtonName: 'Cancel', + confirmButtonName: 'Add' + } + setDialogProps(dialogProp) + setShowDialog(true) + } + + const edit = (key) => { + const dialogProp = { + title: 'Edit API Key', + type: 'EDIT', + cancelButtonName: 'Cancel', + confirmButtonName: 'Save', + key + } + setDialogProps(dialogProp) + setShowDialog(true) + } + + const deleteKey = async (key) => { + const confirmPayload = { + title: `Delete`, + description: `Delete key ${key.keyName}?`, + confirmButtonName: 'Delete', + cancelButtonName: 'Cancel' + } + const isConfirmed = await confirm(confirmPayload) + + if (isConfirmed) { + try { + const deleteResp = await apiKeyApi.deleteAPI(key.id) + if (deleteResp.data) { + enqueueSnackbar({ + message: 'API key deleted', + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + onConfirm() + } + } catch (error) { + const errorData = error.response.data || `${error.response.status}: ${error.response.statusText}` + enqueueSnackbar({ + message: `Failed to delete 
API key: ${errorData}`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + onCancel() + } + } + } + + const onConfirm = () => { + setShowDialog(false) + getAllAPIKeysApi.request() + } + + useEffect(() => { + getAllAPIKeysApi.request() + + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []) + + useEffect(() => { + if (getAllAPIKeysApi.data) { + setAPIKeys(getAllAPIKeysApi.data) + } + }, [getAllAPIKeysApi.data]) + + return ( + <> + + +

API Keys 

+ + + }> + Create Key + +
+ {apiKeys.length <= 0 && ( + + + APIEmptySVG + +
No API Keys Yet
+
+ )} + {apiKeys.length > 0 && ( + + + + + Key Name + API Key + Created + + + + + + {apiKeys.map((key, index) => ( + + + {key.keyName} + + + {showApiKeys.includes(key.apiKey) + ? key.apiKey + : `${key.apiKey.substring(0, 2)}${'•'.repeat(18)}${key.apiKey.substring( + key.apiKey.length - 5 + )}`} + { + navigator.clipboard.writeText(key.apiKey) + setAnchorEl(event.currentTarget) + setTimeout(() => { + handleClosePopOver() + }, 1500) + }} + > + + + onShowApiKeyClick(key.apiKey)}> + {showApiKeys.includes(key.apiKey) ? : } + + + + Copied! + + + + {key.createdAt} + + edit(key)}> + + + + + deleteKey(key)}> + + + + + ))} + +
+
+ )} +
+ setShowDialog(false)} + onConfirm={onConfirm} + > + + + ) +} + +export default APIKey diff --git a/packages/ui/src/views/canvas/AddNodes.js b/packages/ui/src/views/canvas/AddNodes.js new file mode 100644 index 0000000000000000000000000000000000000000..50978700fb14ee6194f3448ac1cc72a5aebf372c --- /dev/null +++ b/packages/ui/src/views/canvas/AddNodes.js @@ -0,0 +1,297 @@ +import { useState, useRef, useEffect } from 'react' +import { useSelector } from 'react-redux' +import PropTypes from 'prop-types' + +// material-ui +import { useTheme } from '@mui/material/styles' +import { + Accordion, + AccordionSummary, + AccordionDetails, + Box, + ClickAwayListener, + Divider, + InputAdornment, + List, + ListItemButton, + ListItem, + ListItemAvatar, + ListItemText, + OutlinedInput, + Paper, + Popper, + Stack, + Typography +} from '@mui/material' +import ExpandMoreIcon from '@mui/icons-material/ExpandMore' + +// third-party +import PerfectScrollbar from 'react-perfect-scrollbar' + +// project imports +import MainCard from 'ui-component/cards/MainCard' +import Transitions from 'ui-component/extended/Transitions' +import { StyledFab } from 'ui-component/button/StyledFab' + +// icons +import { IconPlus, IconSearch, IconMinus } from '@tabler/icons' + +// const +import { baseURL } from 'store/constant' + +// ==============================|| ADD NODES||============================== // + +const AddNodes = ({ nodesData, node }) => { + const theme = useTheme() + const customization = useSelector((state) => state.customization) + + const [searchValue, setSearchValue] = useState('') + const [nodes, setNodes] = useState({}) + const [open, setOpen] = useState(false) + const [categoryExpanded, setCategoryExpanded] = useState({}) + + const anchorRef = useRef(null) + const prevOpen = useRef(open) + const ps = useRef() + + const scrollTop = () => { + const curr = ps.current + if (curr) { + curr.scrollTop = 0 + } + } + + const filterSearch = (value) => { + setSearchValue(value) + setTimeout(() 
=> { + if (value) { + const returnData = nodesData.filter((nd) => nd.name.toLowerCase().includes(value.toLowerCase())) + groupByCategory(returnData, true) + scrollTop() + } else if (value === '') { + groupByCategory(nodesData) + scrollTop() + } + }, 500) + } + + const groupByCategory = (nodes, isFilter) => { + const accordianCategories = {} + const result = nodes.reduce(function (r, a) { + r[a.category] = r[a.category] || [] + r[a.category].push(a) + accordianCategories[a.category] = isFilter ? true : false + return r + }, Object.create(null)) + setNodes(result) + setCategoryExpanded(accordianCategories) + } + + const handleAccordionChange = (category) => (event, isExpanded) => { + const accordianCategories = { ...categoryExpanded } + accordianCategories[category] = isExpanded + setCategoryExpanded(accordianCategories) + } + + const handleClose = (event) => { + if (anchorRef.current && anchorRef.current.contains(event.target)) { + return + } + setOpen(false) + } + + const handleToggle = () => { + setOpen((prevOpen) => !prevOpen) + } + + const onDragStart = (event, node) => { + event.dataTransfer.setData('application/reactflow', JSON.stringify(node)) + event.dataTransfer.effectAllowed = 'move' + } + + useEffect(() => { + if (prevOpen.current === true && open === false) { + anchorRef.current.focus() + } + + prevOpen.current = open + }, [open]) + + useEffect(() => { + if (node) setOpen(false) + }, [node]) + + useEffect(() => { + if (nodesData) groupByCategory(nodesData) + }, [nodesData]) + + return ( + <> + + {open ? 
: } + + + {({ TransitionProps }) => ( + + + + + + + Add Nodes + + filterSearch(e.target.value)} + placeholder='Search nodes' + startAdornment={ + + + + } + aria-describedby='search-helper-text' + inputProps={{ + 'aria-label': 'weight' + }} + /> + + + { + ps.current = el + }} + style={{ height: '100%', maxHeight: 'calc(100vh - 320px)', overflowX: 'hidden' }} + > + + + {Object.keys(nodes) + .sort() + .map((category) => ( + + } + aria-controls={`nodes-accordian-${category}`} + id={`nodes-accordian-header-${category}`} + > + {category} + + + {nodes[category].map((node, index) => ( +
onDragStart(event, node)} + draggable + > + + + +
+ {node.name} +
+
+ +
+
+ {index === nodes[category].length - 1 ? null : } +
+ ))} +
+
+ ))} +
+
+
+
+
+
+
+ )} +
+ + ) +} + +AddNodes.propTypes = { + nodesData: PropTypes.array, + node: PropTypes.object +} + +export default AddNodes diff --git a/packages/ui/src/views/canvas/ButtonEdge.js b/packages/ui/src/views/canvas/ButtonEdge.js new file mode 100644 index 0000000000000000000000000000000000000000..0d819f81c0d8c3f120f51502f74e619fb0946c58 --- /dev/null +++ b/packages/ui/src/views/canvas/ButtonEdge.js @@ -0,0 +1,77 @@ +import { getBezierPath, EdgeText } from 'reactflow' +import PropTypes from 'prop-types' +import { useDispatch } from 'react-redux' +import { useContext } from 'react' +import { SET_DIRTY } from 'store/actions' +import { flowContext } from 'store/context/ReactFlowContext' + +import './index.css' + +const foreignObjectSize = 40 + +const ButtonEdge = ({ id, sourceX, sourceY, targetX, targetY, sourcePosition, targetPosition, style = {}, data, markerEnd }) => { + const [edgePath, edgeCenterX, edgeCenterY] = getBezierPath({ + sourceX, + sourceY, + sourcePosition, + targetX, + targetY, + targetPosition + }) + + const { deleteEdge } = useContext(flowContext) + + const dispatch = useDispatch() + + const onEdgeClick = (evt, id) => { + evt.stopPropagation() + deleteEdge(id) + dispatch({ type: SET_DIRTY }) + } + + return ( + <> + + {data && data.label && ( + + )} + +
+ +
+
+ + ) +} + +ButtonEdge.propTypes = { + id: PropTypes.string, + sourceX: PropTypes.number, + sourceY: PropTypes.number, + targetX: PropTypes.number, + targetY: PropTypes.number, + sourcePosition: PropTypes.any, + targetPosition: PropTypes.any, + style: PropTypes.object, + data: PropTypes.object, + markerEnd: PropTypes.any +} + +export default ButtonEdge diff --git a/packages/ui/src/views/canvas/CanvasHeader.js b/packages/ui/src/views/canvas/CanvasHeader.js new file mode 100644 index 0000000000000000000000000000000000000000..1f4a1f93bc452b15a6d639310476a54802823c09 --- /dev/null +++ b/packages/ui/src/views/canvas/CanvasHeader.js @@ -0,0 +1,352 @@ +import PropTypes from 'prop-types' +import { useNavigate } from 'react-router-dom' +import { useSelector } from 'react-redux' +import { useEffect, useRef, useState } from 'react' + +// material-ui +import { useTheme } from '@mui/material/styles' +import { Avatar, Box, ButtonBase, Typography, Stack, TextField } from '@mui/material' + +// icons +import { IconSettings, IconChevronLeft, IconDeviceFloppy, IconPencil, IconCheck, IconX, IconCode } from '@tabler/icons' + +// project imports +import Settings from 'views/settings' +import SaveChatflowDialog from 'ui-component/dialog/SaveChatflowDialog' +import APICodeDialog from 'ui-component/dialog/APICodeDialog' + +// API +import chatflowsApi from 'api/chatflows' + +// Hooks +import useApi from 'hooks/useApi' + +// utils +import { generateExportFlowData } from 'utils/genericHelper' +import { uiBaseURL } from 'store/constant' + +// ==============================|| CANVAS HEADER ||============================== // + +const CanvasHeader = ({ chatflow, handleSaveFlow, handleDeleteFlow, handleLoadFlow }) => { + const theme = useTheme() + const navigate = useNavigate() + const flowNameRef = useRef() + const settingsRef = useRef() + + const [isEditingFlowName, setEditingFlowName] = useState(null) + const [flowName, setFlowName] = useState('') + const [isSettingsOpen, setSettingsOpen] = 
useState(false) + const [flowDialogOpen, setFlowDialogOpen] = useState(false) + const [apiDialogOpen, setAPIDialogOpen] = useState(false) + const [apiDialogProps, setAPIDialogProps] = useState({}) + + const updateChatflowApi = useApi(chatflowsApi.updateChatflow) + const canvas = useSelector((state) => state.canvas) + + const onSettingsItemClick = (setting) => { + setSettingsOpen(false) + + if (setting === 'deleteChatflow') { + handleDeleteFlow() + } else if (setting === 'duplicateChatflow') { + try { + localStorage.setItem('duplicatedFlowData', chatflow.flowData) + window.open(`${uiBaseURL}/canvas`, '_blank') + } catch (e) { + console.error(e) + } + } else if (setting === 'exportChatflow') { + try { + const flowData = JSON.parse(chatflow.flowData) + let dataStr = JSON.stringify(generateExportFlowData(flowData)) + let dataUri = 'data:application/json;charset=utf-8,' + encodeURIComponent(dataStr) + + let exportFileDefaultName = `${chatflow.name} Chatflow.json` + + let linkElement = document.createElement('a') + linkElement.setAttribute('href', dataUri) + linkElement.setAttribute('download', exportFileDefaultName) + linkElement.click() + } catch (e) { + console.error(e) + } + } + } + + const onUploadFile = (file) => { + setSettingsOpen(false) + handleLoadFlow(file) + } + + const submitFlowName = () => { + if (chatflow.id) { + const updateBody = { + name: flowNameRef.current.value + } + updateChatflowApi.request(chatflow.id, updateBody) + } + } + + const onAPIDialogClick = () => { + let isFormDataRequired = false + + try { + const flowData = JSON.parse(chatflow.flowData) + const nodes = flowData.nodes + for (const node of nodes) { + if (node.data.inputParams.find((param) => param.type === 'file')) { + isFormDataRequired = true + break + } + } + } catch (e) { + console.error(e) + } + + setAPIDialogProps({ + title: 'Embed in website or use as API', + chatflowid: chatflow.id, + chatflowApiKeyId: chatflow.apikeyid, + isFormDataRequired + }) + setAPIDialogOpen(true) + } + + 
const onSaveChatflowClick = () => { + if (chatflow.id) handleSaveFlow(flowName) + else setFlowDialogOpen(true) + } + + const onConfirmSaveName = (flowName) => { + setFlowDialogOpen(false) + handleSaveFlow(flowName) + } + + useEffect(() => { + if (updateChatflowApi.data) { + setFlowName(updateChatflowApi.data.name) + } + setEditingFlowName(false) + + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [updateChatflowApi.data]) + + useEffect(() => { + if (chatflow) { + setFlowName(chatflow.name) + } + }, [chatflow]) + + return ( + <> + + + + window.history.state && window.history.state.idx > 0 ? navigate(-1) : navigate('/', { replace: true }) + } + > + + + + + + {!isEditingFlowName && ( + + + {canvas.isDirty && *} {flowName} + + {chatflow?.id && ( + + setEditingFlowName(true)} + > + + + + )} + + )} + {isEditingFlowName && ( + + + + + + + + + setEditingFlowName(false)} + > + + + + + )} + + + {chatflow?.id && ( + + + + + + )} + + + + + + + setSettingsOpen(!isSettingsOpen)} + > + + + + + setSettingsOpen(false)} + onSettingsItemClick={onSettingsItemClick} + onUploadFile={onUploadFile} + /> + setFlowDialogOpen(false)} + onConfirm={onConfirmSaveName} + /> + setAPIDialogOpen(false)} /> + + ) +} + +CanvasHeader.propTypes = { + chatflow: PropTypes.object, + handleSaveFlow: PropTypes.func, + handleDeleteFlow: PropTypes.func, + handleLoadFlow: PropTypes.func +} + +export default CanvasHeader diff --git a/packages/ui/src/views/canvas/CanvasNode.js b/packages/ui/src/views/canvas/CanvasNode.js new file mode 100644 index 0000000000000000000000000000000000000000..9263d4b6acc758c573321a0519594eb9850bd7d8 --- /dev/null +++ b/packages/ui/src/views/canvas/CanvasNode.js @@ -0,0 +1,183 @@ +import PropTypes from 'prop-types' +import { useContext, useState } from 'react' + +// material-ui +import { styled, useTheme } from '@mui/material/styles' +import { IconButton, Box, Typography, Divider, Button } from '@mui/material' + +// project imports +import MainCard from 
'ui-component/cards/MainCard' +import NodeInputHandler from './NodeInputHandler' +import NodeOutputHandler from './NodeOutputHandler' +import AdditionalParamsDialog from 'ui-component/dialog/AdditionalParamsDialog' + +// const +import { baseURL } from 'store/constant' +import { IconTrash, IconCopy } from '@tabler/icons' +import { flowContext } from 'store/context/ReactFlowContext' + +const CardWrapper = styled(MainCard)(({ theme }) => ({ + background: theme.palette.card.main, + color: theme.darkTextPrimary, + border: 'solid 1px', + borderColor: theme.palette.primary[200] + 75, + width: '300px', + height: 'auto', + padding: '10px', + boxShadow: '0 2px 14px 0 rgb(32 40 45 / 8%)', + '&:hover': { + borderColor: theme.palette.primary.main + } +})) + +// ===========================|| CANVAS NODE ||=========================== // + +const CanvasNode = ({ data }) => { + const theme = useTheme() + const { deleteNode, duplicateNode } = useContext(flowContext) + + const [showDialog, setShowDialog] = useState(false) + const [dialogProps, setDialogProps] = useState({}) + + const onDialogClicked = () => { + const dialogProps = { + data, + inputParams: data.inputParams.filter((param) => param.additionalParams), + confirmButtonName: 'Save', + cancelButtonName: 'Cancel' + } + setDialogProps(dialogProps) + setShowDialog(true) + } + + return ( + <> + + +
+ +
+ Notification +
+
+ + + {data.label} + + +
+ { + duplicateNode(data.id) + }} + sx={{ height: 35, width: 35, '&:hover': { color: theme?.palette.primary.main } }} + color={theme?.customization?.isDarkMode ? theme.colors?.paper : 'inherit'} + > + + + { + deleteNode(data.id) + }} + sx={{ height: 35, width: 35, mr: 1, '&:hover': { color: 'red' } }} + color={theme?.customization?.isDarkMode ? theme.colors?.paper : 'inherit'} + > + + +
+ {(data.inputAnchors.length > 0 || data.inputParams.length > 0) && ( + <> + + + + Inputs + + + + + )} + {data.inputAnchors.map((inputAnchor, index) => ( + + ))} + {data.inputParams.map((inputParam, index) => ( + + ))} + {data.inputParams.find((param) => param.additionalParams) && ( +
param.additionalParams).length === + data.inputParams.length + data.inputAnchors.length + ? 20 + : 0 + }} + > + +
+ )} + + + + Output + + + + + {data.outputAnchors.map((outputAnchor, index) => ( + + ))} +
+
+ setShowDialog(false)} + > + + ) +} + +CanvasNode.propTypes = { + data: PropTypes.object +} + +export default CanvasNode diff --git a/packages/ui/src/views/canvas/NodeInputHandler.js b/packages/ui/src/views/canvas/NodeInputHandler.js new file mode 100644 index 0000000000000000000000000000000000000000..1dc656e8f05cd51c160bd2cb57934c6c4e6e94e3 --- /dev/null +++ b/packages/ui/src/views/canvas/NodeInputHandler.js @@ -0,0 +1,206 @@ +import PropTypes from 'prop-types' +import { Handle, Position, useUpdateNodeInternals } from 'reactflow' +import { useEffect, useRef, useState, useContext } from 'react' +import { useSelector } from 'react-redux' + +// material-ui +import { useTheme, styled } from '@mui/material/styles' +import { Box, Typography, Tooltip, IconButton } from '@mui/material' +import { tooltipClasses } from '@mui/material/Tooltip' +import { IconArrowsMaximize } from '@tabler/icons' + +// project import +import { Dropdown } from 'ui-component/dropdown/Dropdown' +import { Input } from 'ui-component/input/Input' +import { File } from 'ui-component/file/File' +import { SwitchInput } from 'ui-component/switch/Switch' +import { flowContext } from 'store/context/ReactFlowContext' +import { isValidConnection, getAvailableNodesForVariable } from 'utils/genericHelper' +import { JsonEditorInput } from 'ui-component/json/JsonEditor' +import { TooltipWithParser } from 'ui-component/tooltip/TooltipWithParser' + +const CustomWidthTooltip = styled(({ className, ...props }) => )({ + [`& .${tooltipClasses.tooltip}`]: { + maxWidth: 500 + } +}) + +// ===========================|| NodeInputHandler ||=========================== // + +const NodeInputHandler = ({ inputAnchor, inputParam, data, disabled = false, isAdditionalParams = false }) => { + const theme = useTheme() + const customization = useSelector((state) => state.customization) + const ref = useRef(null) + const { reactFlowInstance } = useContext(flowContext) + const updateNodeInternals = useUpdateNodeInternals() + const 
[position, setPosition] = useState(0) + const [showExpandDialog, setShowExpandDialog] = useState(false) + const [expandDialogProps, setExpandDialogProps] = useState({}) + + const onExpandDialogClicked = (value, inputParam) => { + const dialogProp = { + value, + inputParam, + disabled, + confirmButtonName: 'Save', + cancelButtonName: 'Cancel' + } + + if (!disabled) { + const nodes = reactFlowInstance.getNodes() + const edges = reactFlowInstance.getEdges() + const nodesForVariable = inputParam.acceptVariable ? getAvailableNodesForVariable(nodes, edges, data.id, inputParam.id) : [] + dialogProp.availableNodesForVariable = nodesForVariable + } + setExpandDialogProps(dialogProp) + setShowExpandDialog(true) + } + + const onExpandDialogSave = (newValue, inputParamName) => { + setShowExpandDialog(false) + data.inputs[inputParamName] = newValue + } + + useEffect(() => { + if (ref.current && ref.current.offsetTop && ref.current.clientHeight) { + setPosition(ref.current.offsetTop + ref.current.clientHeight / 2) + updateNodeInternals(data.id) + } + }, [data.id, ref, updateNodeInternals]) + + useEffect(() => { + updateNodeInternals(data.id) + }, [data.id, position, updateNodeInternals]) + + return ( +
+ {inputAnchor && ( + <> + + isValidConnection(connection, reactFlowInstance)} + style={{ + height: 10, + width: 10, + backgroundColor: data.selected ? theme.palette.primary.main : theme.palette.text.secondary, + top: position + }} + /> + + + + {inputAnchor.label} + {!inputAnchor.optional &&  *} + + + + )} + + {((inputParam && !inputParam.additionalParams) || isAdditionalParams) && ( + <> + {inputParam.acceptVariable && ( + + isValidConnection(connection, reactFlowInstance)} + style={{ + height: 10, + width: 10, + backgroundColor: data.selected ? theme.palette.primary.main : theme.palette.text.secondary, + top: position + }} + /> + + )} + +
+ + {inputParam.label} + {!inputParam.optional &&  *} + {inputParam.description && } + +
+ {inputParam.type === 'string' && inputParam.rows && ( + + onExpandDialogClicked(data.inputs[inputParam.name] ?? inputParam.default ?? '', inputParam) + } + > + + + )} +
+ {inputParam.type === 'file' && ( + (data.inputs[inputParam.name] = newValue)} + value={data.inputs[inputParam.name] ?? inputParam.default ?? 'Choose a file to upload'} + /> + )} + {inputParam.type === 'boolean' && ( + (data.inputs[inputParam.name] = newValue)} + value={data.inputs[inputParam.name] ?? inputParam.default ?? false} + /> + )} + {(inputParam.type === 'string' || inputParam.type === 'password' || inputParam.type === 'number') && ( + (data.inputs[inputParam.name] = newValue)} + value={data.inputs[inputParam.name] ?? inputParam.default ?? ''} + showDialog={showExpandDialog} + dialogProps={expandDialogProps} + onDialogCancel={() => setShowExpandDialog(false)} + onDialogConfirm={(newValue, inputParamName) => onExpandDialogSave(newValue, inputParamName)} + /> + )} + {inputParam.type === 'json' && ( + (data.inputs[inputParam.name] = newValue)} + value={data.inputs[inputParam.name] ?? inputParam.default ?? ''} + isDarkMode={customization.isDarkMode} + /> + )} + {inputParam.type === 'options' && ( + (data.inputs[inputParam.name] = newValue)} + value={data.inputs[inputParam.name] ?? inputParam.default ?? 'chose an option'} + /> + )} +
+ + )} +
+ ) +} + +NodeInputHandler.propTypes = { + inputAnchor: PropTypes.object, + inputParam: PropTypes.object, + data: PropTypes.object, + disabled: PropTypes.bool, + isAdditionalParams: PropTypes.bool +} + +export default NodeInputHandler diff --git a/packages/ui/src/views/canvas/NodeOutputHandler.js b/packages/ui/src/views/canvas/NodeOutputHandler.js new file mode 100644 index 0000000000000000000000000000000000000000..c5fc1345dfcc209310cfb3fcf441b72074bd1017 --- /dev/null +++ b/packages/ui/src/views/canvas/NodeOutputHandler.js @@ -0,0 +1,122 @@ +import PropTypes from 'prop-types' +import { Handle, Position, useUpdateNodeInternals } from 'reactflow' +import { useEffect, useRef, useState, useContext } from 'react' + +// material-ui +import { useTheme, styled } from '@mui/material/styles' +import { Box, Typography, Tooltip } from '@mui/material' +import { tooltipClasses } from '@mui/material/Tooltip' +import { flowContext } from 'store/context/ReactFlowContext' +import { isValidConnection } from 'utils/genericHelper' +import { Dropdown } from 'ui-component/dropdown/Dropdown' + +const CustomWidthTooltip = styled(({ className, ...props }) => )({ + [`& .${tooltipClasses.tooltip}`]: { + maxWidth: 500 + } +}) + +// ===========================|| NodeOutputHandler ||=========================== // + +const NodeOutputHandler = ({ outputAnchor, data, disabled = false }) => { + const theme = useTheme() + const ref = useRef(null) + const updateNodeInternals = useUpdateNodeInternals() + const [position, setPosition] = useState(0) + const [dropdownValue, setDropdownValue] = useState(null) + const { reactFlowInstance } = useContext(flowContext) + + useEffect(() => { + if (ref.current && ref.current?.offsetTop && ref.current?.clientHeight) { + setTimeout(() => { + setPosition(ref.current?.offsetTop + ref.current?.clientHeight / 2) + updateNodeInternals(data.id) + }, 0) + } + }, [data.id, ref, updateNodeInternals]) + + useEffect(() => { + setTimeout(() => { + updateNodeInternals(data.id) 
+ }, 0) + }, [data.id, position, updateNodeInternals]) + + useEffect(() => { + if (dropdownValue) { + setTimeout(() => { + updateNodeInternals(data.id) + }, 0) + } + }, [data.id, dropdownValue, updateNodeInternals]) + + return ( +
+ {outputAnchor.type !== 'options' && !outputAnchor.options && ( + <> + + isValidConnection(connection, reactFlowInstance)} + style={{ + height: 10, + width: 10, + backgroundColor: data.selected ? theme.palette.primary.main : theme.palette.text.secondary, + top: position + }} + /> + + + {outputAnchor.label} + + + )} + {outputAnchor.type === 'options' && outputAnchor.options && outputAnchor.options.length > 0 && ( + <> + opt.name === data.outputs?.[outputAnchor.name])?.type ?? outputAnchor.type + } + > + opt.name === data.outputs?.[outputAnchor.name])?.id ?? ''} + isValidConnection={(connection) => isValidConnection(connection, reactFlowInstance)} + style={{ + height: 10, + width: 10, + backgroundColor: data.selected ? theme.palette.primary.main : theme.palette.text.secondary, + top: position + }} + /> + + + { + setDropdownValue(newValue) + data.outputs[outputAnchor.name] = newValue + }} + value={data.outputs[outputAnchor.name] ?? outputAnchor.default ?? 'choose an option'} + /> + + + )} +
+ ) +} + +NodeOutputHandler.propTypes = { + outputAnchor: PropTypes.object, + data: PropTypes.object, + disabled: PropTypes.bool +} + +export default NodeOutputHandler diff --git a/packages/ui/src/views/canvas/index.css b/packages/ui/src/views/canvas/index.css new file mode 100644 index 0000000000000000000000000000000000000000..851dee5b92357a4cb181fc7db098f1bdd7a6db0f --- /dev/null +++ b/packages/ui/src/views/canvas/index.css @@ -0,0 +1,37 @@ +.edgebutton { + width: 20px; + height: 20px; + background: #eee; + border: 1px solid #fff; + cursor: pointer; + border-radius: 50%; + font-size: 12px; + line-height: 1; +} + +.edgebutton:hover { + background: #5e35b1; + color: #eee; + box-shadow: 0 0 6px 2px rgba(0, 0, 0, 0.08); +} + +.edgebutton-foreignobject div { + background: transparent; + width: 40px; + height: 40px; + display: flex; + justify-content: center; + align-items: center; + min-height: 40px; +} + +.reactflow-parent-wrapper { + display: flex; + flex-grow: 1; + height: 100%; +} + +.reactflow-parent-wrapper .reactflow-wrapper { + flex-grow: 1; + height: 100%; +} diff --git a/packages/ui/src/views/canvas/index.js b/packages/ui/src/views/canvas/index.js new file mode 100644 index 0000000000000000000000000000000000000000..2d71f03ae3e66953f9a8b91f3efe5043bda01d2f --- /dev/null +++ b/packages/ui/src/views/canvas/index.js @@ -0,0 +1,528 @@ +import { useEffect, useRef, useState, useCallback, useContext } from 'react' +import ReactFlow, { addEdge, Controls, Background, useNodesState, useEdgesState } from 'reactflow' +import 'reactflow/dist/style.css' + +import { useDispatch, useSelector } from 'react-redux' +import { useNavigate, useLocation } from 'react-router-dom' +import { usePrompt } from '../../utils/usePrompt' +import { + REMOVE_DIRTY, + SET_DIRTY, + SET_CHATFLOW, + enqueueSnackbar as enqueueSnackbarAction, + closeSnackbar as closeSnackbarAction +} from 'store/actions' + +// material-ui +import { Toolbar, Box, AppBar, Button } from '@mui/material' +import { 
useTheme } from '@mui/material/styles' + +// project imports +import CanvasNode from './CanvasNode' +import ButtonEdge from './ButtonEdge' +import CanvasHeader from './CanvasHeader' +import AddNodes from './AddNodes' +import ConfirmDialog from 'ui-component/dialog/ConfirmDialog' +import { ChatPopUp } from 'views/chatmessage/ChatPopUp' +import { flowContext } from 'store/context/ReactFlowContext' + +// API +import nodesApi from 'api/nodes' +import chatflowsApi from 'api/chatflows' + +// Hooks +import useApi from 'hooks/useApi' +import useConfirm from 'hooks/useConfirm' + +// icons +import { IconX } from '@tabler/icons' + +// utils +import { getUniqueNodeId, initNode, getEdgeLabelName, rearrangeToolsOrdering } from 'utils/genericHelper' +import useNotifier from 'utils/useNotifier' + +const nodeTypes = { customNode: CanvasNode } +const edgeTypes = { buttonedge: ButtonEdge } + +// ==============================|| CANVAS ||============================== // + +const Canvas = () => { + const theme = useTheme() + const navigate = useNavigate() + + const { state } = useLocation() + const templateFlowData = state ? state.templateFlowData : '' + + const URLpath = document.location.pathname.toString().split('/') + const chatflowId = URLpath[URLpath.length - 1] === 'canvas' ? 
'' : URLpath[URLpath.length - 1] + + const { confirm } = useConfirm() + + const dispatch = useDispatch() + const canvas = useSelector((state) => state.canvas) + const [canvasDataStore, setCanvasDataStore] = useState(canvas) + const [chatflow, setChatflow] = useState(null) + + const { reactFlowInstance, setReactFlowInstance } = useContext(flowContext) + + // ==============================|| Snackbar ||============================== // + + useNotifier() + const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) + const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args)) + + // ==============================|| ReactFlow ||============================== // + + const [nodes, setNodes, onNodesChange] = useNodesState() + const [edges, setEdges, onEdgesChange] = useEdgesState() + + const [selectedNode, setSelectedNode] = useState(null) + + const reactFlowWrapper = useRef(null) + + // ==============================|| Chatflow API ||============================== // + + const getNodesApi = useApi(nodesApi.getAllNodes) + const createNewChatflowApi = useApi(chatflowsApi.createNewChatflow) + const testChatflowApi = useApi(chatflowsApi.testChatflow) + const updateChatflowApi = useApi(chatflowsApi.updateChatflow) + const getSpecificChatflowApi = useApi(chatflowsApi.getSpecificChatflow) + + // ==============================|| Events & Actions ||============================== // + + const onConnect = (params) => { + const newEdge = { + ...params, + type: 'buttonedge', + id: `${params.source}-${params.sourceHandle}-${params.target}-${params.targetHandle}`, + data: { label: getEdgeLabelName(params.sourceHandle) } + } + + const targetNodeId = params.targetHandle.split('-')[0] + const sourceNodeId = params.sourceHandle.split('-')[0] + const targetInput = params.targetHandle.split('-')[2] + + setNodes((nds) => + nds.map((node) => { + if (node.id === targetNodeId) { + setTimeout(() => setDirty(), 0) + let value + const inputAnchor = 
node.data.inputAnchors.find((ancr) => ancr.name === targetInput) + const inputParam = node.data.inputParams.find((param) => param.name === targetInput) + + if (inputAnchor && inputAnchor.list) { + const newValues = node.data.inputs[targetInput] || [] + if (targetInput === 'tools') { + rearrangeToolsOrdering(newValues, sourceNodeId) + } else { + newValues.push(`{{${sourceNodeId}.data.instance}}`) + } + value = newValues + } else if (inputParam && inputParam.acceptVariable) { + value = node.data.inputs[targetInput] || '' + } else { + value = `{{${sourceNodeId}.data.instance}}` + } + node.data = { + ...node.data, + inputs: { + ...node.data.inputs, + [targetInput]: value + } + } + } + return node + }) + ) + + setEdges((eds) => addEdge(newEdge, eds)) + } + + const handleLoadFlow = (file) => { + try { + const flowData = JSON.parse(file) + const nodes = flowData.nodes || [] + + setNodes(nodes) + setEdges(flowData.edges || []) + setDirty() + } catch (e) { + console.error(e) + } + } + + const handleDeleteFlow = async () => { + const confirmPayload = { + title: `Delete`, + description: `Delete chatflow ${chatflow.name}?`, + confirmButtonName: 'Delete', + cancelButtonName: 'Cancel' + } + const isConfirmed = await confirm(confirmPayload) + + if (isConfirmed) { + try { + await chatflowsApi.deleteChatflow(chatflow.id) + navigate(-1) + } catch (error) { + const errorData = error.response.data || `${error.response.status}: ${error.response.statusText}` + enqueueSnackbar({ + message: errorData, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + } + } + } + + const handleSaveFlow = (chatflowName) => { + if (reactFlowInstance) { + setNodes((nds) => + nds.map((node) => { + node.data = { + ...node.data, + selected: false + } + return node + }) + ) + + const rfInstanceObject = reactFlowInstance.toObject() + const flowData = JSON.stringify(rfInstanceObject) + + if (!chatflow.id) { + const newChatflowBody = 
{ + name: chatflowName, + deployed: false, + flowData + } + createNewChatflowApi.request(newChatflowBody) + } else { + const updateBody = { + name: chatflowName, + flowData + } + updateChatflowApi.request(chatflow.id, updateBody) + } + } + } + + // eslint-disable-next-line + const onNodeClick = useCallback((event, clickedNode) => { + setSelectedNode(clickedNode) + setNodes((nds) => + nds.map((node) => { + if (node.id === clickedNode.id) { + node.data = { + ...node.data, + selected: true + } + } else { + node.data = { + ...node.data, + selected: false + } + } + + return node + }) + ) + }) + + const onDragOver = useCallback((event) => { + event.preventDefault() + event.dataTransfer.dropEffect = 'move' + }, []) + + const onDrop = useCallback( + (event) => { + event.preventDefault() + const reactFlowBounds = reactFlowWrapper.current.getBoundingClientRect() + let nodeData = event.dataTransfer.getData('application/reactflow') + + // check if the dropped element is valid + if (typeof nodeData === 'undefined' || !nodeData) { + return + } + + nodeData = JSON.parse(nodeData) + + const position = reactFlowInstance.project({ + x: event.clientX - reactFlowBounds.left - 100, + y: event.clientY - reactFlowBounds.top - 50 + }) + + const newNodeId = getUniqueNodeId(nodeData, reactFlowInstance.getNodes()) + + const newNode = { + id: newNodeId, + position, + type: 'customNode', + data: initNode(nodeData, newNodeId) + } + + setSelectedNode(newNode) + setNodes((nds) => + nds.concat(newNode).map((node) => { + if (node.id === newNode.id) { + node.data = { + ...node.data, + selected: true + } + } else { + node.data = { + ...node.data, + selected: false + } + } + + return node + }) + ) + setTimeout(() => setDirty(), 0) + }, + + // eslint-disable-next-line + [reactFlowInstance] + ) + + const saveChatflowSuccess = () => { + dispatch({ type: REMOVE_DIRTY }) + enqueueSnackbar({ + message: 'Chatflow saved', + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + 
action: (key) => ( + + ) + } + }) + } + + const errorFailed = (message) => { + enqueueSnackbar({ + message, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + } + + const setDirty = () => { + dispatch({ type: SET_DIRTY }) + } + + // ==============================|| useEffect ||============================== // + + // Get specific chatflow successful + useEffect(() => { + if (getSpecificChatflowApi.data) { + const chatflow = getSpecificChatflowApi.data + const initialFlow = chatflow.flowData ? JSON.parse(chatflow.flowData) : [] + setNodes(initialFlow.nodes || []) + setEdges(initialFlow.edges || []) + dispatch({ type: SET_CHATFLOW, chatflow }) + } else if (getSpecificChatflowApi.error) { + const error = getSpecificChatflowApi.error + const errorData = error.response.data || `${error.response.status}: ${error.response.statusText}` + errorFailed(`Failed to retrieve chatflow: ${errorData}`) + } + + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [getSpecificChatflowApi.data, getSpecificChatflowApi.error]) + + // Create new chatflow successful + useEffect(() => { + if (createNewChatflowApi.data) { + const chatflow = createNewChatflowApi.data + dispatch({ type: SET_CHATFLOW, chatflow }) + saveChatflowSuccess() + window.history.replaceState(null, null, `/canvas/${chatflow.id}`) + } else if (createNewChatflowApi.error) { + const error = createNewChatflowApi.error + const errorData = error.response.data || `${error.response.status}: ${error.response.statusText}` + errorFailed(`Failed to save chatflow: ${errorData}`) + } + + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [createNewChatflowApi.data, createNewChatflowApi.error]) + + // Update chatflow successful + useEffect(() => { + if (updateChatflowApi.data) { + dispatch({ type: SET_CHATFLOW, chatflow: updateChatflowApi.data }) + saveChatflowSuccess() + } else if (updateChatflowApi.error) { + const error = 
updateChatflowApi.error + const errorData = error.response.data || `${error.response.status}: ${error.response.statusText}` + errorFailed(`Failed to save chatflow: ${errorData}`) + } + + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [updateChatflowApi.data, updateChatflowApi.error]) + + // Test chatflow failed + useEffect(() => { + if (testChatflowApi.error) { + enqueueSnackbar({ + message: 'Test chatflow failed', + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + } + + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [testChatflowApi.error]) + + useEffect(() => setChatflow(canvasDataStore.chatflow), [canvasDataStore.chatflow]) + + // Initialization + useEffect(() => { + if (chatflowId) { + getSpecificChatflowApi.request(chatflowId) + } else { + if (localStorage.getItem('duplicatedFlowData')) { + handleLoadFlow(localStorage.getItem('duplicatedFlowData')) + setTimeout(() => localStorage.removeItem('duplicatedFlowData'), 0) + } else { + setNodes([]) + setEdges([]) + } + dispatch({ + type: SET_CHATFLOW, + chatflow: { + name: 'Untitled chatflow' + } + }) + } + + getNodesApi.request() + + // Clear dirty state before leaving and remove any ongoing test triggers and webhooks + return () => { + setTimeout(() => dispatch({ type: REMOVE_DIRTY }), 0) + } + + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []) + + useEffect(() => { + setCanvasDataStore(canvas) + }, [canvas]) + + useEffect(() => { + function handlePaste(e) { + const pasteData = e.clipboardData.getData('text') + //TODO: prevent paste event when input focused, temporary fix: catch chatflow syntax + if (pasteData.includes('{"nodes":[') && pasteData.includes('],"edges":[')) { + handleLoadFlow(pasteData) + } + } + + window.addEventListener('paste', handlePaste) + + return () => { + window.removeEventListener('paste', handlePaste) + } + + // eslint-disable-next-line react-hooks/exhaustive-deps + 
}, []) + + useEffect(() => { + if (templateFlowData && templateFlowData.includes('"nodes":[') && templateFlowData.includes('],"edges":[')) { + handleLoadFlow(templateFlowData) + } + + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [templateFlowData]) + + usePrompt('You have unsaved changes! Do you want to navigate away?', canvasDataStore.isDirty) + + return ( + <> + + + + + + + +
+
+ + + + + + +
+
+
+ +
+ + ) +} + +export default Canvas diff --git a/packages/ui/src/views/chatflows/index.js b/packages/ui/src/views/chatflows/index.js new file mode 100644 index 0000000000000000000000000000000000000000..6712623e64058525d8d9eb1cb0aa01079d7da9c9 --- /dev/null +++ b/packages/ui/src/views/chatflows/index.js @@ -0,0 +1,138 @@ +import { useEffect, useState } from 'react' +import { useNavigate } from 'react-router-dom' +import { useSelector } from 'react-redux' + +// material-ui +import { Grid, Box, Stack } from '@mui/material' +import { useTheme } from '@mui/material/styles' + +// project imports +import MainCard from 'ui-component/cards/MainCard' +import ItemCard from 'ui-component/cards/ItemCard' +import { gridSpacing } from 'store/constant' +import WorkflowEmptySVG from 'assets/images/workflow_empty.svg' +import { StyledButton } from 'ui-component/button/StyledButton' +import LoginDialog from 'ui-component/dialog/LoginDialog' + +// API +import chatflowsApi from 'api/chatflows' + +// Hooks +import useApi from 'hooks/useApi' + +// const +import { baseURL } from 'store/constant' + +// icons +import { IconPlus } from '@tabler/icons' + +// ==============================|| CHATFLOWS ||============================== // + +const Chatflows = () => { + const navigate = useNavigate() + const theme = useTheme() + const customization = useSelector((state) => state.customization) + + const [isLoading, setLoading] = useState(true) + const [images, setImages] = useState({}) + const [loginDialogOpen, setLoginDialogOpen] = useState(false) + const [loginDialogProps, setLoginDialogProps] = useState({}) + + const getAllChatflowsApi = useApi(chatflowsApi.getAllChatflows) + + const onLoginClick = (username, password) => { + localStorage.setItem('username', username) + localStorage.setItem('password', password) + navigate(0) + } + + const addNew = () => { + navigate('/canvas') + } + + const goToCanvas = (selectedChatflow) => { + navigate(`/canvas/${selectedChatflow.id}`) + } + + useEffect(() => 
{ + getAllChatflowsApi.request() + + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []) + + useEffect(() => { + if (getAllChatflowsApi.error) { + if (getAllChatflowsApi.error?.response?.status === 401) { + setLoginDialogProps({ + title: 'Login', + confirmButtonName: 'Login' + }) + setLoginDialogOpen(true) + } + } + }, [getAllChatflowsApi.error]) + + useEffect(() => { + setLoading(getAllChatflowsApi.loading) + }, [getAllChatflowsApi.loading]) + + useEffect(() => { + if (getAllChatflowsApi.data) { + try { + const chatflows = getAllChatflowsApi.data + const images = {} + for (let i = 0; i < chatflows.length; i += 1) { + const flowDataStr = chatflows[i].flowData + const flowData = JSON.parse(flowDataStr) + const nodes = flowData.nodes || [] + images[chatflows[i].id] = [] + for (let j = 0; j < nodes.length; j += 1) { + const imageSrc = `${baseURL}/api/v1/node-icon/${nodes[j].data.name}` + if (!images[chatflows[i].id].includes(imageSrc)) { + images[chatflows[i].id].push(imageSrc) + } + } + } + setImages(images) + } catch (e) { + console.error(e) + } + } + }, [getAllChatflowsApi.data]) + + return ( + + +

Chatflows

+ + + + }> + Add New + + + +
+ + {!isLoading && + getAllChatflowsApi.data && + getAllChatflowsApi.data.map((data, index) => ( + + goToCanvas(data)} data={data} images={images[data.id]} /> + + ))} + + {!isLoading && (!getAllChatflowsApi.data || getAllChatflowsApi.data.length === 0) && ( + + + WorkflowEmptySVG + +
No Chatflows Yet
+
+ )} + +
+ ) +} + +export default Chatflows diff --git a/packages/ui/src/views/chatmessage/ChatExpandDialog.js b/packages/ui/src/views/chatmessage/ChatExpandDialog.js new file mode 100644 index 0000000000000000000000000000000000000000..aa5cd50483f99abfd4386bd417fc90e4b897f505 --- /dev/null +++ b/packages/ui/src/views/chatmessage/ChatExpandDialog.js @@ -0,0 +1,62 @@ +import { createPortal } from 'react-dom' +import PropTypes from 'prop-types' +import { useSelector } from 'react-redux' + +import { Dialog, DialogContent, DialogTitle, Button } from '@mui/material' +import { ChatMessage } from './ChatMessage' +import { StyledButton } from 'ui-component/button/StyledButton' +import { IconEraser } from '@tabler/icons' + +const ChatExpandDialog = ({ show, dialogProps, onClear, onCancel }) => { + const portalElement = document.getElementById('portal') + const customization = useSelector((state) => state.customization) + + const component = show ? ( + + +
+ {dialogProps.title} +
+ {customization.isDarkMode && ( + } + > + Clear Chat + + )} + {!customization.isDarkMode && ( + + )} +
+
+ + + +
+ ) : null + + return createPortal(component, portalElement) +} + +ChatExpandDialog.propTypes = { + show: PropTypes.bool, + dialogProps: PropTypes.object, + onClear: PropTypes.func, + onCancel: PropTypes.func +} + +export default ChatExpandDialog diff --git a/packages/ui/src/views/chatmessage/ChatMessage.css b/packages/ui/src/views/chatmessage/ChatMessage.css new file mode 100644 index 0000000000000000000000000000000000000000..9086fb1379ea3deb1780c57f0aea4adf381dc374 --- /dev/null +++ b/packages/ui/src/views/chatmessage/ChatMessage.css @@ -0,0 +1,131 @@ +.messagelist { + width: 100%; + height: 100%; + overflow-y: scroll; + border-radius: 0.5rem; +} + +.messagelistloading { + display: flex; + width: 100%; + justify-content: center; + margin-top: 1rem; +} + +.usermessage { + padding: 1rem 1.5rem 1rem 1.5rem; +} + +.usermessagewaiting-light { + padding: 1rem 1.5rem 1rem 1.5rem; + background: linear-gradient(to left, #ede7f6, #e3f2fd, #ede7f6); + background-size: 200% 200%; + background-position: -100% 0; + animation: loading-gradient 2s ease-in-out infinite; + animation-direction: alternate; + animation-name: loading-gradient; +} + +.usermessagewaiting-dark { + padding: 1rem 1.5rem 1rem 1.5rem; + color: #ececf1; + background: linear-gradient(to left, #2e2352, #1d3d60, #2e2352); + background-size: 200% 200%; + background-position: -100% 0; + animation: loading-gradient 2s ease-in-out infinite; + animation-direction: alternate; + animation-name: loading-gradient; +} + +@keyframes loading-gradient { + 0% { + background-position: -100% 0; + } + 100% { + background-position: 100% 0; + } +} + +.apimessage { + padding: 1rem 1.5rem 1rem 1.5rem; + animation: fadein 0.5s; +} + +@keyframes fadein { + from { + opacity: 0; + } + to { + opacity: 1; + } +} + +.apimessage, +.usermessage, +.usermessagewaiting { + display: flex; +} + +.markdownanswer { + line-height: 1.75; +} + +.markdownanswer a:hover { + opacity: 0.8; +} + +.markdownanswer a { + color: #16bed7; + font-weight: 500; +} 
+ +.markdownanswer code { + color: #0ab126; + font-weight: 500; + white-space: pre-wrap !important; +} + +.markdownanswer ol, +.markdownanswer ul { + margin: 1rem; +} + +.boticon, +.usericon { + margin-top: 1rem; + margin-right: 1rem; + border-radius: 1rem; +} + +.markdownanswer h1, +.markdownanswer h2, +.markdownanswer h3 { + font-size: inherit; +} + +.center { + display: flex; + justify-content: center; + align-items: center; + position: relative; + flex-direction: column; + padding: 10px; +} + +.cloud { + width: 400px; + height: calc(100vh - 260px); + border-radius: 0.5rem; + display: flex; + justify-content: center; + align-items: center; +} + +.cloud-dialog { + width: 100%; + height: calc(100vh - 230px); + border-radius: 0.5rem; + display: flex; + justify-content: center; + align-items: center; +} diff --git a/packages/ui/src/views/chatmessage/ChatMessage.js b/packages/ui/src/views/chatmessage/ChatMessage.js new file mode 100644 index 0000000000000000000000000000000000000000..077419f1b785eb6e49294f4a3be1a88ca6e5f590 --- /dev/null +++ b/packages/ui/src/views/chatmessage/ChatMessage.js @@ -0,0 +1,397 @@ +import { useState, useRef, useEffect, useCallback } from 'react' +import { useSelector } from 'react-redux' +import PropTypes from 'prop-types' +import socketIOClient from 'socket.io-client' +import { cloneDeep } from 'lodash' +import rehypeMathjax from 'rehype-mathjax' +import remarkGfm from 'remark-gfm' +import remarkMath from 'remark-math' + +import { CircularProgress, OutlinedInput, Divider, InputAdornment, IconButton, Box, Chip } from '@mui/material' +import { useTheme } from '@mui/material/styles' +import { IconSend } from '@tabler/icons' + +// project import +import { CodeBlock } from 'ui-component/markdown/CodeBlock' +import { MemoizedReactMarkdown } from 'ui-component/markdown/MemoizedReactMarkdown' +import SourceDocDialog from 'ui-component/dialog/SourceDocDialog' +import './ChatMessage.css' + +// api +import chatmessageApi from 'api/chatmessage' 
+import chatflowsApi from 'api/chatflows' +import predictionApi from 'api/prediction' + +// Hooks +import useApi from 'hooks/useApi' + +// Const +import { baseURL, maxScroll } from 'store/constant' + +export const ChatMessage = ({ open, chatflowid, isDialog }) => { + const theme = useTheme() + const customization = useSelector((state) => state.customization) + + const ps = useRef() + + const [userInput, setUserInput] = useState('') + const [loading, setLoading] = useState(false) + const [messages, setMessages] = useState([ + { + message: 'Hi there! How can I help?', + type: 'apiMessage' + } + ]) + const [socketIOClientId, setSocketIOClientId] = useState('') + const [isChatFlowAvailableToStream, setIsChatFlowAvailableToStream] = useState(false) + const [sourceDialogOpen, setSourceDialogOpen] = useState(false) + const [sourceDialogProps, setSourceDialogProps] = useState({}) + + const inputRef = useRef(null) + const getChatmessageApi = useApi(chatmessageApi.getChatmessageFromChatflow) + const getIsChatflowStreamingApi = useApi(chatflowsApi.getIsChatflowStreaming) + + const onSourceDialogClick = (data) => { + setSourceDialogProps({ data }) + setSourceDialogOpen(true) + } + + const scrollToBottom = () => { + if (ps.current) { + ps.current.scrollTo({ top: maxScroll }) + } + } + + const onChange = useCallback((e) => setUserInput(e.target.value), [setUserInput]) + + const addChatMessage = async (message, type, sourceDocuments) => { + try { + const newChatMessageBody = { + role: type, + content: message, + chatflowid: chatflowid + } + if (sourceDocuments) newChatMessageBody.sourceDocuments = JSON.stringify(sourceDocuments) + await chatmessageApi.createNewChatmessage(chatflowid, newChatMessageBody) + } catch (error) { + console.error(error) + } + } + + const updateLastMessage = (text) => { + setMessages((prevMessages) => { + let allMessages = [...cloneDeep(prevMessages)] + if (allMessages[allMessages.length - 1].type === 'userMessage') return allMessages + 
allMessages[allMessages.length - 1].message += text + return allMessages + }) + } + + const updateLastMessageSourceDocuments = (sourceDocuments) => { + setMessages((prevMessages) => { + let allMessages = [...cloneDeep(prevMessages)] + if (allMessages[allMessages.length - 1].type === 'userMessage') return allMessages + allMessages[allMessages.length - 1].sourceDocuments = sourceDocuments + return allMessages + }) + } + + // Handle errors + const handleError = (message = 'Oops! There seems to be an error. Please try again.') => { + message = message.replace(`Unable to parse JSON response from chat agent.\n\n`, '') + setMessages((prevMessages) => [...prevMessages, { message, type: 'apiMessage' }]) + addChatMessage(message, 'apiMessage') + setLoading(false) + setUserInput('') + setTimeout(() => { + inputRef.current?.focus() + }, 100) + } + + // Handle form submission + const handleSubmit = async (e) => { + e.preventDefault() + + if (userInput.trim() === '') { + return + } + + setLoading(true) + setMessages((prevMessages) => [...prevMessages, { message: userInput, type: 'userMessage' }]) + addChatMessage(userInput, 'userMessage') + + // Send user question and history to API + try { + const params = { + question: userInput, + history: messages.filter((msg) => msg.message !== 'Hi there! 
How can I help?') + } + if (isChatFlowAvailableToStream) params.socketIOClientId = socketIOClientId + + const response = await predictionApi.sendMessageAndGetPrediction(chatflowid, params) + + if (response.data) { + const data = response.data + if (typeof data === 'object' && data.text && data.sourceDocuments) { + if (!isChatFlowAvailableToStream) { + setMessages((prevMessages) => [ + ...prevMessages, + { message: data.text, sourceDocuments: data.sourceDocuments, type: 'apiMessage' } + ]) + } + addChatMessage(data.text, 'apiMessage', data.sourceDocuments) + } else { + if (!isChatFlowAvailableToStream) { + setMessages((prevMessages) => [...prevMessages, { message: data, type: 'apiMessage' }]) + } + addChatMessage(data, 'apiMessage') + } + setLoading(false) + setUserInput('') + setTimeout(() => { + inputRef.current?.focus() + scrollToBottom() + }, 100) + } + } catch (error) { + const errorData = error.response.data || `${error.response.status}: ${error.response.statusText}` + handleError(errorData) + return + } + } + + // Prevent blank submissions and allow for multiline input + const handleEnter = (e) => { + if (e.key === 'Enter' && userInput) { + if (!e.shiftKey && userInput) { + handleSubmit(e) + } + } else if (e.key === 'Enter') { + e.preventDefault() + } + } + + // Get chatmessages successful + useEffect(() => { + if (getChatmessageApi.data) { + const loadedMessages = [] + for (const message of getChatmessageApi.data) { + const obj = { + message: message.content, + type: message.role + } + if (message.sourceDocuments) obj.sourceDocuments = JSON.parse(message.sourceDocuments) + loadedMessages.push(obj) + } + setMessages((prevMessages) => [...prevMessages, ...loadedMessages]) + } + + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [getChatmessageApi.data]) + + // Get chatflow streaming capability + useEffect(() => { + if (getIsChatflowStreamingApi.data) { + setIsChatFlowAvailableToStream(getIsChatflowStreamingApi.data?.isStreaming ?? 
false) + } + + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [getIsChatflowStreamingApi.data]) + + // Auto scroll chat to bottom + useEffect(() => { + scrollToBottom() + }, [messages]) + + useEffect(() => { + if (isDialog && inputRef) { + setTimeout(() => { + inputRef.current?.focus() + }, 100) + } + }, [isDialog, inputRef]) + + useEffect(() => { + let socket + if (open && chatflowid) { + getChatmessageApi.request(chatflowid) + getIsChatflowStreamingApi.request(chatflowid) + scrollToBottom() + + socket = socketIOClient(baseURL) + + socket.on('connect', () => { + setSocketIOClientId(socket.id) + }) + + socket.on('start', () => { + setMessages((prevMessages) => [...prevMessages, { message: '', type: 'apiMessage' }]) + }) + + socket.on('sourceDocuments', updateLastMessageSourceDocuments) + + socket.on('token', updateLastMessage) + } + + return () => { + setUserInput('') + setLoading(false) + setMessages([ + { + message: 'Hi there! How can I help?', + type: 'apiMessage' + } + ]) + if (socket) { + socket.disconnect() + setSocketIOClientId('') + } + } + + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [open, chatflowid]) + + return ( + <> +
+
+ {messages && + messages.map((message, index) => { + return ( + // The latest message sent by the user will be animated while waiting for a response + <> + + {/* Display the correct icon depending on the message type */} + {message.type === 'apiMessage' ? ( + AI + ) : ( + Me + )} +
+
+ {/* Messages are being rendered in Markdown format */} + + ) : ( + + {children} + + ) + } + }} + > + {message.message} + +
+ {message.sourceDocuments && ( +
+ {message.sourceDocuments.map((source, index) => { + return ( + onSourceDialogClick(source)} + /> + ) + })} +
+ )} +
+
+ + ) + })} +
+
+ +
+
+
+ + + {loading ? ( +
+ +
+ ) : ( + // Send icon SVG in input field + + )} +
+ + } + /> + +
+
+ setSourceDialogOpen(false)} /> + + ) +} + +ChatMessage.propTypes = { + open: PropTypes.bool, + chatflowid: PropTypes.string, + isDialog: PropTypes.bool +} diff --git a/packages/ui/src/views/chatmessage/ChatPopUp.js b/packages/ui/src/views/chatmessage/ChatPopUp.js new file mode 100644 index 0000000000000000000000000000000000000000..93050c3a8efde18b703735430492738f18ff8454 --- /dev/null +++ b/packages/ui/src/views/chatmessage/ChatPopUp.js @@ -0,0 +1,208 @@ +import { useState, useRef, useEffect } from 'react' +import { useDispatch } from 'react-redux' +import PropTypes from 'prop-types' + +import { ClickAwayListener, Paper, Popper, Button } from '@mui/material' +import { useTheme } from '@mui/material/styles' +import { IconMessage, IconX, IconEraser, IconArrowsMaximize } from '@tabler/icons' + +// project import +import { StyledFab } from 'ui-component/button/StyledFab' +import MainCard from 'ui-component/cards/MainCard' +import Transitions from 'ui-component/extended/Transitions' +import { ChatMessage } from './ChatMessage' +import ChatExpandDialog from './ChatExpandDialog' + +// api +import chatmessageApi from 'api/chatmessage' + +// Hooks +import useConfirm from 'hooks/useConfirm' +import useNotifier from 'utils/useNotifier' + +// Const +import { enqueueSnackbar as enqueueSnackbarAction, closeSnackbar as closeSnackbarAction } from 'store/actions' + +export const ChatPopUp = ({ chatflowid }) => { + const theme = useTheme() + const { confirm } = useConfirm() + const dispatch = useDispatch() + + useNotifier() + const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) + const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args)) + + const [open, setOpen] = useState(false) + const [showExpandDialog, setShowExpandDialog] = useState(false) + const [expandDialogProps, setExpandDialogProps] = useState({}) + + const anchorRef = useRef(null) + const prevOpen = useRef(open) + + const handleClose = (event) => { + if (anchorRef.current && 
anchorRef.current.contains(event.target)) { + return + } + setOpen(false) + } + + const handleToggle = () => { + setOpen((prevOpen) => !prevOpen) + } + + const expandChat = () => { + const props = { + open: true, + chatflowid: chatflowid + } + setExpandDialogProps(props) + setShowExpandDialog(true) + } + + const resetChatDialog = () => { + const props = { + ...expandDialogProps, + open: false + } + setExpandDialogProps(props) + setTimeout(() => { + const resetProps = { + ...expandDialogProps, + open: true + } + setExpandDialogProps(resetProps) + }, 500) + } + + const clearChat = async () => { + const confirmPayload = { + title: `Clear Chat History`, + description: `Are you sure you want to clear all chat history?`, + confirmButtonName: 'Clear', + cancelButtonName: 'Cancel' + } + const isConfirmed = await confirm(confirmPayload) + + if (isConfirmed) { + try { + await chatmessageApi.deleteChatmessage(chatflowid) + resetChatDialog() + enqueueSnackbar({ + message: 'Succesfully cleared all chat history', + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + } catch (error) { + const errorData = error.response.data || `${error.response.status}: ${error.response.statusText}` + enqueueSnackbar({ + message: errorData, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + } + } + } + + useEffect(() => { + if (prevOpen.current === true && open === false) { + anchorRef.current.focus() + } + prevOpen.current = open + + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [open, chatflowid]) + + return ( + <> + + {open ? 
: } + + {open && ( + + + + )} + {open && ( + + + + )} + + {({ TransitionProps }) => ( + + + + + + + + + + )} + + setShowExpandDialog(false)} + > + + ) +} + +ChatPopUp.propTypes = { chatflowid: PropTypes.string } diff --git a/packages/ui/src/views/marketplaces/MarketplaceCanvas.js b/packages/ui/src/views/marketplaces/MarketplaceCanvas.js new file mode 100644 index 0000000000000000000000000000000000000000..7ce29451fe5a5fd9dc222639a24aec642a5617b6 --- /dev/null +++ b/packages/ui/src/views/marketplaces/MarketplaceCanvas.js @@ -0,0 +1,105 @@ +import { useEffect, useRef } from 'react' +import ReactFlow, { Controls, Background, useNodesState, useEdgesState } from 'reactflow' +import 'reactflow/dist/style.css' +import 'views/canvas/index.css' + +import { useLocation, useNavigate } from 'react-router-dom' + +// material-ui +import { Toolbar, Box, AppBar } from '@mui/material' +import { useTheme } from '@mui/material/styles' + +// project imports +import MarketplaceCanvasNode from './MarketplaceCanvasNode' + +import MarketplaceCanvasHeader from './MarketplaceCanvasHeader' + +const nodeTypes = { customNode: MarketplaceCanvasNode } +const edgeTypes = { buttonedge: '' } + +// ==============================|| CANVAS ||============================== // + +const MarketplaceCanvas = () => { + const theme = useTheme() + const navigate = useNavigate() + + const { state } = useLocation() + const { flowData, name } = state + + // ==============================|| ReactFlow ||============================== // + + const [nodes, setNodes, onNodesChange] = useNodesState() + const [edges, setEdges, onEdgesChange] = useEdgesState() + + const reactFlowWrapper = useRef(null) + + // ==============================|| useEffect ||============================== // + + useEffect(() => { + if (flowData) { + const initialFlow = JSON.parse(flowData) + setNodes(initialFlow.nodes || []) + setEdges(initialFlow.edges || []) + } + + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [flowData]) + + 
const onChatflowCopy = (flowData) => { + const templateFlowData = JSON.stringify(flowData) + navigate(`/canvas`, { state: { templateFlowData } }) + } + + return ( + <> + + + + onChatflowCopy(flowData)} + /> + + + +
+
+ + + + +
+
+
+
+ + ) +} + +export default MarketplaceCanvas diff --git a/packages/ui/src/views/marketplaces/MarketplaceCanvasHeader.js b/packages/ui/src/views/marketplaces/MarketplaceCanvasHeader.js new file mode 100644 index 0000000000000000000000000000000000000000..ec18b5d684e77123b48fd9e4937f6cfe06a9feed --- /dev/null +++ b/packages/ui/src/views/marketplaces/MarketplaceCanvasHeader.js @@ -0,0 +1,76 @@ +import PropTypes from 'prop-types' +import { useNavigate } from 'react-router-dom' + +// material-ui +import { useTheme } from '@mui/material/styles' +import { Avatar, Box, ButtonBase, Typography, Stack } from '@mui/material' +import { StyledButton } from 'ui-component/button/StyledButton' + +// icons +import { IconCopy, IconChevronLeft } from '@tabler/icons' + +// ==============================|| CANVAS HEADER ||============================== // + +const MarketplaceCanvasHeader = ({ flowName, flowData, onChatflowCopy }) => { + const theme = useTheme() + const navigate = useNavigate() + + return ( + <> + + + navigate(-1)} + > + + + + + + + + {flowName} + + + + + onChatflowCopy(flowData)} + startIcon={} + > + Use Template + + + + ) +} + +MarketplaceCanvasHeader.propTypes = { + flowName: PropTypes.string, + flowData: PropTypes.object, + onChatflowCopy: PropTypes.func +} + +export default MarketplaceCanvasHeader diff --git a/packages/ui/src/views/marketplaces/MarketplaceCanvasNode.js b/packages/ui/src/views/marketplaces/MarketplaceCanvasNode.js new file mode 100644 index 0000000000000000000000000000000000000000..8ec5ada30988a732b63d3b2cd01a3bf1014032b8 --- /dev/null +++ b/packages/ui/src/views/marketplaces/MarketplaceCanvasNode.js @@ -0,0 +1,151 @@ +import PropTypes from 'prop-types' +import { useState } from 'react' + +// material-ui +import { styled, useTheme } from '@mui/material/styles' +import { Box, Typography, Divider, Button } from '@mui/material' + +// project imports +import MainCard from 'ui-component/cards/MainCard' +import NodeInputHandler from 
'views/canvas/NodeInputHandler' +import NodeOutputHandler from 'views/canvas/NodeOutputHandler' +import AdditionalParamsDialog from 'ui-component/dialog/AdditionalParamsDialog' + +// const +import { baseURL } from 'store/constant' + +const CardWrapper = styled(MainCard)(({ theme }) => ({ + background: theme.palette.card.main, + color: theme.darkTextPrimary, + border: 'solid 1px', + borderColor: theme.palette.primary[200] + 75, + width: '300px', + height: 'auto', + padding: '10px', + boxShadow: '0 2px 14px 0 rgb(32 40 45 / 8%)', + '&:hover': { + borderColor: theme.palette.primary.main + } +})) + +// ===========================|| CANVAS NODE ||=========================== // + +const MarketplaceCanvasNode = ({ data }) => { + const theme = useTheme() + + const [showDialog, setShowDialog] = useState(false) + const [dialogProps, setDialogProps] = useState({}) + + const onDialogClicked = () => { + const dialogProps = { + data, + inputParams: data.inputParams.filter((param) => param.additionalParams), + disabled: true, + confirmButtonName: 'Save', + cancelButtonName: 'Cancel' + } + setDialogProps(dialogProps) + setShowDialog(true) + } + + return ( + <> + + +
+ +
+ Notification +
+
+ + + {data.label} + + +
+ {(data.inputAnchors.length > 0 || data.inputParams.length > 0) && ( + <> + + + + Inputs + + + + + )} + {data.inputAnchors.map((inputAnchor, index) => ( + + ))} + {data.inputParams.map((inputParam, index) => ( + + ))} + {data.inputParams.find((param) => param.additionalParams) && ( +
+ +
+ )} + + + + Output + + + + + {data.outputAnchors.map((outputAnchor, index) => ( + + ))} +
+
+ setShowDialog(false)} + > + + ) +} + +MarketplaceCanvasNode.propTypes = { + data: PropTypes.object +} + +export default MarketplaceCanvasNode diff --git a/packages/ui/src/views/marketplaces/index.js b/packages/ui/src/views/marketplaces/index.js new file mode 100644 index 0000000000000000000000000000000000000000..ba9eb3d63b0f9b282551ded0353e9cdd34e88cf4 --- /dev/null +++ b/packages/ui/src/views/marketplaces/index.js @@ -0,0 +1,101 @@ +import { useEffect, useState } from 'react' +import { useNavigate } from 'react-router-dom' +import { useSelector } from 'react-redux' + +// material-ui +import { Grid, Box, Stack } from '@mui/material' +import { useTheme } from '@mui/material/styles' + +// project imports +import MainCard from 'ui-component/cards/MainCard' +import ItemCard from 'ui-component/cards/ItemCard' +import { gridSpacing } from 'store/constant' +import WorkflowEmptySVG from 'assets/images/workflow_empty.svg' + +// API +import marketplacesApi from 'api/marketplaces' + +// Hooks +import useApi from 'hooks/useApi' + +// const +import { baseURL } from 'store/constant' + +// ==============================|| Marketplace ||============================== // + +const Marketplace = () => { + const navigate = useNavigate() + + const theme = useTheme() + const customization = useSelector((state) => state.customization) + + const [isLoading, setLoading] = useState(true) + const [images, setImages] = useState({}) + + const getAllMarketplacesApi = useApi(marketplacesApi.getAllMarketplaces) + + const goToCanvas = (selectedChatflow) => { + navigate(`/marketplace/${selectedChatflow.id}`, { state: selectedChatflow }) + } + + useEffect(() => { + getAllMarketplacesApi.request() + + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []) + + useEffect(() => { + setLoading(getAllMarketplacesApi.loading) + }, [getAllMarketplacesApi.loading]) + + useEffect(() => { + if (getAllMarketplacesApi.data) { + try { + const chatflows = getAllMarketplacesApi.data + const images = {} 
+ for (let i = 0; i < chatflows.length; i += 1) { + const flowDataStr = chatflows[i].flowData + const flowData = JSON.parse(flowDataStr) + const nodes = flowData.nodes || [] + images[chatflows[i].id] = [] + for (let j = 0; j < nodes.length; j += 1) { + const imageSrc = `${baseURL}/api/v1/node-icon/${nodes[j].data.name}` + if (!images[chatflows[i].id].includes(imageSrc)) { + images[chatflows[i].id].push(imageSrc) + } + } + } + setImages(images) + } catch (e) { + console.error(e) + } + } + }, [getAllMarketplacesApi.data]) + + return ( + + +

Marketplace

+
+ + {!isLoading && + getAllMarketplacesApi.data && + getAllMarketplacesApi.data.map((data, index) => ( + + goToCanvas(data)} data={data} images={images[data.id]} /> + + ))} + + {!isLoading && (!getAllMarketplacesApi.data || getAllMarketplacesApi.data.length === 0) && ( + + + WorkflowEmptySVG + +
No Marketplace Yet
+
+ )} +
+ ) +} + +export default Marketplace diff --git a/packages/ui/src/views/settings/index.js b/packages/ui/src/views/settings/index.js new file mode 100644 index 0000000000000000000000000000000000000000..8d76cc0d7aa478c3160f706c10cce1ef274cc041 --- /dev/null +++ b/packages/ui/src/views/settings/index.js @@ -0,0 +1,104 @@ +import { useState, useEffect } from 'react' +import PropTypes from 'prop-types' + +// material-ui +import { useTheme } from '@mui/material/styles' +import { Box, List, Paper, Popper, ClickAwayListener } from '@mui/material' + +// third-party +import PerfectScrollbar from 'react-perfect-scrollbar' + +// project imports +import MainCard from 'ui-component/cards/MainCard' +import Transitions from 'ui-component/extended/Transitions' +import NavItem from 'layout/MainLayout/Sidebar/MenuList/NavItem' + +import settings from 'menu-items/settings' + +// ==============================|| SETTINGS ||============================== // + +const Settings = ({ chatflow, isSettingsOpen, anchorEl, onSettingsItemClick, onUploadFile, onClose }) => { + const theme = useTheme() + const [settingsMenu, setSettingsMenu] = useState([]) + + const [open, setOpen] = useState(false) + + useEffect(() => { + if (chatflow && !chatflow.id) { + const settingsMenu = settings.children.filter((menu) => menu.id === 'loadChatflow') + setSettingsMenu(settingsMenu) + } else if (chatflow && chatflow.id) { + const settingsMenu = settings.children + setSettingsMenu(settingsMenu) + } + }, [chatflow]) + + useEffect(() => { + setOpen(isSettingsOpen) + }, [isSettingsOpen]) + + // settings list items + const items = settingsMenu.map((menu) => { + return ( + onSettingsItemClick(id)} + onUploadFile={onUploadFile} + /> + ) + }) + + return ( + <> + + {({ TransitionProps }) => ( + + + + + + + {items} + + + + + + + )} + + + ) +} + +Settings.propTypes = { + chatflow: PropTypes.object, + isSettingsOpen: PropTypes.bool, + anchorEl: PropTypes.any, + onSettingsItemClick: PropTypes.func, + onUploadFile: 
PropTypes.func, + onClose: PropTypes.func +} + +export default Settings diff --git a/turbo.json b/turbo.json new file mode 100644 index 0000000000000000000000000000000000000000..a0c98004888816437e7190a3a118b9124b155e79 --- /dev/null +++ b/turbo.json @@ -0,0 +1,13 @@ +{ + "$schema": "https://turbo.build/schema.json", + "pipeline": { + "build": { + "dependsOn": ["^build"], + "outputs": ["dist/**"] + }, + "test": {}, + "dev": { + "cache": false + } + } +}