Compare commits
550 Commits
flowise@2.
...
main
| Author | SHA1 | Date |
|---|---|---|
|
|
465005a503 | |
|
|
e6e0c2d07b | |
|
|
660a8e357a | |
|
|
113180d03b | |
|
|
069ba28bc0 | |
|
|
20db1597a4 | |
|
|
478a294095 | |
|
|
6a59af11e6 | |
|
|
562370b8e2 | |
|
|
4e92db6910 | |
|
|
7cc2c13694 | |
|
|
3ad2b3a559 | |
|
|
da32fc7167 | |
|
|
315e3aedc3 | |
|
|
9dbb4bf623 | |
|
|
1f3f7a7194 | |
|
|
4d79653741 | |
|
|
03ef28afbc | |
|
|
0cc7b3036e | |
|
|
097404f24a | |
|
|
2029588d4d | |
|
|
c9db81096a | |
|
|
b5f7fac015 | |
|
|
ca22160361 | |
|
|
ffe69936dc | |
|
|
b8f7a200fb | |
|
|
2f2b6e1713 | |
|
|
4e1fac501f | |
|
|
888994bc8f | |
|
|
3cab803918 | |
|
|
366d38b861 | |
|
|
2414057c08 | |
|
|
4a642f02d0 | |
|
|
ceb0512e2f | |
|
|
94cae3b66f | |
|
|
3fafd15a80 | |
|
|
9ff3d653ae | |
|
|
0dc14b5cd3 | |
|
|
b9a020dc70 | |
|
|
761ffe6851 | |
|
|
6d3755d16e | |
|
|
faf0a0a315 | |
|
|
4624e15c2e | |
|
|
a7b6f9b208 | |
|
|
2bd96090f0 | |
|
|
346a55b6d8 | |
|
|
03c1750d73 | |
|
|
ec1762b10f | |
|
|
02bb2ba62b | |
|
|
fdb6422aef | |
|
|
fe6f5f88a5 | |
|
|
82124d4871 | |
|
|
3b8b21342d | |
|
|
679a0409f5 | |
|
|
1fa9303d7c | |
|
|
75eb5f57aa | |
|
|
3d731664f9 | |
|
|
0f8d45d25c | |
|
|
3e8db185dd | |
|
|
6f5b0d9906 | |
|
|
fd7fc2f4d7 | |
|
|
a92f7dfc3f | |
|
|
80224275d9 | |
|
|
4417102f6c | |
|
|
0149688a16 | |
|
|
f3d5b7766d | |
|
|
97515989a2 | |
|
|
601de76aea | |
|
|
c99d870c82 | |
|
|
5df09a15b8 | |
|
|
e925801b63 | |
|
|
eed7581d0e | |
|
|
1ae1638ed9 | |
|
|
0a3c8b94ab | |
|
|
9554b1a8e3 | |
|
|
ac565b8981 | |
|
|
37ef6ffa50 | |
|
|
2ae4678da4 | |
|
|
6f94d61f22 | |
|
|
62d34066c9 | |
|
|
f3f2eabb89 | |
|
|
bff859520a | |
|
|
4111ec31b0 | |
|
|
7ab586c865 | |
|
|
ac794ab6eb | |
|
|
1fb12cd931 | |
|
|
a0dca552a2 | |
|
|
a38d37f4b5 | |
|
|
1a410d84ac | |
|
|
7a50755546 | |
|
|
ac252516f8 | |
|
|
6fe5b98d6f | |
|
|
9b8fee3d8f | |
|
|
8d0a198e2f | |
|
|
580957e4aa | |
|
|
a86f618186 | |
|
|
8c1175225f | |
|
|
28b0174eea | |
|
|
b501932491 | |
|
|
6890ced939 | |
|
|
0065e8f1a0 | |
|
|
31434e52ce | |
|
|
84a0a45ff7 | |
|
|
dd284e37c3 | |
|
|
b5da234ce7 | |
|
|
e48f28d13d | |
|
|
cf6539cd3f | |
|
|
011d60332e | |
|
|
e9d4c3b54b | |
|
|
41131dfac3 | |
|
|
42152dd036 | |
|
|
fc50f2308b | |
|
|
f560768133 | |
|
|
c4322ce70b | |
|
|
79023c8909 | |
|
|
05763db8d3 | |
|
|
6e291cf05d | |
|
|
89a0f23fe5 | |
|
|
c00ae78488 | |
|
|
b2dcdab5b9 | |
|
|
6885c38d18 | |
|
|
6e2f2df269 | |
|
|
4af067a444 | |
|
|
e002e617df | |
|
|
4987a2880d | |
|
|
736c2b11a1 | |
|
|
6fb9bb559f | |
|
|
32bf030924 | |
|
|
099cf481b4 | |
|
|
113086a2fb | |
|
|
c17dd1f141 | |
|
|
42fed5713e | |
|
|
449e8113e0 | |
|
|
9e178d6887 | |
|
|
2ab20f71d9 | |
|
|
b026671887 | |
|
|
23cb5f7801 | |
|
|
763e33b073 | |
|
|
a5a728fd06 | |
|
|
9b3971d8d8 | |
|
|
af1464f7c2 | |
|
|
bf1ddc3be5 | |
|
|
ad0679801a | |
|
|
9cac8d7a00 | |
|
|
e5381f5090 | |
|
|
b126472816 | |
|
|
4ce0851858 | |
|
|
44087bc706 | |
|
|
55f8f69060 | |
|
|
6e44051bea | |
|
|
7a74e33be1 | |
|
|
e99aecb473 | |
|
|
ba6a602cbe | |
|
|
fbae51b260 | |
|
|
114a844964 | |
|
|
68dc041d02 | |
|
|
32cd06cd28 | |
|
|
32e5b13c46 | |
|
|
db4de4552a | |
|
|
9c070c7205 | |
|
|
fddd40a5cd | |
|
|
bbcfb5ab63 | |
|
|
fa15b6873d | |
|
|
9181ae2879 | |
|
|
3b1b4dc5f9 | |
|
|
b608219642 | |
|
|
3187377c61 | |
|
|
feb899ab19 | |
|
|
9e743e4aa1 | |
|
|
141c49013a | |
|
|
b024cd61f4 | |
|
|
78144f37b5 | |
|
|
e3e4d6a904 | |
|
|
5930f1119c | |
|
|
8aa2507ed9 | |
|
|
ff9a2a65b5 | |
|
|
d29db16bfc | |
|
|
28fec16873 | |
|
|
3a33bfadf0 | |
|
|
89a806f722 | |
|
|
ed27ad0c58 | |
|
|
049596a7b5 | |
|
|
5259bab778 | |
|
|
9b54aa8879 | |
|
|
0998bf4327 | |
|
|
e8dac2048f | |
|
|
498129e9d2 | |
|
|
46816c7c1e | |
|
|
bbb03b7b3b | |
|
|
aea2b184da | |
|
|
8846fd14e6 | |
|
|
5ae6ae2916 | |
|
|
9a6fd97f2c | |
|
|
221ac9b25d | |
|
|
caffad0fb0 | |
|
|
8562d4a563 | |
|
|
d272683a98 | |
|
|
00342bde88 | |
|
|
a3f47af027 | |
|
|
d081221a97 | |
|
|
f2bd83252d | |
|
|
910a3c5229 | |
|
|
d77919ba50 | |
|
|
e8c36b6894 | |
|
|
efc9ac222f | |
|
|
dca91b979b | |
|
|
9a06a85a8d | |
|
|
96a57a58e7 | |
|
|
fbe9f34a60 | |
|
|
2b7a074c8b | |
|
|
cc4a773010 | |
|
|
d584c0b700 | |
|
|
ebf222731e | |
|
|
2605a1f74e | |
|
|
2e1999e6f1 | |
|
|
5e5b2a18e2 | |
|
|
cf965f3d8e | |
|
|
0ac01d3cbb | |
|
|
1bed5a264e | |
|
|
8a6b95ef0e | |
|
|
9839009823 | |
|
|
791c1e3274 | |
|
|
e3eeb5d8a8 | |
|
|
9d438529a6 | |
|
|
ee5ab1bd6d | |
|
|
849b94b049 | |
|
|
14fc1b4d20 | |
|
|
bf05f25f7e | |
|
|
6baec93860 | |
|
|
30e8317327 | |
|
|
aea2801b8c | |
|
|
a25c5c4514 | |
|
|
768de6140c | |
|
|
0627693133 | |
|
|
bbf6970600 | |
|
|
9b60cf1234 | |
|
|
be7599542b | |
|
|
4c3b729b79 | |
|
|
e326bc8f49 | |
|
|
e7553a1c4e | |
|
|
9efb70e04c | |
|
|
c78b5326b6 | |
|
|
7a5368c6f6 | |
|
|
d66e40e2e7 | |
|
|
81699a1e56 | |
|
|
5dd30b1a70 | |
|
|
d5bc718246 | |
|
|
3f26569e6e | |
|
|
397ba63d60 | |
|
|
4038eb13fc | |
|
|
24426266ba | |
|
|
a872d77f38 | |
|
|
2e42dfb635 | |
|
|
f50a817bf4 | |
|
|
602054e509 | |
|
|
543800562e | |
|
|
035b5555a9 | |
|
|
9a60b7b223 | |
|
|
a107aa7a77 | |
|
|
15dd28356b | |
|
|
8ba1a09077 | |
|
|
306b6fbb31 | |
|
|
3d2c5c90e9 | |
|
|
dfb401ad83 | |
|
|
21caedde72 | |
|
|
e17994d8fe | |
|
|
f644c47251 | |
|
|
f45ca72df2 | |
|
|
8272283618 | |
|
|
12b4259a01 | |
|
|
2387a06ce4 | |
|
|
6495c64dac | |
|
|
02a6753498 | |
|
|
7dfa269502 | |
|
|
0c5f7ea003 | |
|
|
30c4180d97 | |
|
|
6dcb65cedb | |
|
|
2cd8db0c53 | |
|
|
8793ed628c | |
|
|
5f7f83a5d2 | |
|
|
d134b66bd8 | |
|
|
aa0984e802 | |
|
|
63ff703e7a | |
|
|
a88337cc83 | |
|
|
eb69b23d73 | |
|
|
82e21d1fed | |
|
|
60b18353a7 | |
|
|
7c803f4e0b | |
|
|
979920ff7f | |
|
|
6f8079f6ee | |
|
|
954e6c88f4 | |
|
|
9682a0ccd9 | |
|
|
27bc47ed57 | |
|
|
da8d0f12d6 | |
|
|
3d6bf72e73 | |
|
|
2baa43d66f | |
|
|
7ef0e99eb2 | |
|
|
82d60c7d15 | |
|
|
4326cbe6b5 | |
|
|
7e7ff24941 | |
|
|
729043bcba | |
|
|
895b810f87 | |
|
|
d06b7d7aef | |
|
|
07b251b4bc | |
|
|
01dab4365a | |
|
|
572fb31a1c | |
|
|
eca190dca6 | |
|
|
0dd6c5b2b9 | |
|
|
66bf0749af | |
|
|
5a37227d14 | |
|
|
e35a126b46 | |
|
|
9bc6bfed69 | |
|
|
cf67afb078 | |
|
|
86782e9971 | |
|
|
6cf1c82f04 | |
|
|
9170cac58b | |
|
|
cd36924bf4 | |
|
|
4786aafddc | |
|
|
0a4570ecda | |
|
|
a6e64230b4 | |
|
|
624143ad15 | |
|
|
a27826cdc0 | |
|
|
761a6416ab | |
|
|
82d16458e4 | |
|
|
eadf1b11b3 | |
|
|
98e75ad7d6 | |
|
|
a8f990c242 | |
|
|
4a0e86b30f | |
|
|
10f85ef47e | |
|
|
0c3329b81b | |
|
|
7924fbce0d | |
|
|
82e6f43b5c | |
|
|
e467d0615c | |
|
|
ac9d732550 | |
|
|
d75e847091 | |
|
|
c3610ff3c7 | |
|
|
df26e8aef9 | |
|
|
b55fe07511 | |
|
|
8157dce8ee | |
|
|
f5b9c6907e | |
|
|
e1979e42c3 | |
|
|
5b4693cca3 | |
|
|
925ca7be81 | |
|
|
6dcc7bb152 | |
|
|
ddeb59169b | |
|
|
fc6eea7653 | |
|
|
ac0450523a | |
|
|
f8ca105822 | |
|
|
68d3c83980 | |
|
|
9c1652570e | |
|
|
ddba891dcb | |
|
|
9d9135bed5 | |
|
|
a7b4ae733f | |
|
|
e75c831beb | |
|
|
416e57380e | |
|
|
54d1b5e3bb | |
|
|
9d9b40a326 | |
|
|
e9ece5ce1e | |
|
|
a6506b3bf7 | |
|
|
27ad522b8d | |
|
|
4009eb227b | |
|
|
d71369c3b7 | |
|
|
d3510d1054 | |
|
|
654bd48849 | |
|
|
c318fc57e9 | |
|
|
d53b1b657f | |
|
|
5faff52053 | |
|
|
36870e94d4 | |
|
|
4277819c5f | |
|
|
3098c8e75f | |
|
|
f963e5aa48 | |
|
|
ca69a39b82 | |
|
|
b988cae58c | |
|
|
8e63b999b8 | |
|
|
dd56d03b78 | |
|
|
a07546145d | |
|
|
3bd2d63a19 | |
|
|
d7c0858424 | |
|
|
9957184680 | |
|
|
cb06df4584 | |
|
|
0e1b1ee251 | |
|
|
13fce45856 | |
|
|
4fa2672c9d | |
|
|
7867489727 | |
|
|
145a3bb415 | |
|
|
6a0b8be422 | |
|
|
c5455137f9 | |
|
|
2b9a1ae316 | |
|
|
c2b830f279 | |
|
|
c6968ff385 | |
|
|
829d2b1597 | |
|
|
4130156397 | |
|
|
9d9aaaa886 | |
|
|
c3ea5a9b8f | |
|
|
641df313d4 | |
|
|
bde9e543d4 | |
|
|
93cf47ce40 | |
|
|
69a272201a | |
|
|
a369f0c1cc | |
|
|
ac7cf30e01 | |
|
|
cf7d841f88 | |
|
|
29bff647a9 | |
|
|
713ed26971 | |
|
|
9c22bee991 | |
|
|
fc9d6e7a16 | |
|
|
fc9740af16 | |
|
|
da04289ecf | |
|
|
e58c8b953d | |
|
|
289c2591d6 | |
|
|
1678815540 | |
|
|
7d8541a44b | |
|
|
9a92aa12f9 | |
|
|
a8d74336dd | |
|
|
97a196e11a | |
|
|
86a26f6ae3 | |
|
|
d8dc1820e1 | |
|
|
229366968a | |
|
|
3290798d4b | |
|
|
ac138a7a27 | |
|
|
fe2d16004c | |
|
|
0e10952b45 | |
|
|
542936c33f | |
|
|
19c36c6d11 | |
|
|
f71e5cbfb6 | |
|
|
c0a74782d8 | |
|
|
23ccef165d | |
|
|
54cdf00e7e | |
|
|
15dc8b701a | |
|
|
0449e80395 | |
|
|
96dd1aaeea | |
|
|
c96d634db2 | |
|
|
8e2e790a93 | |
|
|
aeb82c2fb4 | |
|
|
01cb3ef20d | |
|
|
a0b4abdd13 | |
|
|
2a0e712b7d | |
|
|
e1ea1c68d1 | |
|
|
8d327e465c | |
|
|
7d125d50d4 | |
|
|
20a797d2e0 | |
|
|
a49177f7fb | |
|
|
81cd904b73 | |
|
|
315536ed6e | |
|
|
abb1b2d0d2 | |
|
|
ad60140e38 | |
|
|
9a68b4b021 | |
|
|
dc8112d4f0 | |
|
|
6e95989647 | |
|
|
a2a475ba7a | |
|
|
14adb936f2 | |
|
|
5d311755fa | |
|
|
e26fc63be0 | |
|
|
50a7339299 | |
|
|
1baa4f8e4f | |
|
|
ca559c5f21 | |
|
|
5cc0e98604 | |
|
|
4aa97b0c9a | |
|
|
4c9d46d7e5 | |
|
|
3fc4e79089 | |
|
|
320eab65d6 | |
|
|
9c2203be62 | |
|
|
5c9f17814b | |
|
|
c89be26024 | |
|
|
62d5d1e8ef | |
|
|
ef96b57303 | |
|
|
24eb437bad | |
|
|
cc87d85675 | |
|
|
16aa3a0d29 | |
|
|
aab493c3c7 | |
|
|
d2fa430725 | |
|
|
89c5eb0492 | |
|
|
d60242c224 | |
|
|
15d06ec4b3 | |
|
|
a2d5cf979c | |
|
|
c36267cf74 | |
|
|
8d266052ae | |
|
|
1ae78c2739 | |
|
|
c2c1ca9162 | |
|
|
b2224b9ca1 | |
|
|
e429af139e | |
|
|
9a417bdc95 | |
|
|
22801591da | |
|
|
7aead83d61 | |
|
|
4c400301f2 | |
|
|
3b13e8345a | |
|
|
1111101cff | |
|
|
e22c60115e | |
|
|
7af5815fa9 | |
|
|
b6165e3578 | |
|
|
d71ad22e27 | |
|
|
50475f1fe5 | |
|
|
b34a82335d | |
|
|
9cceba2240 | |
|
|
5ba0ded4cc | |
|
|
4e434fd725 | |
|
|
a7c1ab881c | |
|
|
93f3a5d98a | |
|
|
0381a99c4d | |
|
|
94f67c0212 | |
|
|
e8a33e4d4d | |
|
|
bbe0203f4e | |
|
|
b29523d093 | |
|
|
d6b35465e5 | |
|
|
2df129f91b | |
|
|
e8903a5719 | |
|
|
c809f4165a | |
|
|
1b48d564f9 | |
|
|
bb15e5c2c0 | |
|
|
2360f5fdeb | |
|
|
fff6319f5d | |
|
|
4044febfd9 | |
|
|
6511fb3fe1 | |
|
|
4c29b2390c | |
|
|
d5498858ec | |
|
|
85abd157a2 | |
|
|
bfd677059e | |
|
|
26b78ad55a | |
|
|
d974564ba5 | |
|
|
13fb0f1384 | |
|
|
99d4bacd8a | |
|
|
b087f4c4a1 | |
|
|
5c5416240b | |
|
|
ddca80d4e0 | |
|
|
680fe8dee1 | |
|
|
7d1234a8b4 | |
|
|
cadc3b8fb3 | |
|
|
fe2ed26999 | |
|
|
e02045285f | |
|
|
e773181b61 | |
|
|
09d20fa5ad | |
|
|
371da23986 | |
|
|
20e093884b | |
|
|
ba8a462bf8 | |
|
|
7482e7fff6 | |
|
|
bd203a180a | |
|
|
d20a970a7b | |
|
|
3b804d7777 | |
|
|
56c00ebf38 | |
|
|
3478b39a40 | |
|
|
f6858f4ec9 | |
|
|
940c8fd3b0 | |
|
|
126808b62a | |
|
|
c50c7bdcce | |
|
|
4c1951d5b6 | |
|
|
76ae921240 | |
|
|
af5e6b0968 | |
|
|
7d9e1514b1 | |
|
|
a2c36b4447 | |
|
|
36496b1611 | |
|
|
919cb5097e |
|
|
@ -1,37 +0,0 @@
|
|||
---
|
||||
name: Bug report
|
||||
about: Create a report to help us improve
|
||||
title: '[BUG]'
|
||||
labels: ''
|
||||
assignees: ''
|
||||
---
|
||||
|
||||
**Describe the bug**
|
||||
A clear and concise description of what the bug is.
|
||||
|
||||
**To Reproduce**
|
||||
Steps to reproduce the behavior:
|
||||
|
||||
1. Go to '...'
|
||||
2. Click on '....'
|
||||
3. Scroll down to '....'
|
||||
4. See error
|
||||
|
||||
**Expected behavior**
|
||||
A clear and concise description of what you expected to happen.
|
||||
|
||||
**Screenshots**
|
||||
If applicable, add screenshots to help explain your problem.
|
||||
|
||||
**Flow**
|
||||
If applicable, add exported flow in order to help replicating the problem.
|
||||
|
||||
**Setup**
|
||||
|
||||
- Installation [e.g. docker, `npx flowise start`, `pnpm start`]
|
||||
- Flowise Version [e.g. 1.2.11]
|
||||
- OS: [e.g. macOS, Windows, Linux]
|
||||
- Browser [e.g. chrome, safari]
|
||||
|
||||
**Additional context**
|
||||
Add any other context about the problem here.
|
||||
|
|
@ -0,0 +1,101 @@
|
|||
name: Bug Report
|
||||
description: File a bug report to help us improve
|
||||
labels: ['bug']
|
||||
assignees: []
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
Make sure to have a proper title and description.
|
||||
|
||||
- type: textarea
|
||||
id: bug-description
|
||||
attributes:
|
||||
label: Describe the bug
|
||||
description: A clear and concise description of what the bug is.
|
||||
placeholder: Tell us what you see!
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
id: reproduce
|
||||
attributes:
|
||||
label: To Reproduce
|
||||
description: Steps to reproduce the behavior
|
||||
placeholder: |
|
||||
1. Go to '...'
|
||||
2. Click on '....'
|
||||
3. Scroll down to '....'
|
||||
4. See error
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
id: expected
|
||||
attributes:
|
||||
label: Expected behavior
|
||||
description: A clear and concise description of what you expected to happen.
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
id: screenshots
|
||||
attributes:
|
||||
label: Screenshots
|
||||
description: If applicable, add screenshots to help explain your problem.
|
||||
placeholder: Drag and drop or paste screenshots here
|
||||
|
||||
- type: textarea
|
||||
id: flow
|
||||
attributes:
|
||||
label: Flow
|
||||
description: If applicable, add exported flow in order to help replicating the problem.
|
||||
placeholder: Paste your exported flow here
|
||||
|
||||
- type: dropdown
|
||||
id: method
|
||||
attributes:
|
||||
label: Use Method
|
||||
description: How did you use Flowise?
|
||||
options:
|
||||
- Flowise Cloud
|
||||
- Docker
|
||||
- npx flowise start
|
||||
- pnpm start
|
||||
|
||||
- type: input
|
||||
id: version
|
||||
attributes:
|
||||
label: Flowise Version
|
||||
description: What version of Flowise are you running?
|
||||
placeholder: e.g., 1.2.11
|
||||
|
||||
- type: dropdown
|
||||
id: os
|
||||
attributes:
|
||||
label: Operating System
|
||||
description: What operating system are you using?
|
||||
options:
|
||||
- Windows
|
||||
- macOS
|
||||
- Linux
|
||||
- Other
|
||||
|
||||
- type: dropdown
|
||||
id: browser
|
||||
attributes:
|
||||
label: Browser
|
||||
description: What browser are you using?
|
||||
options:
|
||||
- Chrome
|
||||
- Firefox
|
||||
- Safari
|
||||
- Edge
|
||||
- Other
|
||||
|
||||
- type: textarea
|
||||
id: context
|
||||
attributes:
|
||||
label: Additional context
|
||||
description: Add any other context about the problem here.
|
||||
placeholder: Any additional information that might be helpful
|
||||
|
|
@ -1,13 +0,0 @@
|
|||
---
|
||||
name: Feature request
|
||||
about: Suggest an idea for this project
|
||||
title: '[FEATURE]'
|
||||
labels: ''
|
||||
assignees: ''
|
||||
---
|
||||
|
||||
**Describe the feature you'd like**
|
||||
A clear and concise description of what you would like Flowise to have.
|
||||
|
||||
**Additional context**
|
||||
Add any other context or screenshots about the feature request here.
|
||||
|
|
@ -0,0 +1,67 @@
|
|||
name: Feature Request
|
||||
description: Suggest a new feature or enhancement for Flowise
|
||||
labels: ['enhancement']
|
||||
assignees: []
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
Thanks for suggesting a new feature! Please provide as much detail as possible to help us understand your request.
|
||||
|
||||
- type: textarea
|
||||
id: feature-description
|
||||
attributes:
|
||||
label: Feature Description
|
||||
description: A clear and concise description of the feature you'd like to see in Flowise.
|
||||
placeholder: Describe what you want to be added or improved...
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: dropdown
|
||||
id: feature-category
|
||||
attributes:
|
||||
label: Feature Category
|
||||
description: What category does this feature belong to?
|
||||
options:
|
||||
- UI/UX Improvement
|
||||
- New Node/Component
|
||||
- Integration
|
||||
- Performance
|
||||
- Security
|
||||
- Documentation
|
||||
- API Enhancement
|
||||
- Workflow/Flow Management
|
||||
- Authentication/Authorization
|
||||
- Database/Storage
|
||||
- Deployment/DevOps
|
||||
- Other
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
id: problem-statement
|
||||
attributes:
|
||||
label: Problem Statement
|
||||
description: What problem does this feature solve? What's the current pain point?
|
||||
placeholder: Describe the problem or limitation you're facing...
|
||||
|
||||
- type: textarea
|
||||
id: proposed-solution
|
||||
attributes:
|
||||
label: Proposed Solution
|
||||
description: How would you like this feature to work? Be as specific as possible.
|
||||
placeholder: Describe your ideal solution in detail...
|
||||
|
||||
- type: textarea
|
||||
id: mockups-references
|
||||
attributes:
|
||||
label: Mockups or References
|
||||
description: Any mockups, screenshots, or references to similar features in other tools?
|
||||
placeholder: Upload images or provide links to examples...
|
||||
|
||||
- type: textarea
|
||||
id: additional-context
|
||||
attributes:
|
||||
label: Additional Context
|
||||
description: Any other information, context, or examples that would help us understand this request.
|
||||
placeholder: Add any other relevant information...
|
||||
|
|
@ -1,33 +0,0 @@
|
|||
name: autoSyncMergedPullRequest
|
||||
on:
|
||||
pull_request_target:
|
||||
types:
|
||||
- closed
|
||||
branches: ['main']
|
||||
jobs:
|
||||
autoSyncMergedPullRequest:
|
||||
if: github.event.pull_request.merged == true
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: write
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Show PR info
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
echo The PR #${{ github.event.pull_request.number }} was merged on main branch!
|
||||
- name: Repository Dispatch
|
||||
uses: peter-evans/repository-dispatch@v3
|
||||
with:
|
||||
token: ${{ secrets.AUTOSYNC_TOKEN }}
|
||||
repository: ${{ secrets.AUTOSYNC_CH_URL }}
|
||||
event-type: ${{ secrets.AUTOSYNC_PR_EVENT_TYPE }}
|
||||
client-payload: >-
|
||||
{
|
||||
"ref": "${{ github.ref }}",
|
||||
"prNumber": "${{ github.event.pull_request.number }}",
|
||||
"prTitle": "${{ github.event.pull_request.title }}",
|
||||
"prDescription": "",
|
||||
"sha": "${{ github.sha }}"
|
||||
}
|
||||
|
|
@ -1,36 +0,0 @@
|
|||
name: autoSyncSingleCommit
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
jobs:
|
||||
doNotAutoSyncSingleCommit:
|
||||
if: github.event.commits[1] != null
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: IGNORE autoSyncSingleCommit
|
||||
run: |
|
||||
echo This single commit has came from a merged commit. We will ignore it. This case is handled in autoSyncMergedPullRequest workflow for merge commits comming from merged pull requests only! Beware, the regular merge commits are not handled by any workflow for the moment.
|
||||
autoSyncSingleCommit:
|
||||
if: github.event.commits[1] == null
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: autoSyncSingleCommit
|
||||
env:
|
||||
GITHUB_CONTEXT: ${{ toJSON(github) }}
|
||||
run: |
|
||||
echo Autosync a single commit with id: ${{ github.sha }} from openSource main branch towards cloud hosted version.
|
||||
- name: Repository Dispatch
|
||||
uses: peter-evans/repository-dispatch@v3
|
||||
with:
|
||||
token: ${{ secrets.AUTOSYNC_TOKEN }}
|
||||
repository: ${{ secrets.AUTOSYNC_CH_URL }}
|
||||
event-type: ${{ secrets.AUTOSYNC_SC_EVENT_TYPE }}
|
||||
client-payload: >-
|
||||
{
|
||||
"ref": "${{ github.ref }}",
|
||||
"sha": "${{ github.sha }}",
|
||||
"commitMessage": "${{ github.event.commits[0].id }}"
|
||||
}
|
||||
|
|
@ -0,0 +1,72 @@
|
|||
name: Docker Image CI - Docker Hub
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
node_version:
|
||||
description: 'Node.js version to build this image with.'
|
||||
type: choice
|
||||
required: true
|
||||
default: '20'
|
||||
options:
|
||||
- '20'
|
||||
tag_version:
|
||||
description: 'Tag version of the image to be pushed.'
|
||||
type: string
|
||||
required: true
|
||||
default: 'latest'
|
||||
|
||||
jobs:
|
||||
docker:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Set default values
|
||||
id: defaults
|
||||
run: |
|
||||
echo "node_version=${{ github.event.inputs.node_version || '20' }}" >> $GITHUB_OUTPUT
|
||||
echo "tag_version=${{ github.event.inputs.tag_version || 'latest' }}" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4.1.1
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v3.0.0
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3.0.0
|
||||
|
||||
- name: Login to Docker Hub
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
# -------------------------
|
||||
# Build and push main image
|
||||
# -------------------------
|
||||
- name: Build and push main image
|
||||
uses: docker/build-push-action@v5.3.0
|
||||
with:
|
||||
context: .
|
||||
file: ./docker/Dockerfile
|
||||
build-args: |
|
||||
NODE_VERSION=${{ steps.defaults.outputs.node_version }}
|
||||
platforms: linux/amd64,linux/arm64
|
||||
push: true
|
||||
tags: |
|
||||
flowiseai/flowise:${{ steps.defaults.outputs.tag_version }}
|
||||
|
||||
# -------------------------
|
||||
# Build and push worker image
|
||||
# -------------------------
|
||||
- name: Build and push worker image
|
||||
uses: docker/build-push-action@v5.3.0
|
||||
with:
|
||||
context: .
|
||||
file: docker/worker/Dockerfile
|
||||
build-args: |
|
||||
NODE_VERSION=${{ steps.defaults.outputs.node_version }}
|
||||
platforms: linux/amd64,linux/arm64
|
||||
push: true
|
||||
tags: |
|
||||
flowiseai/flowise-worker:${{ steps.defaults.outputs.tag_version }}
|
||||
|
|
@ -0,0 +1,73 @@
|
|||
name: Docker Image CI - AWS ECR
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
environment:
|
||||
description: 'Environment to push the image to.'
|
||||
required: true
|
||||
default: 'dev'
|
||||
type: choice
|
||||
options:
|
||||
- dev
|
||||
- prod
|
||||
node_version:
|
||||
description: 'Node.js version to build this image with.'
|
||||
type: choice
|
||||
required: true
|
||||
default: '20'
|
||||
options:
|
||||
- '20'
|
||||
tag_version:
|
||||
description: 'Tag version of the image to be pushed.'
|
||||
type: string
|
||||
required: true
|
||||
default: 'latest'
|
||||
|
||||
jobs:
|
||||
docker:
|
||||
runs-on: ubuntu-latest
|
||||
environment: ${{ github.event.inputs.environment }}
|
||||
steps:
|
||||
- name: Set default values
|
||||
id: defaults
|
||||
run: |
|
||||
echo "node_version=${{ github.event.inputs.node_version || '20' }}" >> $GITHUB_OUTPUT
|
||||
echo "tag_version=${{ github.event.inputs.tag_version || 'latest' }}" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4.1.1
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v3.0.0
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3.0.0
|
||||
|
||||
- name: Configure AWS Credentials
|
||||
uses: aws-actions/configure-aws-credentials@v3
|
||||
with:
|
||||
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
||||
aws-region: ${{ secrets.AWS_REGION }}
|
||||
|
||||
- name: Login to Amazon ECR
|
||||
uses: aws-actions/amazon-ecr-login@v1
|
||||
|
||||
# -------------------------
|
||||
# Build and push main image
|
||||
# -------------------------
|
||||
- name: Build and push main image
|
||||
uses: docker/build-push-action@v5.3.0
|
||||
with:
|
||||
context: .
|
||||
file: Dockerfile
|
||||
build-args: |
|
||||
NODE_VERSION=${{ steps.defaults.outputs.node_version }}
|
||||
platforms: linux/amd64,linux/arm64
|
||||
push: true
|
||||
tags: |
|
||||
${{ format('{0}.dkr.ecr.{1}.amazonaws.com/flowise:{2}',
|
||||
secrets.AWS_ACCOUNT_ID,
|
||||
secrets.AWS_REGION,
|
||||
steps.defaults.outputs.tag_version) }}
|
||||
|
|
@ -1,43 +0,0 @@
|
|||
name: Docker Image CI
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
node_version:
|
||||
description: 'Node.js version to build this image with.'
|
||||
type: choice
|
||||
required: true
|
||||
default: '20'
|
||||
options:
|
||||
- '20'
|
||||
tag_version:
|
||||
description: 'Tag version of the image to be pushed.'
|
||||
type: string
|
||||
required: true
|
||||
default: 'latest'
|
||||
|
||||
jobs:
|
||||
docker:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4.1.1
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v3.0.0
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3.0.0
|
||||
- name: Login to Docker Hub
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
- name: Build and push
|
||||
uses: docker/build-push-action@v5.3.0
|
||||
with:
|
||||
context: .
|
||||
file: ./docker/Dockerfile
|
||||
build-args: |
|
||||
NODE_VERSION=${{github.event.inputs.node_version}}
|
||||
platforms: linux/amd64,linux/arm64
|
||||
push: true
|
||||
tags: flowiseai/flowise:${{github.event.inputs.tag_version}}
|
||||
|
|
@ -6,6 +6,7 @@ on:
|
|||
pull_request:
|
||||
branches:
|
||||
- '*'
|
||||
workflow_dispatch:
|
||||
permissions:
|
||||
contents: read
|
||||
jobs:
|
||||
|
|
@ -26,14 +27,16 @@ jobs:
|
|||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: ${{ matrix.node-version }}
|
||||
check-latest: false
|
||||
cache: 'pnpm'
|
||||
- run: npm i -g pnpm
|
||||
cache-dependency-path: 'pnpm-lock.yaml'
|
||||
- run: pnpm install
|
||||
- run: ./node_modules/.bin/cypress install
|
||||
- run: pnpm lint
|
||||
- run: pnpm build
|
||||
- name: Install dependencies
|
||||
env:
|
||||
NODE_OPTIONS: '--max_old_space_size=4096'
|
||||
- name: Cypress install
|
||||
run: pnpm cypress install
|
||||
- name: Install dependencies (Cypress Action)
|
||||
uses: cypress-io/github-action@v6
|
||||
with:
|
||||
working-directory: ./
|
||||
|
|
|
|||
|
|
@ -8,13 +8,12 @@ on:
|
|||
pull_request:
|
||||
branches:
|
||||
- '*'
|
||||
|
||||
workflow_dispatch:
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
PUPPETEER_SKIP_DOWNLOAD: true
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- uses: actions/checkout@v4
|
||||
- run: docker build --no-cache -t flowise .
|
||||
|
|
|
|||
|
|
@ -114,51 +114,52 @@ Flowise has 3 different modules in a single mono repository.
|
|||
|
||||
to make sure everything works fine in production.
|
||||
|
||||
11. Commit code and submit Pull Request from forked branch pointing to [Flowise master](https://github.com/FlowiseAI/Flowise/tree/master).
|
||||
11. Commit code and submit Pull Request from forked branch pointing to [Flowise main](https://github.com/FlowiseAI/Flowise/tree/main).
|
||||
|
||||
## 🌱 Env Variables
|
||||
|
||||
Flowise support different environment variables to configure your instance. You can specify the following variables in the `.env` file inside `packages/server` folder. Read [more](https://docs.flowiseai.com/environment-variables)
|
||||
|
||||
| Variable | Description | Type | Default |
|
||||
| ---------------------------- | ----------------------------------------------------------------------------------------------- | ------------------------------------------------ | ----------------------------------- |
|
||||
| PORT | The HTTP port Flowise runs on | Number | 3000 |
|
||||
| CORS_ORIGINS | The allowed origins for all cross-origin HTTP calls | String | |
|
||||
| IFRAME_ORIGINS | The allowed origins for iframe src embedding | String | |
|
||||
| FLOWISE_USERNAME | Username to login | String | |
|
||||
| FLOWISE_PASSWORD | Password to login | String | |
|
||||
| FLOWISE_FILE_SIZE_LIMIT | Upload File Size Limit | String | 50mb |
|
||||
| DISABLE_CHATFLOW_REUSE | Forces the creation of a new ChatFlow for each call instead of reusing existing ones from cache | Boolean | |
|
||||
| DEBUG | Print logs from components | Boolean | |
|
||||
| LOG_PATH | Location where log files are stored | String | `your-path/Flowise/logs` |
|
||||
| LOG_LEVEL | Different levels of logs | Enum String: `error`, `info`, `verbose`, `debug` | `info` |
|
||||
| LOG_JSON_SPACES | Spaces to beautify JSON logs | | 2 |
|
||||
| APIKEY_STORAGE_TYPE | To store api keys on a JSON file or database. Default is `json` | Enum String: `json`, `db` | `json` |
|
||||
| APIKEY_PATH | Location where api keys are saved when `APIKEY_STORAGE_TYPE` is `json` | String | `your-path/Flowise/packages/server` |
|
||||
| TOOL_FUNCTION_BUILTIN_DEP | NodeJS built-in modules to be used for Tool Function | String | |
|
||||
| TOOL_FUNCTION_EXTERNAL_DEP | External modules to be used for Tool Function | String | |
|
||||
| DATABASE_TYPE | Type of database to store the flowise data | Enum String: `sqlite`, `mysql`, `postgres` | `sqlite` |
|
||||
| DATABASE_PATH | Location where database is saved (When DATABASE_TYPE is sqlite) | String | `your-home-dir/.flowise` |
|
||||
| DATABASE_HOST | Host URL or IP address (When DATABASE_TYPE is not sqlite) | String | |
|
||||
| DATABASE_PORT | Database port (When DATABASE_TYPE is not sqlite) | String | |
|
||||
| DATABASE_USER | Database username (When DATABASE_TYPE is not sqlite) | String | |
|
||||
| DATABASE_PASSWORD | Database password (When DATABASE_TYPE is not sqlite) | String | |
|
||||
| DATABASE_NAME | Database name (When DATABASE_TYPE is not sqlite) | String | |
|
||||
| DATABASE_SSL_KEY_BASE64 | Database SSL client cert in base64 (takes priority over DATABASE_SSL) | Boolean | false |
|
||||
| DATABASE_SSL | Database connection overssl (When DATABASE_TYPE is postgre) | Boolean | false |
|
||||
| SECRETKEY_PATH | Location where encryption key (used to encrypt/decrypt credentials) is saved | String | `your-path/Flowise/packages/server` |
|
||||
| FLOWISE_SECRETKEY_OVERWRITE | Encryption key to be used instead of the key stored in SECRETKEY_PATH | String | |
|
||||
| DISABLE_FLOWISE_TELEMETRY | Turn off telemetry | Boolean | |
|
||||
| MODEL_LIST_CONFIG_JSON | File path to load list of models from your local config file | String | `/your_model_list_config_file_path` |
|
||||
| STORAGE_TYPE | Type of storage for uploaded files. default is `local` | Enum String: `s3`, `local` | `local` |
|
||||
| BLOB_STORAGE_PATH | Local folder path where uploaded files are stored when `STORAGE_TYPE` is `local` | String | `your-home-dir/.flowise/storage` |
|
||||
| S3_STORAGE_BUCKET_NAME | Bucket name to hold the uploaded files when `STORAGE_TYPE` is `s3` | String | |
|
||||
| S3_STORAGE_ACCESS_KEY_ID | AWS Access Key | String | |
|
||||
| S3_STORAGE_SECRET_ACCESS_KEY | AWS Secret Key | String | |
|
||||
| S3_STORAGE_REGION | Region for S3 bucket | String | |
|
||||
| S3_ENDPOINT_URL | Custom Endpoint for S3 | String | |
|
||||
| S3_FORCE_PATH_STYLE | Set this to true to force the request to use path-style addressing | Boolean | false |
|
||||
| SHOW_COMMUNITY_NODES | Show nodes created by community | Boolean | |
|
||||
| Variable | Description | Type | Default |
|
||||
| ---------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------ | ----------------------------------- |
|
||||
| PORT | The HTTP port Flowise runs on | Number | 3000 |
|
||||
| CORS_ORIGINS | The allowed origins for all cross-origin HTTP calls | String | |
|
||||
| IFRAME_ORIGINS | The allowed origins for iframe src embedding | String | |
|
||||
| FLOWISE_FILE_SIZE_LIMIT | Upload File Size Limit | String | 50mb |
|
||||
| DEBUG | Print logs from components | Boolean | |
|
||||
| LOG_PATH | Location where log files are stored | String | `your-path/Flowise/logs` |
|
||||
| LOG_LEVEL | Different levels of logs | Enum String: `error`, `info`, `verbose`, `debug` | `info` |
|
||||
| LOG_JSON_SPACES | Spaces to beautify JSON logs | | 2 |
|
||||
| TOOL_FUNCTION_BUILTIN_DEP | NodeJS built-in modules to be used for Custom Tool or Function | String | |
|
||||
| TOOL_FUNCTION_EXTERNAL_DEP | External modules to be used for Custom Tool or Function | String | |
|
||||
| ALLOW_BUILTIN_DEP | Allow project dependencies to be used for Custom Tool or Function | Boolean | false |
|
||||
| DATABASE_TYPE | Type of database to store the flowise data | Enum String: `sqlite`, `mysql`, `postgres` | `sqlite` |
|
||||
| DATABASE_PATH | Location where database is saved (When DATABASE_TYPE is sqlite) | String | `your-home-dir/.flowise` |
|
||||
| DATABASE_HOST | Host URL or IP address (When DATABASE_TYPE is not sqlite) | String | |
|
||||
| DATABASE_PORT | Database port (When DATABASE_TYPE is not sqlite) | String | |
|
||||
| DATABASE_USER | Database username (When DATABASE_TYPE is not sqlite) | String | |
|
||||
| DATABASE_PASSWORD | Database password (When DATABASE_TYPE is not sqlite) | String | |
|
||||
| DATABASE_NAME | Database name (When DATABASE_TYPE is not sqlite) | String | |
|
||||
| DATABASE_SSL_KEY_BASE64 | Database SSL client cert in base64 (takes priority over DATABASE_SSL) | Boolean | false |
|
||||
| DATABASE_SSL | Database connection overssl (When DATABASE_TYPE is postgre) | Boolean | false |
|
||||
| SECRETKEY_PATH | Location where encryption key (used to encrypt/decrypt credentials) is saved | String | `your-path/Flowise/packages/server` |
|
||||
| FLOWISE_SECRETKEY_OVERWRITE | Encryption key to be used instead of the key stored in SECRETKEY_PATH | String | |
|
||||
| MODEL_LIST_CONFIG_JSON | File path to load list of models from your local config file | String | `/your_model_list_config_file_path` |
|
||||
| STORAGE_TYPE | Type of storage for uploaded files. default is `local` | Enum String: `s3`, `local`, `gcs` | `local` |
|
||||
| BLOB_STORAGE_PATH | Local folder path where uploaded files are stored when `STORAGE_TYPE` is `local` | String | `your-home-dir/.flowise/storage` |
|
||||
| S3_STORAGE_BUCKET_NAME | Bucket name to hold the uploaded files when `STORAGE_TYPE` is `s3` | String | |
|
||||
| S3_STORAGE_ACCESS_KEY_ID | AWS Access Key | String | |
|
||||
| S3_STORAGE_SECRET_ACCESS_KEY | AWS Secret Key | String | |
|
||||
| S3_STORAGE_REGION | Region for S3 bucket | String | |
|
||||
| S3_ENDPOINT_URL | Custom Endpoint for S3 | String | |
|
||||
| S3_FORCE_PATH_STYLE | Set this to true to force the request to use path-style addressing | Boolean | false |
|
||||
| GOOGLE_CLOUD_STORAGE_PROJ_ID | The GCP project id for cloud storage & logging when `STORAGE_TYPE` is `gcs` | String | |
|
||||
| GOOGLE_CLOUD_STORAGE_CREDENTIAL | The credential key file path when `STORAGE_TYPE` is `gcs` | String | |
|
||||
| GOOGLE_CLOUD_STORAGE_BUCKET_NAME | Bucket name to hold the uploaded files when `STORAGE_TYPE` is `gcs` | String | |
|
||||
| GOOGLE_CLOUD_UNIFORM_BUCKET_ACCESS | Enable uniform bucket level access when `STORAGE_TYPE` is `gcs` | Boolean | true |
|
||||
| SHOW_COMMUNITY_NODES | Show nodes created by community | Boolean | |
|
||||
| DISABLED_NODES | Hide nodes from UI (comma separated list of node names) | String | |
|
||||
| TRUST_PROXY | Configure proxy trust settings for proper IP detection. Values: 'true' (trust all), 'false' (disable), number (hop count), or Express proxy values (e.g., 'loopback', 'linklocal', 'uniquelocal', IP addresses). [Learn More](https://expressjs.com/en/guide/behind-proxies.html) | Boolean/String/Number | true |
|
||||
|
||||
You can also specify the env variables when using `npx`. For example:
|
||||
|
||||
|
|
|
|||
35
Dockerfile
35
Dockerfile
|
|
@ -5,30 +5,41 @@
|
|||
# docker run -d -p 3000:3000 flowise
|
||||
|
||||
FROM node:20-alpine
|
||||
RUN apk add --update libc6-compat python3 make g++
|
||||
# needed for pdfjs-dist
|
||||
RUN apk add --no-cache build-base cairo-dev pango-dev
|
||||
|
||||
# Install Chromium
|
||||
RUN apk add --no-cache chromium
|
||||
|
||||
# Install PNPM globally
|
||||
RUN npm install -g pnpm
|
||||
# Install system dependencies and build tools
|
||||
RUN apk update && \
|
||||
apk add --no-cache \
|
||||
libc6-compat \
|
||||
python3 \
|
||||
make \
|
||||
g++ \
|
||||
build-base \
|
||||
cairo-dev \
|
||||
pango-dev \
|
||||
chromium \
|
||||
curl && \
|
||||
npm install -g pnpm
|
||||
|
||||
ENV PUPPETEER_SKIP_DOWNLOAD=true
|
||||
ENV PUPPETEER_EXECUTABLE_PATH=/usr/bin/chromium-browser
|
||||
|
||||
ENV NODE_OPTIONS=--max-old-space-size=8192
|
||||
|
||||
WORKDIR /usr/src
|
||||
WORKDIR /usr/src/flowise
|
||||
|
||||
# Copy app source
|
||||
COPY . .
|
||||
|
||||
RUN pnpm install
|
||||
# Install dependencies and build
|
||||
RUN pnpm install && \
|
||||
pnpm build
|
||||
|
||||
RUN pnpm build
|
||||
# Give the node user ownership of the application files
|
||||
RUN chown -R node:node .
|
||||
|
||||
# Switch to non-root user (node user already exists in node:20-alpine)
|
||||
USER node
|
||||
|
||||
EXPOSE 3000
|
||||
|
||||
CMD [ "pnpm", "start" ]
|
||||
CMD [ "pnpm", "start" ]
|
||||
14
LICENSE.md
14
LICENSE.md
|
|
@ -1,6 +1,14 @@
|
|||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
Copyright (c) 2023-present FlowiseAI, Inc.
|
||||
|
||||
Portions of this software are licensed as follows:
|
||||
|
||||
- All content that resides under https://github.com/FlowiseAI/Flowise/tree/main/packages/server/src/enterprise directory and files with explicit copyright notice such as [IdentityManager.ts](https://github.com/FlowiseAI/Flowise/tree/main/packages/server/src/IdentityManager.ts) are licensed under [Commercial License](https://github.com/FlowiseAI/Flowise/tree/main/packages/server/src/enterprise/LICENSE.md).
|
||||
- All third party components incorporated into the FlowiseAI Software are licensed under the original license provided by the owner of the applicable component.
|
||||
- Content outside of the above mentioned directories or restrictions above is available under the "Apache 2.0" license as defined below.
|
||||
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
|
|
|
|||
107
README.md
107
README.md
|
|
@ -1,8 +1,11 @@
|
|||
<!-- markdownlint-disable MD030 -->
|
||||
|
||||
<img width="100%" src="https://github.com/FlowiseAI/Flowise/blob/main/images/flowise.png?raw=true"></a>
|
||||
<p align="center">
|
||||
<img src="https://github.com/FlowiseAI/Flowise/blob/main/images/flowise_white.svg#gh-light-mode-only">
|
||||
<img src="https://github.com/FlowiseAI/Flowise/blob/main/images/flowise_dark.svg#gh-dark-mode-only">
|
||||
</p>
|
||||
|
||||
# Flowise - Build LLM Apps Easily
|
||||
<div align="center">
|
||||
|
||||
[](https://github.com/FlowiseAI/Flowise/releases)
|
||||
[](https://discord.gg/jbaHfsRVBW)
|
||||
|
|
@ -10,11 +13,26 @@
|
|||
[](https://star-history.com/#FlowiseAI/Flowise)
|
||||
[](https://github.com/FlowiseAI/Flowise/fork)
|
||||
|
||||
English | [中文](./i18n/README-ZH.md) | [日本語](./i18n/README-JA.md) | [한국어](./i18n/README-KR.md)
|
||||
English | [繁體中文](./i18n/README-TW.md) | [简体中文](./i18n/README-ZH.md) | [日本語](./i18n/README-JA.md) | [한국어](./i18n/README-KR.md)
|
||||
|
||||
<h3>Drag & drop UI to build your customized LLM flow</h3>
|
||||
</div>
|
||||
|
||||
<h3>Build AI Agents, Visually</h3>
|
||||
<a href="https://github.com/FlowiseAI/Flowise">
|
||||
<img width="100%" src="https://github.com/FlowiseAI/Flowise/blob/main/images/flowise.gif?raw=true"></a>
|
||||
<img width="100%" src="https://github.com/FlowiseAI/Flowise/blob/main/images/flowise_agentflow.gif?raw=true"></a>
|
||||
|
||||
## 📚 Table of Contents
|
||||
|
||||
- [⚡ Quick Start](#-quick-start)
|
||||
- [🐳 Docker](#-docker)
|
||||
- [👨💻 Developers](#-developers)
|
||||
- [🌱 Env Variables](#-env-variables)
|
||||
- [📖 Documentation](#-documentation)
|
||||
- [🌐 Self Host](#-self-host)
|
||||
- [☁️ Flowise Cloud](#️-flowise-cloud)
|
||||
- [🙋 Support](#-support)
|
||||
- [🙌 Contributing](#-contributing)
|
||||
- [📄 License](#-license)
|
||||
|
||||
## ⚡Quick Start
|
||||
|
||||
|
|
@ -30,12 +48,6 @@ Download and Install [NodeJS](https://nodejs.org/en/download) >= 18.15.0
|
|||
npx flowise start
|
||||
```
|
||||
|
||||
With username & password
|
||||
|
||||
```bash
|
||||
npx flowise start --FLOWISE_USERNAME=user --FLOWISE_PASSWORD=1234
|
||||
```
|
||||
|
||||
3. Open [http://localhost:3000](http://localhost:3000)
|
||||
|
||||
## 🐳 Docker
|
||||
|
|
@ -52,9 +64,11 @@ Download and Install [NodeJS](https://nodejs.org/en/download) >= 18.15.0
|
|||
### Docker Image
|
||||
|
||||
1. Build the image locally:
|
||||
|
||||
```bash
|
||||
docker build --no-cache -t flowise .
|
||||
```
|
||||
|
||||
2. Run image:
|
||||
|
||||
```bash
|
||||
|
|
@ -62,6 +76,7 @@ Download and Install [NodeJS](https://nodejs.org/en/download) >= 18.15.0
|
|||
```
|
||||
|
||||
3. Stop image:
|
||||
|
||||
```bash
|
||||
docker stop flowise
|
||||
```
|
||||
|
|
@ -84,13 +99,13 @@ Flowise has 3 different modules in a single mono repository.
|
|||
|
||||
### Setup
|
||||
|
||||
1. Clone the repository
|
||||
1. Clone the repository:
|
||||
|
||||
```bash
|
||||
git clone https://github.com/FlowiseAI/Flowise.git
|
||||
```
|
||||
|
||||
2. Go into repository folder
|
||||
2. Go into repository folder:
|
||||
|
||||
```bash
|
||||
cd Flowise
|
||||
|
|
@ -110,10 +125,24 @@ Flowise has 3 different modules in a single mono repository.
|
|||
|
||||
<details>
|
||||
<summary>Exit code 134 (JavaScript heap out of memory)</summary>
|
||||
If you get this error when running the above `build` script, try increasing the Node.js heap size and run the script again:
|
||||
If you get this error when running the above `build` script, try increasing the Node.js heap size and run the script again:
|
||||
|
||||
export NODE_OPTIONS="--max-old-space-size=4096"
|
||||
pnpm build
|
||||
```bash
|
||||
# macOS / Linux / Git Bash
|
||||
export NODE_OPTIONS="--max-old-space-size=4096"
|
||||
|
||||
# Windows PowerShell
|
||||
$env:NODE_OPTIONS="--max-old-space-size=4096"
|
||||
|
||||
# Windows CMD
|
||||
set NODE_OPTIONS=--max-old-space-size=4096
|
||||
```
|
||||
|
||||
Then run:
|
||||
|
||||
```bash
|
||||
pnpm build
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
|
|
@ -129,7 +158,7 @@ Flowise has 3 different modules in a single mono repository.
|
|||
|
||||
- Create `.env` file and specify the `VITE_PORT` (refer to `.env.example`) in `packages/ui`
|
||||
- Create `.env` file and specify the `PORT` (refer to `.env.example`) in `packages/server`
|
||||
- Run
|
||||
- Run:
|
||||
|
||||
```bash
|
||||
pnpm dev
|
||||
|
|
@ -137,42 +166,37 @@ Flowise has 3 different modules in a single mono repository.
|
|||
|
||||
Any code changes will reload the app automatically on [http://localhost:8080](http://localhost:8080)
|
||||
|
||||
## 🔒 Authentication
|
||||
|
||||
To enable app level authentication, add `FLOWISE_USERNAME` and `FLOWISE_PASSWORD` to the `.env` file in `packages/server`:
|
||||
|
||||
```
|
||||
FLOWISE_USERNAME=user
|
||||
FLOWISE_PASSWORD=1234
|
||||
```
|
||||
|
||||
## 🌱 Env Variables
|
||||
|
||||
Flowise support different environment variables to configure your instance. You can specify the following variables in the `.env` file inside `packages/server` folder. Read [more](https://github.com/FlowiseAI/Flowise/blob/main/CONTRIBUTING.md#-env-variables)
|
||||
Flowise supports different environment variables to configure your instance. You can specify the following variables in the `.env` file inside `packages/server` folder. Read [more](https://github.com/FlowiseAI/Flowise/blob/main/CONTRIBUTING.md#-env-variables)
|
||||
|
||||
## 📖 Documentation
|
||||
|
||||
[Flowise Docs](https://docs.flowiseai.com/)
|
||||
You can view the Flowise Docs [here](https://docs.flowiseai.com/)
|
||||
|
||||
## 🌐 Self Host
|
||||
|
||||
Deploy Flowise self-hosted in your existing infrastructure, we support various [deployments](https://docs.flowiseai.com/configuration/deployment)
|
||||
|
||||
- [AWS](https://docs.flowiseai.com/deployment/aws)
|
||||
- [Azure](https://docs.flowiseai.com/deployment/azure)
|
||||
- [Digital Ocean](https://docs.flowiseai.com/deployment/digital-ocean)
|
||||
- [GCP](https://docs.flowiseai.com/deployment/gcp)
|
||||
- [AWS](https://docs.flowiseai.com/configuration/deployment/aws)
|
||||
- [Azure](https://docs.flowiseai.com/configuration/deployment/azure)
|
||||
- [Digital Ocean](https://docs.flowiseai.com/configuration/deployment/digital-ocean)
|
||||
- [GCP](https://docs.flowiseai.com/configuration/deployment/gcp)
|
||||
- [Alibaba Cloud](https://computenest.console.aliyun.com/service/instance/create/default?type=user&ServiceName=Flowise社区版)
|
||||
- <details>
|
||||
<summary>Others</summary>
|
||||
|
||||
- [Railway](https://docs.flowiseai.com/deployment/railway)
|
||||
- [Railway](https://docs.flowiseai.com/configuration/deployment/railway)
|
||||
|
||||
[](https://railway.app/template/pn4G8S?referralCode=WVNPD9)
|
||||
|
||||
- [Render](https://docs.flowiseai.com/deployment/render)
|
||||
- [Northflank](https://northflank.com/stacks/deploy-flowiseai)
|
||||
|
||||
[](https://docs.flowiseai.com/deployment/render)
|
||||
[](https://northflank.com/stacks/deploy-flowiseai)
|
||||
|
||||
- [Render](https://docs.flowiseai.com/configuration/deployment/render)
|
||||
|
||||
[](https://docs.flowiseai.com/configuration/deployment/render)
|
||||
|
||||
- [HuggingFace Spaces](https://docs.flowiseai.com/deployment/hugging-face)
|
||||
|
||||
|
|
@ -182,9 +206,9 @@ Deploy Flowise self-hosted in your existing infrastructure, we support various [
|
|||
|
||||
[](https://elest.io/open-source/flowiseai)
|
||||
|
||||
- [Sealos](https://cloud.sealos.io/?openapp=system-template%3FtemplateName%3Dflowise)
|
||||
- [Sealos](https://template.sealos.io/deploy?templateName=flowise)
|
||||
|
||||
[](https://cloud.sealos.io/?openapp=system-template%3FtemplateName%3Dflowise)
|
||||
[](https://template.sealos.io/deploy?templateName=flowise)
|
||||
|
||||
- [RepoCloud](https://repocloud.io/details/?app_id=29)
|
||||
|
||||
|
|
@ -194,11 +218,11 @@ Deploy Flowise self-hosted in your existing infrastructure, we support various [
|
|||
|
||||
## ☁️ Flowise Cloud
|
||||
|
||||
[Get Started with Flowise Cloud](https://flowiseai.com/)
|
||||
Get Started with [Flowise Cloud](https://flowiseai.com/).
|
||||
|
||||
## 🙋 Support
|
||||
|
||||
Feel free to ask any questions, raise problems, and request new features in [discussion](https://github.com/FlowiseAI/Flowise/discussions)
|
||||
Feel free to ask any questions, raise problems, and request new features in [Discussion](https://github.com/FlowiseAI/Flowise/discussions).
|
||||
|
||||
## 🙌 Contributing
|
||||
|
||||
|
|
@ -206,9 +230,10 @@ Thanks go to these awesome contributors
|
|||
|
||||
<a href="https://github.com/FlowiseAI/Flowise/graphs/contributors">
|
||||
<img src="https://contrib.rocks/image?repo=FlowiseAI/Flowise" />
|
||||
</a>
|
||||
</a><br><br>
|
||||
|
||||
See [Contributing Guide](CONTRIBUTING.md). Reach out to us at [Discord](https://discord.gg/jbaHfsRVBW) if you have any questions or issues.
|
||||
|
||||
See [contributing guide](CONTRIBUTING.md). Reach out to us at [Discord](https://discord.gg/jbaHfsRVBW) if you have any questions or issues.
|
||||
[](https://star-history.com/#FlowiseAI/Flowise&Date)
|
||||
|
||||
## 📄 License
|
||||
|
|
|
|||
|
|
@ -0,0 +1,38 @@
|
|||
### Responsible Disclosure Policy
|
||||
|
||||
At Flowise, we prioritize security and continuously work to safeguard our systems. However, vulnerabilities can still exist. If you identify a security issue, please report it to us so we can address it promptly. Your cooperation helps us better protect our platform and users.
|
||||
|
||||
### Out of scope vulnerabilities
|
||||
|
||||
- Clickjacking on pages without sensitive actions
|
||||
- CSRF on unauthenticated/logout/login pages
|
||||
- Attacks requiring MITM (Man-in-the-Middle) or physical device access
|
||||
- Social engineering attacks
|
||||
- Activities that cause service disruption (DoS)
|
||||
- Content spoofing and text injection without a valid attack vector
|
||||
- Email spoofing
|
||||
- Absence of DNSSEC, CAA, CSP headers
|
||||
- Missing Secure or HTTP-only flag on non-sensitive cookies
|
||||
- Dead links
|
||||
- User enumeration
|
||||
|
||||
### Reporting Guidelines
|
||||
|
||||
- Submit your findings to https://github.com/FlowiseAI/Flowise/security
|
||||
- Provide clear details to help us reproduce and fix the issue quickly.
|
||||
|
||||
### Disclosure Guidelines
|
||||
|
||||
- Do not publicly disclose vulnerabilities until we have assessed, resolved, and notified affected users.
|
||||
- If you plan to present your research (e.g., at a conference or in a blog), share a draft with us at least **30 days in advance** for review.
|
||||
- Avoid including:
|
||||
- Data from any Flowise customer projects
|
||||
- Flowise user/customer information
|
||||
- Details about Flowise employees, contractors, or partners
|
||||
|
||||
### Response to Reports
|
||||
|
||||
- We will acknowledge your report within **5 business days** and provide an estimated resolution timeline.
|
||||
- Your report will be kept **confidential**, and your details will not be shared without your consent.
|
||||
|
||||
We appreciate your efforts in helping us maintain a secure platform and look forward to working together to resolve any issues responsibly.
|
||||
|
|
@ -1,14 +1,12 @@
|
|||
PORT=3000
|
||||
|
||||
# APIKEY_PATH=/your_apikey_path/.flowise # (will be deprecated by end of 2025)
|
||||
|
||||
############################################################################################################
|
||||
############################################## DATABASE ####################################################
|
||||
############################################################################################################
|
||||
|
||||
DATABASE_PATH=/root/.flowise
|
||||
APIKEY_PATH=/root/.flowise
|
||||
SECRETKEY_PATH=/root/.flowise
|
||||
LOG_PATH=/root/.flowise/logs
|
||||
BLOB_STORAGE_PATH=/root/.flowise/storage
|
||||
|
||||
# NUMBER_OF_PROXIES= 1
|
||||
# CORS_ORIGINS=*
|
||||
# IFRAME_ORIGINS=*
|
||||
|
||||
# DATABASE_TYPE=postgres
|
||||
# DATABASE_PORT=5432
|
||||
# DATABASE_HOST=""
|
||||
|
|
@ -16,46 +14,118 @@ BLOB_STORAGE_PATH=/root/.flowise/storage
|
|||
# DATABASE_USER=root
|
||||
# DATABASE_PASSWORD=mypassword
|
||||
# DATABASE_SSL=true
|
||||
# DATABASE_REJECT_UNAUTHORIZED=true
|
||||
# DATABASE_SSL_KEY_BASE64=<Self signed certificate in BASE64>
|
||||
|
||||
# FLOWISE_USERNAME=user
|
||||
# FLOWISE_PASSWORD=1234
|
||||
# FLOWISE_SECRETKEY_OVERWRITE=myencryptionkey
|
||||
# FLOWISE_FILE_SIZE_LIMIT=50mb
|
||||
|
||||
# DISABLE_CHATFLOW_REUSE=true
|
||||
############################################################################################################
|
||||
############################################## SECRET KEYS #################################################
|
||||
############################################################################################################
|
||||
|
||||
# SECRETKEY_STORAGE_TYPE=local #(local | aws)
|
||||
SECRETKEY_PATH=/root/.flowise
|
||||
# FLOWISE_SECRETKEY_OVERWRITE=myencryptionkey # (if you want to overwrite the secret key)
|
||||
# SECRETKEY_AWS_ACCESS_KEY=<your-access-key>
|
||||
# SECRETKEY_AWS_SECRET_KEY=<your-secret-key>
|
||||
# SECRETKEY_AWS_REGION=us-west-2
|
||||
# SECRETKEY_AWS_NAME=FlowiseEncryptionKey
|
||||
|
||||
|
||||
############################################################################################################
|
||||
############################################## LOGGING #####################################################
|
||||
############################################################################################################
|
||||
|
||||
# DEBUG=true
|
||||
# LOG_LEVEL=info (error | warn | info | verbose | debug)
|
||||
LOG_PATH=/root/.flowise/logs
|
||||
# LOG_LEVEL=info #(error | warn | info | verbose | debug)
|
||||
# LOG_SANITIZE_BODY_FIELDS=password,pwd,pass,secret,token,apikey,api_key,accesstoken,access_token,refreshtoken,refresh_token,clientsecret,client_secret,privatekey,private_key,secretkey,secret_key,auth,authorization,credential,credentials
|
||||
# LOG_SANITIZE_HEADER_FIELDS=authorization,x-api-key,x-auth-token,cookie
|
||||
# TOOL_FUNCTION_BUILTIN_DEP=crypto,fs
|
||||
# TOOL_FUNCTION_EXTERNAL_DEP=moment,lodash
|
||||
# ALLOW_BUILTIN_DEP=false
|
||||
|
||||
# LANGCHAIN_TRACING_V2=true
|
||||
# LANGCHAIN_ENDPOINT=https://api.smith.langchain.com
|
||||
# LANGCHAIN_API_KEY=your_api_key
|
||||
# LANGCHAIN_PROJECT=your_project
|
||||
|
||||
# DISABLE_FLOWISE_TELEMETRY=true
|
||||
############################################################################################################
|
||||
############################################## STORAGE #####################################################
|
||||
############################################################################################################
|
||||
|
||||
# Uncomment the following line to enable model list config, load the list of models from your local config file
|
||||
# see https://raw.githubusercontent.com/FlowiseAI/Flowise/main/packages/components/models.json for the format
|
||||
# MODEL_LIST_CONFIG_JSON=/your_model_list_config_file_path
|
||||
|
||||
# STORAGE_TYPE=local (local | s3)
|
||||
# BLOB_STORAGE_PATH=/your_storage_path/.flowise/storage
|
||||
# STORAGE_TYPE=local (local | s3 | gcs)
|
||||
BLOB_STORAGE_PATH=/root/.flowise/storage
|
||||
# S3_STORAGE_BUCKET_NAME=flowise
|
||||
# S3_STORAGE_ACCESS_KEY_ID=<your-access-key>
|
||||
# S3_STORAGE_SECRET_ACCESS_KEY=<your-secret-key>
|
||||
# S3_STORAGE_REGION=us-west-2
|
||||
# S3_ENDPOINT_URL=<custom-s3-endpoint-url>
|
||||
# S3_FORCE_PATH_STYLE=false
|
||||
# GOOGLE_CLOUD_STORAGE_CREDENTIAL=/the/keyfilename/path
|
||||
# GOOGLE_CLOUD_STORAGE_PROJ_ID=<your-gcp-project-id>
|
||||
# GOOGLE_CLOUD_STORAGE_BUCKET_NAME=<the-bucket-name>
|
||||
# GOOGLE_CLOUD_UNIFORM_BUCKET_ACCESS=true
|
||||
|
||||
# APIKEY_STORAGE_TYPE=json (json | db)
|
||||
|
||||
############################################################################################################
|
||||
############################################## SETTINGS ####################################################
|
||||
############################################################################################################
|
||||
|
||||
# NUMBER_OF_PROXIES= 1
|
||||
# CORS_ORIGINS=*
|
||||
# IFRAME_ORIGINS=*
|
||||
# FLOWISE_FILE_SIZE_LIMIT=50mb
|
||||
# SHOW_COMMUNITY_NODES=true
|
||||
# DISABLE_FLOWISE_TELEMETRY=true
|
||||
# DISABLED_NODES=bufferMemory,chatOpenAI (comma separated list of node names to disable)
|
||||
# Uncomment the following line to enable model list config, load the list of models from your local config file
|
||||
# see https://raw.githubusercontent.com/FlowiseAI/Flowise/main/packages/components/models.json for the format
|
||||
# MODEL_LIST_CONFIG_JSON=/your_model_list_config_file_path
|
||||
|
||||
|
||||
############################################################################################################
|
||||
############################################ AUTH PARAMETERS ###############################################
|
||||
############################################################################################################
|
||||
|
||||
# APP_URL=http://localhost:3000
|
||||
|
||||
# SMTP_HOST=smtp.host.com
|
||||
# SMTP_PORT=465
|
||||
# SMTP_USER=smtp_user
|
||||
# SMTP_PASSWORD=smtp_password
|
||||
# SMTP_SECURE=true
|
||||
# ALLOW_UNAUTHORIZED_CERTS=false
|
||||
# SENDER_EMAIL=team@example.com
|
||||
|
||||
JWT_AUTH_TOKEN_SECRET='AABBCCDDAABBCCDDAABBCCDDAABBCCDDAABBCCDD'
|
||||
JWT_REFRESH_TOKEN_SECRET='AABBCCDDAABBCCDDAABBCCDDAABBCCDDAABBCCDD'
|
||||
JWT_ISSUER='ISSUER'
|
||||
JWT_AUDIENCE='AUDIENCE'
|
||||
JWT_TOKEN_EXPIRY_IN_MINUTES=360
|
||||
JWT_REFRESH_TOKEN_EXPIRY_IN_MINUTES=43200
|
||||
# EXPIRE_AUTH_TOKENS_ON_RESTART=true # (if you need to expire all tokens on app restart)
|
||||
# EXPRESS_SESSION_SECRET=flowise
|
||||
# SECURE_COOKIES=
|
||||
|
||||
# INVITE_TOKEN_EXPIRY_IN_HOURS=24
|
||||
# PASSWORD_RESET_TOKEN_EXPIRY_IN_MINS=15
|
||||
# PASSWORD_SALT_HASH_ROUNDS=10
|
||||
# TOKEN_HASH_SECRET='popcorn'
|
||||
|
||||
# WORKSPACE_INVITE_TEMPLATE_PATH=/path/to/custom/workspace_invite.hbs
|
||||
|
||||
|
||||
############################################################################################################
|
||||
############################################# ENTERPRISE ###################################################
|
||||
############################################################################################################
|
||||
|
||||
# LICENSE_URL=
|
||||
# FLOWISE_EE_LICENSE_KEY=
|
||||
# OFFLINE=
|
||||
|
||||
|
||||
############################################################################################################
|
||||
########################################### METRICS COLLECTION #############################################
|
||||
############################################################################################################
|
||||
|
||||
# POSTHOG_PUBLIC_API_KEY=your_posthog_public_api_key
|
||||
|
||||
######################
|
||||
# METRICS COLLECTION
|
||||
#######################
|
||||
# ENABLE_METRICS=false
|
||||
# METRICS_PROVIDER=prometheus # prometheus | open_telemetry
|
||||
# METRICS_INCLUDE_NODE_METRICS=true # default is true
|
||||
|
|
@ -66,8 +136,45 @@ BLOB_STORAGE_PATH=/root/.flowise/storage
|
|||
# METRICS_OPEN_TELEMETRY_PROTOCOL=http # http | grpc | proto (default is http)
|
||||
# METRICS_OPEN_TELEMETRY_DEBUG=true # default is false
|
||||
|
||||
# Uncomment the following lines to enable global agent proxy
|
||||
# see https://www.npmjs.com/package/global-agent for more details
|
||||
|
||||
############################################################################################################
|
||||
############################################### PROXY ######################################################
|
||||
############################################################################################################
|
||||
|
||||
# Uncomment the following lines to enable global agent proxy, see https://www.npmjs.com/package/global-agent for more details
|
||||
# GLOBAL_AGENT_HTTP_PROXY=CorporateHttpProxyUrl
|
||||
# GLOBAL_AGENT_HTTPS_PROXY=CorporateHttpsProxyUrl
|
||||
# GLOBAL_AGENT_NO_PROXY=ExceptionHostsToBypassProxyIfNeeded
|
||||
# GLOBAL_AGENT_NO_PROXY=ExceptionHostsToBypassProxyIfNeeded
|
||||
|
||||
|
||||
############################################################################################################
|
||||
########################################### QUEUE CONFIGURATION ############################################
|
||||
############################################################################################################
|
||||
|
||||
# MODE=queue #(queue | main)
|
||||
# QUEUE_NAME=flowise-queue
|
||||
# QUEUE_REDIS_EVENT_STREAM_MAX_LEN=100000
|
||||
# WORKER_CONCURRENCY=100000
|
||||
# REMOVE_ON_AGE=86400
|
||||
# REMOVE_ON_COUNT=10000
|
||||
# REDIS_URL=
|
||||
# REDIS_HOST=localhost
|
||||
# REDIS_PORT=6379
|
||||
# REDIS_USERNAME=
|
||||
# REDIS_PASSWORD=
|
||||
# REDIS_TLS=
|
||||
# REDIS_CERT=
|
||||
# REDIS_KEY=
|
||||
# REDIS_CA=
|
||||
# REDIS_KEEP_ALIVE=
|
||||
# ENABLE_BULLMQ_DASHBOARD=
|
||||
|
||||
|
||||
############################################################################################################
|
||||
############################################## SECURITY ####################################################
|
||||
############################################################################################################
|
||||
|
||||
# HTTP_DENY_LIST=
|
||||
# CUSTOM_MCP_SECURITY_CHECK=true
|
||||
# CUSTOM_MCP_PROTOCOL=sse #(stdio | sse)
|
||||
# TRUST_PROXY=true #(true | false | 1 | loopback| linklocal | uniquelocal | IP addresses | loopback, IP addresses)
|
||||
|
|
|
|||
|
|
@ -13,7 +13,7 @@ RUN npm install -g flowise
|
|||
FROM node:20-alpine
|
||||
|
||||
# Install runtime dependencies
|
||||
RUN apk add --no-cache chromium git python3 py3-pip make g++ build-base cairo-dev pango-dev
|
||||
RUN apk add --no-cache chromium git python3 py3-pip make g++ build-base cairo-dev pango-dev curl
|
||||
|
||||
# Set the environment variable for Puppeteer to find Chromium
|
||||
ENV PUPPETEER_EXECUTABLE_PATH=/usr/bin/chromium-browser
|
||||
|
|
|
|||
|
|
@ -9,28 +9,43 @@ Starts Flowise from [DockerHub Image](https://hub.docker.com/r/flowiseai/flowise
|
|||
3. Open [http://localhost:3000](http://localhost:3000)
|
||||
4. You can bring the containers down by `docker compose stop`
|
||||
|
||||
## 🔒 Authentication
|
||||
|
||||
1. Create `.env` file and specify the `PORT`, `FLOWISE_USERNAME`, and `FLOWISE_PASSWORD` (refer to `.env.example`)
|
||||
2. Pass `FLOWISE_USERNAME` and `FLOWISE_PASSWORD` to the `docker-compose.yml` file:
|
||||
```
|
||||
environment:
|
||||
- PORT=${PORT}
|
||||
- FLOWISE_USERNAME=${FLOWISE_USERNAME}
|
||||
- FLOWISE_PASSWORD=${FLOWISE_PASSWORD}
|
||||
```
|
||||
3. `docker compose up -d`
|
||||
4. Open [http://localhost:3000](http://localhost:3000)
|
||||
5. You can bring the containers down by `docker compose stop`
|
||||
|
||||
## 🌱 Env Variables
|
||||
|
||||
If you like to persist your data (flows, logs, apikeys, credentials), set these variables in the `.env` file inside `docker` folder:
|
||||
If you like to persist your data (flows, logs, credentials, storage), set these variables in the `.env` file inside `docker` folder:
|
||||
|
||||
- DATABASE_PATH=/root/.flowise
|
||||
- APIKEY_PATH=/root/.flowise
|
||||
- LOG_PATH=/root/.flowise/logs
|
||||
- SECRETKEY_PATH=/root/.flowise
|
||||
- BLOB_STORAGE_PATH=/root/.flowise/storage
|
||||
|
||||
Flowise also support different environment variables to configure your instance. Read [more](https://docs.flowiseai.com/environment-variables)
|
||||
Flowise also supports different environment variables to configure your instance. Read [more](https://docs.flowiseai.com/configuration/environment-variables)
|
||||
|
||||
## Queue Mode:
|
||||
|
||||
### Building from source:
|
||||
|
||||
You can build the images for worker and main from scratch with:
|
||||
|
||||
```
|
||||
docker compose -f docker-compose-queue-source.yml up -d
|
||||
```
|
||||
|
||||
Monitor Health:
|
||||
|
||||
```
|
||||
docker compose -f docker-compose-queue-source.yml ps
|
||||
```
|
||||
|
||||
### From pre-built images:
|
||||
|
||||
You can also use the pre-built images:
|
||||
|
||||
```
|
||||
docker compose -f docker-compose-queue-prebuilt.yml up -d
|
||||
```
|
||||
|
||||
Monitor Health:
|
||||
|
||||
```
|
||||
docker compose -f docker-compose-queue-prebuilt.yml ps
|
||||
```
|
||||
|
|
|
|||
|
|
@ -0,0 +1,316 @@
|
|||
version: '3.1'
|
||||
|
||||
services:
|
||||
redis:
|
||||
image: redis:alpine
|
||||
container_name: flowise-redis
|
||||
ports:
|
||||
- '6379:6379'
|
||||
volumes:
|
||||
- redis_data:/data
|
||||
networks:
|
||||
- flowise-net
|
||||
restart: always
|
||||
|
||||
flowise:
|
||||
image: flowiseai/flowise:latest
|
||||
container_name: flowise-main
|
||||
restart: always
|
||||
ports:
|
||||
- '${PORT:-3000}:${PORT:-3000}'
|
||||
volumes:
|
||||
- ~/.flowise:/root/.flowise
|
||||
environment:
|
||||
# --- Essential Flowise Vars ---
|
||||
- PORT=${PORT:-3000}
|
||||
- DATABASE_PATH=${DATABASE_PATH:-/root/.flowise}
|
||||
- DATABASE_TYPE=${DATABASE_TYPE}
|
||||
- DATABASE_PORT=${DATABASE_PORT}
|
||||
- DATABASE_HOST=${DATABASE_HOST}
|
||||
- DATABASE_NAME=${DATABASE_NAME}
|
||||
- DATABASE_USER=${DATABASE_USER}
|
||||
- DATABASE_PASSWORD=${DATABASE_PASSWORD}
|
||||
- DATABASE_SSL=${DATABASE_SSL}
|
||||
- DATABASE_SSL_KEY_BASE64=${DATABASE_SSL_KEY_BASE64}
|
||||
|
||||
# SECRET KEYS
|
||||
- SECRETKEY_STORAGE_TYPE=${SECRETKEY_STORAGE_TYPE}
|
||||
- SECRETKEY_PATH=${SECRETKEY_PATH}
|
||||
- FLOWISE_SECRETKEY_OVERWRITE=${FLOWISE_SECRETKEY_OVERWRITE}
|
||||
- SECRETKEY_AWS_ACCESS_KEY=${SECRETKEY_AWS_ACCESS_KEY}
|
||||
- SECRETKEY_AWS_SECRET_KEY=${SECRETKEY_AWS_SECRET_KEY}
|
||||
- SECRETKEY_AWS_REGION=${SECRETKEY_AWS_REGION}
|
||||
- SECRETKEY_AWS_NAME=${SECRETKEY_AWS_NAME}
|
||||
|
||||
# LOGGING
|
||||
- DEBUG=${DEBUG}
|
||||
- LOG_PATH=${LOG_PATH}
|
||||
- LOG_LEVEL=${LOG_LEVEL}
|
||||
- LOG_SANITIZE_BODY_FIELDS=${LOG_SANITIZE_BODY_FIELDS}
|
||||
- LOG_SANITIZE_HEADER_FIELDS=${LOG_SANITIZE_HEADER_FIELDS}
|
||||
|
||||
# CUSTOM TOOL/FUNCTION DEPENDENCIES
|
||||
- TOOL_FUNCTION_BUILTIN_DEP=${TOOL_FUNCTION_BUILTIN_DEP}
|
||||
- TOOL_FUNCTION_EXTERNAL_DEP=${TOOL_FUNCTION_EXTERNAL_DEP}
|
||||
- ALLOW_BUILTIN_DEP=${ALLOW_BUILTIN_DEP}
|
||||
|
||||
# STORAGE
|
||||
- STORAGE_TYPE=${STORAGE_TYPE}
|
||||
- BLOB_STORAGE_PATH=${BLOB_STORAGE_PATH}
|
||||
- S3_STORAGE_BUCKET_NAME=${S3_STORAGE_BUCKET_NAME}
|
||||
- S3_STORAGE_ACCESS_KEY_ID=${S3_STORAGE_ACCESS_KEY_ID}
|
||||
- S3_STORAGE_SECRET_ACCESS_KEY=${S3_STORAGE_SECRET_ACCESS_KEY}
|
||||
- S3_STORAGE_REGION=${S3_STORAGE_REGION}
|
||||
- S3_ENDPOINT_URL=${S3_ENDPOINT_URL}
|
||||
- S3_FORCE_PATH_STYLE=${S3_FORCE_PATH_STYLE}
|
||||
- GOOGLE_CLOUD_STORAGE_CREDENTIAL=${GOOGLE_CLOUD_STORAGE_CREDENTIAL}
|
||||
- GOOGLE_CLOUD_STORAGE_PROJ_ID=${GOOGLE_CLOUD_STORAGE_PROJ_ID}
|
||||
- GOOGLE_CLOUD_STORAGE_BUCKET_NAME=${GOOGLE_CLOUD_STORAGE_BUCKET_NAME}
|
||||
- GOOGLE_CLOUD_UNIFORM_BUCKET_ACCESS=${GOOGLE_CLOUD_UNIFORM_BUCKET_ACCESS}
|
||||
|
||||
# SETTINGS
|
||||
- NUMBER_OF_PROXIES=${NUMBER_OF_PROXIES}
|
||||
- CORS_ORIGINS=${CORS_ORIGINS}
|
||||
- IFRAME_ORIGINS=${IFRAME_ORIGINS}
|
||||
- FLOWISE_FILE_SIZE_LIMIT=${FLOWISE_FILE_SIZE_LIMIT}
|
||||
- SHOW_COMMUNITY_NODES=${SHOW_COMMUNITY_NODES}
|
||||
- DISABLE_FLOWISE_TELEMETRY=${DISABLE_FLOWISE_TELEMETRY}
|
||||
- DISABLED_NODES=${DISABLED_NODES}
|
||||
- MODEL_LIST_CONFIG_JSON=${MODEL_LIST_CONFIG_JSON}
|
||||
|
||||
# AUTH PARAMETERS
|
||||
- APP_URL=${APP_URL}
|
||||
- JWT_AUTH_TOKEN_SECRET=${JWT_AUTH_TOKEN_SECRET}
|
||||
- JWT_REFRESH_TOKEN_SECRET=${JWT_REFRESH_TOKEN_SECRET}
|
||||
- JWT_ISSUER=${JWT_ISSUER}
|
||||
- JWT_AUDIENCE=${JWT_AUDIENCE}
|
||||
- JWT_TOKEN_EXPIRY_IN_MINUTES=${JWT_TOKEN_EXPIRY_IN_MINUTES}
|
||||
- JWT_REFRESH_TOKEN_EXPIRY_IN_MINUTES=${JWT_REFRESH_TOKEN_EXPIRY_IN_MINUTES}
|
||||
- EXPIRE_AUTH_TOKENS_ON_RESTART=${EXPIRE_AUTH_TOKENS_ON_RESTART}
|
||||
- EXPRESS_SESSION_SECRET=${EXPRESS_SESSION_SECRET}
|
||||
- PASSWORD_RESET_TOKEN_EXPIRY_IN_MINS=${PASSWORD_RESET_TOKEN_EXPIRY_IN_MINS}
|
||||
- PASSWORD_SALT_HASH_ROUNDS=${PASSWORD_SALT_HASH_ROUNDS}
|
||||
- TOKEN_HASH_SECRET=${TOKEN_HASH_SECRET}
|
||||
- SECURE_COOKIES=${SECURE_COOKIES}
|
||||
|
||||
# EMAIL
|
||||
- SMTP_HOST=${SMTP_HOST}
|
||||
- SMTP_PORT=${SMTP_PORT}
|
||||
- SMTP_USER=${SMTP_USER}
|
||||
- SMTP_PASSWORD=${SMTP_PASSWORD}
|
||||
- SMTP_SECURE=${SMTP_SECURE}
|
||||
- ALLOW_UNAUTHORIZED_CERTS=${ALLOW_UNAUTHORIZED_CERTS}
|
||||
- SENDER_EMAIL=${SENDER_EMAIL}
|
||||
|
||||
# ENTERPRISE
|
||||
- LICENSE_URL=${LICENSE_URL}
|
||||
- FLOWISE_EE_LICENSE_KEY=${FLOWISE_EE_LICENSE_KEY}
|
||||
- OFFLINE=${OFFLINE}
|
||||
- INVITE_TOKEN_EXPIRY_IN_HOURS=${INVITE_TOKEN_EXPIRY_IN_HOURS}
|
||||
- WORKSPACE_INVITE_TEMPLATE_PATH=${WORKSPACE_INVITE_TEMPLATE_PATH}
|
||||
|
||||
# METRICS COLLECTION
|
||||
- POSTHOG_PUBLIC_API_KEY=${POSTHOG_PUBLIC_API_KEY}
|
||||
- ENABLE_METRICS=${ENABLE_METRICS}
|
||||
- METRICS_PROVIDER=${METRICS_PROVIDER}
|
||||
- METRICS_INCLUDE_NODE_METRICS=${METRICS_INCLUDE_NODE_METRICS}
|
||||
- METRICS_SERVICE_NAME=${METRICS_SERVICE_NAME}
|
||||
- METRICS_OPEN_TELEMETRY_METRIC_ENDPOINT=${METRICS_OPEN_TELEMETRY_METRIC_ENDPOINT}
|
||||
- METRICS_OPEN_TELEMETRY_PROTOCOL=${METRICS_OPEN_TELEMETRY_PROTOCOL}
|
||||
- METRICS_OPEN_TELEMETRY_DEBUG=${METRICS_OPEN_TELEMETRY_DEBUG}
|
||||
|
||||
# PROXY
|
||||
- GLOBAL_AGENT_HTTP_PROXY=${GLOBAL_AGENT_HTTP_PROXY}
|
||||
- GLOBAL_AGENT_HTTPS_PROXY=${GLOBAL_AGENT_HTTPS_PROXY}
|
||||
- GLOBAL_AGENT_NO_PROXY=${GLOBAL_AGENT_NO_PROXY}
|
||||
|
||||
# --- Queue Configuration (Main Instance) ---
|
||||
- MODE=${MODE:-queue}
|
||||
- QUEUE_NAME=${QUEUE_NAME:-flowise-queue}
|
||||
- QUEUE_REDIS_EVENT_STREAM_MAX_LEN=${QUEUE_REDIS_EVENT_STREAM_MAX_LEN}
|
||||
- WORKER_CONCURRENCY=${WORKER_CONCURRENCY}
|
||||
- REMOVE_ON_AGE=${REMOVE_ON_AGE}
|
||||
- REMOVE_ON_COUNT=${REMOVE_ON_COUNT}
|
||||
- REDIS_URL=${REDIS_URL:-redis://redis:6379}
|
||||
- REDIS_HOST=${REDIS_HOST}
|
||||
- REDIS_PORT=${REDIS_PORT}
|
||||
- REDIS_USERNAME=${REDIS_USERNAME}
|
||||
- REDIS_PASSWORD=${REDIS_PASSWORD}
|
||||
- REDIS_TLS=${REDIS_TLS}
|
||||
- REDIS_CERT=${REDIS_CERT}
|
||||
- REDIS_KEY=${REDIS_KEY}
|
||||
- REDIS_CA=${REDIS_CA}
|
||||
- REDIS_KEEP_ALIVE=${REDIS_KEEP_ALIVE}
|
||||
- ENABLE_BULLMQ_DASHBOARD=${ENABLE_BULLMQ_DASHBOARD}
|
||||
|
||||
# SECURITY
|
||||
- CUSTOM_MCP_SECURITY_CHECK=${CUSTOM_MCP_SECURITY_CHECK}
|
||||
- CUSTOM_MCP_PROTOCOL=${CUSTOM_MCP_PROTOCOL}
|
||||
- HTTP_DENY_LIST=${HTTP_DENY_LIST}
|
||||
- TRUST_PROXY=${TRUST_PROXY}
|
||||
healthcheck:
|
||||
test: ['CMD', 'curl', '-f', 'http://localhost:${PORT:-3000}/api/v1/ping']
|
||||
interval: 10s
|
||||
timeout: 5s
|
||||
retries: 5
|
||||
start_period: 30s
|
||||
entrypoint: /bin/sh -c "sleep 3; flowise start"
|
||||
depends_on:
|
||||
- redis
|
||||
networks:
|
||||
- flowise-net
|
||||
|
||||
flowise-worker:
|
||||
image: flowiseai/flowise-worker:latest
|
||||
container_name: flowise-worker
|
||||
restart: always
|
||||
volumes:
|
||||
- ~/.flowise:/root/.flowise
|
||||
environment:
|
||||
# --- Essential Flowise Vars ---
|
||||
- WORKER_PORT=${WORKER_PORT:-5566}
|
||||
- DATABASE_PATH=${DATABASE_PATH:-/root/.flowise}
|
||||
- DATABASE_TYPE=${DATABASE_TYPE}
|
||||
- DATABASE_PORT=${DATABASE_PORT}
|
||||
- DATABASE_HOST=${DATABASE_HOST}
|
||||
- DATABASE_NAME=${DATABASE_NAME}
|
||||
- DATABASE_USER=${DATABASE_USER}
|
||||
- DATABASE_PASSWORD=${DATABASE_PASSWORD}
|
||||
- DATABASE_SSL=${DATABASE_SSL}
|
||||
- DATABASE_SSL_KEY_BASE64=${DATABASE_SSL_KEY_BASE64}
|
||||
|
||||
# SECRET KEYS
|
||||
- SECRETKEY_STORAGE_TYPE=${SECRETKEY_STORAGE_TYPE}
|
||||
- SECRETKEY_PATH=${SECRETKEY_PATH}
|
||||
- FLOWISE_SECRETKEY_OVERWRITE=${FLOWISE_SECRETKEY_OVERWRITE}
|
||||
- SECRETKEY_AWS_ACCESS_KEY=${SECRETKEY_AWS_ACCESS_KEY}
|
||||
- SECRETKEY_AWS_SECRET_KEY=${SECRETKEY_AWS_SECRET_KEY}
|
||||
- SECRETKEY_AWS_REGION=${SECRETKEY_AWS_REGION}
|
||||
- SECRETKEY_AWS_NAME=${SECRETKEY_AWS_NAME}
|
||||
|
||||
# LOGGING
|
||||
- DEBUG=${DEBUG}
|
||||
- LOG_PATH=${LOG_PATH}
|
||||
- LOG_LEVEL=${LOG_LEVEL}
|
||||
- LOG_SANITIZE_BODY_FIELDS=${LOG_SANITIZE_BODY_FIELDS}
|
||||
- LOG_SANITIZE_HEADER_FIELDS=${LOG_SANITIZE_HEADER_FIELDS}
|
||||
|
||||
# CUSTOM TOOL/FUNCTION DEPENDENCIES
|
||||
- TOOL_FUNCTION_BUILTIN_DEP=${TOOL_FUNCTION_BUILTIN_DEP}
|
||||
- TOOL_FUNCTION_EXTERNAL_DEP=${TOOL_FUNCTION_EXTERNAL_DEP}
|
||||
- ALLOW_BUILTIN_DEP=${ALLOW_BUILTIN_DEP}
|
||||
|
||||
# STORAGE
|
||||
- STORAGE_TYPE=${STORAGE_TYPE}
|
||||
- BLOB_STORAGE_PATH=${BLOB_STORAGE_PATH}
|
||||
- S3_STORAGE_BUCKET_NAME=${S3_STORAGE_BUCKET_NAME}
|
||||
- S3_STORAGE_ACCESS_KEY_ID=${S3_STORAGE_ACCESS_KEY_ID}
|
||||
- S3_STORAGE_SECRET_ACCESS_KEY=${S3_STORAGE_SECRET_ACCESS_KEY}
|
||||
- S3_STORAGE_REGION=${S3_STORAGE_REGION}
|
||||
- S3_ENDPOINT_URL=${S3_ENDPOINT_URL}
|
||||
- S3_FORCE_PATH_STYLE=${S3_FORCE_PATH_STYLE}
|
||||
- GOOGLE_CLOUD_STORAGE_CREDENTIAL=${GOOGLE_CLOUD_STORAGE_CREDENTIAL}
|
||||
- GOOGLE_CLOUD_STORAGE_PROJ_ID=${GOOGLE_CLOUD_STORAGE_PROJ_ID}
|
||||
- GOOGLE_CLOUD_STORAGE_BUCKET_NAME=${GOOGLE_CLOUD_STORAGE_BUCKET_NAME}
|
||||
- GOOGLE_CLOUD_UNIFORM_BUCKET_ACCESS=${GOOGLE_CLOUD_UNIFORM_BUCKET_ACCESS}
|
||||
|
||||
# SETTINGS
|
||||
- NUMBER_OF_PROXIES=${NUMBER_OF_PROXIES}
|
||||
- CORS_ORIGINS=${CORS_ORIGINS}
|
||||
- IFRAME_ORIGINS=${IFRAME_ORIGINS}
|
||||
- FLOWISE_FILE_SIZE_LIMIT=${FLOWISE_FILE_SIZE_LIMIT}
|
||||
- SHOW_COMMUNITY_NODES=${SHOW_COMMUNITY_NODES}
|
||||
- DISABLE_FLOWISE_TELEMETRY=${DISABLE_FLOWISE_TELEMETRY}
|
||||
- DISABLED_NODES=${DISABLED_NODES}
|
||||
- MODEL_LIST_CONFIG_JSON=${MODEL_LIST_CONFIG_JSON}
|
||||
|
||||
# AUTH PARAMETERS
|
||||
- APP_URL=${APP_URL}
|
||||
- JWT_AUTH_TOKEN_SECRET=${JWT_AUTH_TOKEN_SECRET}
|
||||
- JWT_REFRESH_TOKEN_SECRET=${JWT_REFRESH_TOKEN_SECRET}
|
||||
- JWT_ISSUER=${JWT_ISSUER}
|
||||
- JWT_AUDIENCE=${JWT_AUDIENCE}
|
||||
- JWT_TOKEN_EXPIRY_IN_MINUTES=${JWT_TOKEN_EXPIRY_IN_MINUTES}
|
||||
- JWT_REFRESH_TOKEN_EXPIRY_IN_MINUTES=${JWT_REFRESH_TOKEN_EXPIRY_IN_MINUTES}
|
||||
- EXPIRE_AUTH_TOKENS_ON_RESTART=${EXPIRE_AUTH_TOKENS_ON_RESTART}
|
||||
- EXPRESS_SESSION_SECRET=${EXPRESS_SESSION_SECRET}
|
||||
- PASSWORD_RESET_TOKEN_EXPIRY_IN_MINS=${PASSWORD_RESET_TOKEN_EXPIRY_IN_MINS}
|
||||
- PASSWORD_SALT_HASH_ROUNDS=${PASSWORD_SALT_HASH_ROUNDS}
|
||||
- TOKEN_HASH_SECRET=${TOKEN_HASH_SECRET}
|
||||
- SECURE_COOKIES=${SECURE_COOKIES}
|
||||
|
||||
# EMAIL
|
||||
- SMTP_HOST=${SMTP_HOST}
|
||||
- SMTP_PORT=${SMTP_PORT}
|
||||
- SMTP_USER=${SMTP_USER}
|
||||
- SMTP_PASSWORD=${SMTP_PASSWORD}
|
||||
- SMTP_SECURE=${SMTP_SECURE}
|
||||
- ALLOW_UNAUTHORIZED_CERTS=${ALLOW_UNAUTHORIZED_CERTS}
|
||||
- SENDER_EMAIL=${SENDER_EMAIL}
|
||||
|
||||
# ENTERPRISE
|
||||
- LICENSE_URL=${LICENSE_URL}
|
||||
- FLOWISE_EE_LICENSE_KEY=${FLOWISE_EE_LICENSE_KEY}
|
||||
- OFFLINE=${OFFLINE}
|
||||
- INVITE_TOKEN_EXPIRY_IN_HOURS=${INVITE_TOKEN_EXPIRY_IN_HOURS}
|
||||
- WORKSPACE_INVITE_TEMPLATE_PATH=${WORKSPACE_INVITE_TEMPLATE_PATH}
|
||||
|
||||
# METRICS COLLECTION
|
||||
- POSTHOG_PUBLIC_API_KEY=${POSTHOG_PUBLIC_API_KEY}
|
||||
- ENABLE_METRICS=${ENABLE_METRICS}
|
||||
- METRICS_PROVIDER=${METRICS_PROVIDER}
|
||||
- METRICS_INCLUDE_NODE_METRICS=${METRICS_INCLUDE_NODE_METRICS}
|
||||
- METRICS_SERVICE_NAME=${METRICS_SERVICE_NAME}
|
||||
- METRICS_OPEN_TELEMETRY_METRIC_ENDPOINT=${METRICS_OPEN_TELEMETRY_METRIC_ENDPOINT}
|
||||
- METRICS_OPEN_TELEMETRY_PROTOCOL=${METRICS_OPEN_TELEMETRY_PROTOCOL}
|
||||
- METRICS_OPEN_TELEMETRY_DEBUG=${METRICS_OPEN_TELEMETRY_DEBUG}
|
||||
|
||||
# PROXY
|
||||
- GLOBAL_AGENT_HTTP_PROXY=${GLOBAL_AGENT_HTTP_PROXY}
|
||||
- GLOBAL_AGENT_HTTPS_PROXY=${GLOBAL_AGENT_HTTPS_PROXY}
|
||||
- GLOBAL_AGENT_NO_PROXY=${GLOBAL_AGENT_NO_PROXY}
|
||||
|
||||
# --- Queue Configuration (Worker Instance) ---
|
||||
- MODE=${MODE:-queue}
|
||||
- QUEUE_NAME=${QUEUE_NAME:-flowise-queue}
|
||||
- QUEUE_REDIS_EVENT_STREAM_MAX_LEN=${QUEUE_REDIS_EVENT_STREAM_MAX_LEN}
|
||||
- WORKER_CONCURRENCY=${WORKER_CONCURRENCY}
|
||||
- REMOVE_ON_AGE=${REMOVE_ON_AGE}
|
||||
- REMOVE_ON_COUNT=${REMOVE_ON_COUNT}
|
||||
- REDIS_URL=${REDIS_URL:-redis://redis:6379}
|
||||
- REDIS_HOST=${REDIS_HOST}
|
||||
- REDIS_PORT=${REDIS_PORT}
|
||||
- REDIS_USERNAME=${REDIS_USERNAME}
|
||||
- REDIS_PASSWORD=${REDIS_PASSWORD}
|
||||
- REDIS_TLS=${REDIS_TLS}
|
||||
- REDIS_CERT=${REDIS_CERT}
|
||||
- REDIS_KEY=${REDIS_KEY}
|
||||
- REDIS_CA=${REDIS_CA}
|
||||
- REDIS_KEEP_ALIVE=${REDIS_KEEP_ALIVE}
|
||||
- ENABLE_BULLMQ_DASHBOARD=${ENABLE_BULLMQ_DASHBOARD}
|
||||
|
||||
# SECURITY
|
||||
- CUSTOM_MCP_SECURITY_CHECK=${CUSTOM_MCP_SECURITY_CHECK}
|
||||
- CUSTOM_MCP_PROTOCOL=${CUSTOM_MCP_PROTOCOL}
|
||||
- HTTP_DENY_LIST=${HTTP_DENY_LIST}
|
||||
- TRUST_PROXY=${TRUST_PROXY}
|
||||
healthcheck:
|
||||
test: ['CMD', 'curl', '-f', 'http://localhost:${WORKER_PORT:-5566}/healthz']
|
||||
interval: 10s
|
||||
timeout: 5s
|
||||
retries: 5
|
||||
start_period: 30s
|
||||
entrypoint: /bin/sh -c "node /app/healthcheck/healthcheck.js & sleep 5 && pnpm run start-worker"
|
||||
depends_on:
|
||||
- redis
|
||||
- flowise
|
||||
networks:
|
||||
- flowise-net
|
||||
|
||||
volumes:
|
||||
redis_data:
|
||||
driver: local
|
||||
|
||||
networks:
|
||||
flowise-net:
|
||||
driver: bridge
|
||||
|
|
@ -0,0 +1,71 @@
|
|||
version: '3.1'
|
||||
|
||||
services:
|
||||
redis:
|
||||
image: redis:alpine
|
||||
container_name: flowise-redis
|
||||
ports:
|
||||
- '6379:6379'
|
||||
volumes:
|
||||
- redis_data:/data
|
||||
networks:
|
||||
- flowise-net
|
||||
|
||||
flowise:
|
||||
container_name: flowise-main
|
||||
build:
|
||||
context: .. # Build using the Dockerfile in the root directory
|
||||
dockerfile: docker/Dockerfile
|
||||
ports:
|
||||
- '${PORT}:${PORT}'
|
||||
volumes:
|
||||
# Mount local .flowise to container's default location
|
||||
- ../.flowise:/root/.flowise
|
||||
environment:
|
||||
# --- Essential Flowise Vars ---
|
||||
- PORT=${PORT:-3000}
|
||||
- DATABASE_PATH=/root/.flowise
|
||||
- SECRETKEY_PATH=/root/.flowise
|
||||
- LOG_PATH=/root/.flowise/logs
|
||||
- BLOB_STORAGE_PATH=/root/.flowise/storage
|
||||
# --- Queue Vars (Main Instance) ---
|
||||
- MODE=queue
|
||||
- QUEUE_NAME=flowise-queue # Ensure this matches worker
|
||||
- REDIS_URL=redis://redis:6379 # Use service name 'redis'
|
||||
depends_on:
|
||||
- redis
|
||||
networks:
|
||||
- flowise-net
|
||||
|
||||
flowise-worker:
|
||||
container_name: flowise-worker
|
||||
build:
|
||||
context: .. # Build context is still the root
|
||||
dockerfile: docker/worker/Dockerfile # Ensure this path is correct
|
||||
volumes:
|
||||
# Mount same local .flowise to worker
|
||||
- ../.flowise:/root/.flowise
|
||||
environment:
|
||||
# --- Essential Flowise Vars ---
|
||||
- WORKER_PORT=${WORKER_PORT:-5566} # Port for worker healthcheck
|
||||
- DATABASE_PATH=/root/.flowise
|
||||
- SECRETKEY_PATH=/root/.flowise
|
||||
- LOG_PATH=/root/.flowise/logs
|
||||
- BLOB_STORAGE_PATH=/root/.flowise/storage
|
||||
# --- Queue Vars (Main Instance) ---
|
||||
- MODE=queue
|
||||
- QUEUE_NAME=flowise-queue # Ensure this matches worker
|
||||
- REDIS_URL=redis://redis:6379 # Use service name 'redis'
|
||||
depends_on:
|
||||
- redis
|
||||
- flowise
|
||||
networks:
|
||||
- flowise-net
|
||||
|
||||
volumes:
|
||||
redis_data:
|
||||
driver: local
|
||||
|
||||
networks:
|
||||
flowise-net:
|
||||
driver: bridge
|
||||
|
|
@ -2,16 +2,12 @@ version: '3.1'
|
|||
|
||||
services:
|
||||
flowise:
|
||||
image: flowiseai/flowise
|
||||
image: flowiseai/flowise:latest
|
||||
restart: always
|
||||
environment:
|
||||
- PORT=${PORT}
|
||||
- CORS_ORIGINS=${CORS_ORIGINS}
|
||||
- IFRAME_ORIGINS=${IFRAME_ORIGINS}
|
||||
- FLOWISE_USERNAME=${FLOWISE_USERNAME}
|
||||
- FLOWISE_PASSWORD=${FLOWISE_PASSWORD}
|
||||
- FLOWISE_FILE_SIZE_LIMIT=${FLOWISE_FILE_SIZE_LIMIT}
|
||||
- DEBUG=${DEBUG}
|
||||
|
||||
# DATABASE
|
||||
- DATABASE_PATH=${DATABASE_PATH}
|
||||
- DATABASE_TYPE=${DATABASE_TYPE}
|
||||
- DATABASE_PORT=${DATABASE_PORT}
|
||||
|
|
@ -21,20 +17,130 @@ services:
|
|||
- DATABASE_PASSWORD=${DATABASE_PASSWORD}
|
||||
- DATABASE_SSL=${DATABASE_SSL}
|
||||
- DATABASE_SSL_KEY_BASE64=${DATABASE_SSL_KEY_BASE64}
|
||||
- APIKEY_STORAGE_TYPE=${APIKEY_STORAGE_TYPE}
|
||||
- APIKEY_PATH=${APIKEY_PATH}
|
||||
|
||||
# SECRET KEYS
|
||||
- SECRETKEY_STORAGE_TYPE=${SECRETKEY_STORAGE_TYPE}
|
||||
- SECRETKEY_PATH=${SECRETKEY_PATH}
|
||||
- FLOWISE_SECRETKEY_OVERWRITE=${FLOWISE_SECRETKEY_OVERWRITE}
|
||||
- LOG_LEVEL=${LOG_LEVEL}
|
||||
- SECRETKEY_AWS_ACCESS_KEY=${SECRETKEY_AWS_ACCESS_KEY}
|
||||
- SECRETKEY_AWS_SECRET_KEY=${SECRETKEY_AWS_SECRET_KEY}
|
||||
- SECRETKEY_AWS_REGION=${SECRETKEY_AWS_REGION}
|
||||
- SECRETKEY_AWS_NAME=${SECRETKEY_AWS_NAME}
|
||||
|
||||
# LOGGING
|
||||
- DEBUG=${DEBUG}
|
||||
- LOG_PATH=${LOG_PATH}
|
||||
- LOG_LEVEL=${LOG_LEVEL}
|
||||
- LOG_SANITIZE_BODY_FIELDS=${LOG_SANITIZE_BODY_FIELDS}
|
||||
- LOG_SANITIZE_HEADER_FIELDS=${LOG_SANITIZE_HEADER_FIELDS}
|
||||
|
||||
# CUSTOM TOOL/FUNCTION DEPENDENCIES
|
||||
- TOOL_FUNCTION_BUILTIN_DEP=${TOOL_FUNCTION_BUILTIN_DEP}
|
||||
- TOOL_FUNCTION_EXTERNAL_DEP=${TOOL_FUNCTION_EXTERNAL_DEP}
|
||||
- ALLOW_BUILTIN_DEP=${ALLOW_BUILTIN_DEP}
|
||||
|
||||
# STORAGE
|
||||
- STORAGE_TYPE=${STORAGE_TYPE}
|
||||
- BLOB_STORAGE_PATH=${BLOB_STORAGE_PATH}
|
||||
- S3_STORAGE_BUCKET_NAME=${S3_STORAGE_BUCKET_NAME}
|
||||
- S3_STORAGE_ACCESS_KEY_ID=${S3_STORAGE_ACCESS_KEY_ID}
|
||||
- S3_STORAGE_SECRET_ACCESS_KEY=${S3_STORAGE_SECRET_ACCESS_KEY}
|
||||
- S3_STORAGE_REGION=${S3_STORAGE_REGION}
|
||||
- S3_ENDPOINT_URL=${S3_ENDPOINT_URL}
|
||||
- S3_FORCE_PATH_STYLE=${S3_FORCE_PATH_STYLE}
|
||||
- GOOGLE_CLOUD_STORAGE_CREDENTIAL=${GOOGLE_CLOUD_STORAGE_CREDENTIAL}
|
||||
- GOOGLE_CLOUD_STORAGE_PROJ_ID=${GOOGLE_CLOUD_STORAGE_PROJ_ID}
|
||||
- GOOGLE_CLOUD_STORAGE_BUCKET_NAME=${GOOGLE_CLOUD_STORAGE_BUCKET_NAME}
|
||||
- GOOGLE_CLOUD_UNIFORM_BUCKET_ACCESS=${GOOGLE_CLOUD_UNIFORM_BUCKET_ACCESS}
|
||||
|
||||
# SETTINGS
|
||||
- NUMBER_OF_PROXIES=${NUMBER_OF_PROXIES}
|
||||
- CORS_ORIGINS=${CORS_ORIGINS}
|
||||
- IFRAME_ORIGINS=${IFRAME_ORIGINS}
|
||||
- FLOWISE_FILE_SIZE_LIMIT=${FLOWISE_FILE_SIZE_LIMIT}
|
||||
- SHOW_COMMUNITY_NODES=${SHOW_COMMUNITY_NODES}
|
||||
- DISABLE_FLOWISE_TELEMETRY=${DISABLE_FLOWISE_TELEMETRY}
|
||||
- DISABLED_NODES=${DISABLED_NODES}
|
||||
- MODEL_LIST_CONFIG_JSON=${MODEL_LIST_CONFIG_JSON}
|
||||
|
||||
# AUTH PARAMETERS
|
||||
- APP_URL=${APP_URL}
|
||||
- JWT_AUTH_TOKEN_SECRET=${JWT_AUTH_TOKEN_SECRET}
|
||||
- JWT_REFRESH_TOKEN_SECRET=${JWT_REFRESH_TOKEN_SECRET}
|
||||
- JWT_ISSUER=${JWT_ISSUER}
|
||||
- JWT_AUDIENCE=${JWT_AUDIENCE}
|
||||
- JWT_TOKEN_EXPIRY_IN_MINUTES=${JWT_TOKEN_EXPIRY_IN_MINUTES}
|
||||
- JWT_REFRESH_TOKEN_EXPIRY_IN_MINUTES=${JWT_REFRESH_TOKEN_EXPIRY_IN_MINUTES}
|
||||
- EXPIRE_AUTH_TOKENS_ON_RESTART=${EXPIRE_AUTH_TOKENS_ON_RESTART}
|
||||
- EXPRESS_SESSION_SECRET=${EXPRESS_SESSION_SECRET}
|
||||
- PASSWORD_RESET_TOKEN_EXPIRY_IN_MINS=${PASSWORD_RESET_TOKEN_EXPIRY_IN_MINS}
|
||||
- PASSWORD_SALT_HASH_ROUNDS=${PASSWORD_SALT_HASH_ROUNDS}
|
||||
- TOKEN_HASH_SECRET=${TOKEN_HASH_SECRET}
|
||||
- SECURE_COOKIES=${SECURE_COOKIES}
|
||||
|
||||
# EMAIL
|
||||
- SMTP_HOST=${SMTP_HOST}
|
||||
- SMTP_PORT=${SMTP_PORT}
|
||||
- SMTP_USER=${SMTP_USER}
|
||||
- SMTP_PASSWORD=${SMTP_PASSWORD}
|
||||
- SMTP_SECURE=${SMTP_SECURE}
|
||||
- ALLOW_UNAUTHORIZED_CERTS=${ALLOW_UNAUTHORIZED_CERTS}
|
||||
- SENDER_EMAIL=${SENDER_EMAIL}
|
||||
|
||||
# ENTERPRISE
|
||||
- LICENSE_URL=${LICENSE_URL}
|
||||
- FLOWISE_EE_LICENSE_KEY=${FLOWISE_EE_LICENSE_KEY}
|
||||
- OFFLINE=${OFFLINE}
|
||||
- INVITE_TOKEN_EXPIRY_IN_HOURS=${INVITE_TOKEN_EXPIRY_IN_HOURS}
|
||||
- WORKSPACE_INVITE_TEMPLATE_PATH=${WORKSPACE_INVITE_TEMPLATE_PATH}
|
||||
|
||||
# METRICS COLLECTION
|
||||
- POSTHOG_PUBLIC_API_KEY=${POSTHOG_PUBLIC_API_KEY}
|
||||
- ENABLE_METRICS=${ENABLE_METRICS}
|
||||
- METRICS_PROVIDER=${METRICS_PROVIDER}
|
||||
- METRICS_INCLUDE_NODE_METRICS=${METRICS_INCLUDE_NODE_METRICS}
|
||||
- METRICS_SERVICE_NAME=${METRICS_SERVICE_NAME}
|
||||
- METRICS_OPEN_TELEMETRY_METRIC_ENDPOINT=${METRICS_OPEN_TELEMETRY_METRIC_ENDPOINT}
|
||||
- METRICS_OPEN_TELEMETRY_PROTOCOL=${METRICS_OPEN_TELEMETRY_PROTOCOL}
|
||||
- METRICS_OPEN_TELEMETRY_DEBUG=${METRICS_OPEN_TELEMETRY_DEBUG}
|
||||
|
||||
# PROXY
|
||||
- GLOBAL_AGENT_HTTP_PROXY=${GLOBAL_AGENT_HTTP_PROXY}
|
||||
- GLOBAL_AGENT_HTTPS_PROXY=${GLOBAL_AGENT_HTTPS_PROXY}
|
||||
- GLOBAL_AGENT_NO_PROXY=${GLOBAL_AGENT_NO_PROXY}
|
||||
|
||||
# QUEUE CONFIGURATION
|
||||
- MODE=${MODE}
|
||||
- QUEUE_NAME=${QUEUE_NAME}
|
||||
- QUEUE_REDIS_EVENT_STREAM_MAX_LEN=${QUEUE_REDIS_EVENT_STREAM_MAX_LEN}
|
||||
- WORKER_CONCURRENCY=${WORKER_CONCURRENCY}
|
||||
- REMOVE_ON_AGE=${REMOVE_ON_AGE}
|
||||
- REMOVE_ON_COUNT=${REMOVE_ON_COUNT}
|
||||
- REDIS_URL=${REDIS_URL}
|
||||
- REDIS_HOST=${REDIS_HOST}
|
||||
- REDIS_PORT=${REDIS_PORT}
|
||||
- REDIS_USERNAME=${REDIS_USERNAME}
|
||||
- REDIS_PASSWORD=${REDIS_PASSWORD}
|
||||
- REDIS_TLS=${REDIS_TLS}
|
||||
- REDIS_CERT=${REDIS_CERT}
|
||||
- REDIS_KEY=${REDIS_KEY}
|
||||
- REDIS_CA=${REDIS_CA}
|
||||
- REDIS_KEEP_ALIVE=${REDIS_KEEP_ALIVE}
|
||||
- ENABLE_BULLMQ_DASHBOARD=${ENABLE_BULLMQ_DASHBOARD}
|
||||
|
||||
# SECURITY
|
||||
- CUSTOM_MCP_SECURITY_CHECK=${CUSTOM_MCP_SECURITY_CHECK}
|
||||
- CUSTOM_MCP_PROTOCOL=${CUSTOM_MCP_PROTOCOL}
|
||||
- HTTP_DENY_LIST=${HTTP_DENY_LIST}
|
||||
- TRUST_PROXY=${TRUST_PROXY}
|
||||
ports:
|
||||
- '${PORT}:${PORT}'
|
||||
healthcheck:
|
||||
test: ['CMD', 'curl', '-f', 'http://localhost:${PORT}/api/v1/ping']
|
||||
interval: 10s
|
||||
timeout: 5s
|
||||
retries: 5
|
||||
start_period: 30s
|
||||
volumes:
|
||||
- ~/.flowise:/root/.flowise
|
||||
entrypoint: /bin/sh -c "sleep 3; flowise start"
|
||||
|
|
|
|||
|
|
@ -0,0 +1,180 @@
|
|||
WORKER_PORT=5566
|
||||
|
||||
# APIKEY_PATH=/your_apikey_path/.flowise # (will be deprecated by end of 2025)
|
||||
|
||||
############################################################################################################
|
||||
############################################## DATABASE ####################################################
|
||||
############################################################################################################
|
||||
|
||||
DATABASE_PATH=/root/.flowise
|
||||
# DATABASE_TYPE=postgres
|
||||
# DATABASE_PORT=5432
|
||||
# DATABASE_HOST=""
|
||||
# DATABASE_NAME=flowise
|
||||
# DATABASE_USER=root
|
||||
# DATABASE_PASSWORD=mypassword
|
||||
# DATABASE_SSL=true
|
||||
# DATABASE_REJECT_UNAUTHORIZED=true
|
||||
# DATABASE_SSL_KEY_BASE64=<Self signed certificate in BASE64>
|
||||
|
||||
|
||||
############################################################################################################
|
||||
############################################## SECRET KEYS #################################################
|
||||
############################################################################################################
|
||||
|
||||
# SECRETKEY_STORAGE_TYPE=local #(local | aws)
|
||||
SECRETKEY_PATH=/root/.flowise
|
||||
# FLOWISE_SECRETKEY_OVERWRITE=myencryptionkey # (if you want to overwrite the secret key)
|
||||
# SECRETKEY_AWS_ACCESS_KEY=<your-access-key>
|
||||
# SECRETKEY_AWS_SECRET_KEY=<your-secret-key>
|
||||
# SECRETKEY_AWS_REGION=us-west-2
|
||||
# SECRETKEY_AWS_NAME=FlowiseEncryptionKey
|
||||
|
||||
|
||||
############################################################################################################
|
||||
############################################## LOGGING #####################################################
|
||||
############################################################################################################
|
||||
|
||||
# DEBUG=true
|
||||
LOG_PATH=/root/.flowise/logs
|
||||
# LOG_LEVEL=info #(error | warn | info | verbose | debug)
|
||||
# LOG_SANITIZE_BODY_FIELDS=password,pwd,pass,secret,token,apikey,api_key,accesstoken,access_token,refreshtoken,refresh_token,clientsecret,client_secret,privatekey,private_key,secretkey,secret_key,auth,authorization,credential,credentials
|
||||
# LOG_SANITIZE_HEADER_FIELDS=authorization,x-api-key,x-auth-token,cookie
|
||||
# TOOL_FUNCTION_BUILTIN_DEP=crypto,fs
|
||||
# TOOL_FUNCTION_EXTERNAL_DEP=moment,lodash
|
||||
# ALLOW_BUILTIN_DEP=false
|
||||
|
||||
|
||||
############################################################################################################
|
||||
############################################## STORAGE #####################################################
|
||||
############################################################################################################
|
||||
|
||||
# STORAGE_TYPE=local (local | s3 | gcs)
|
||||
BLOB_STORAGE_PATH=/root/.flowise/storage
|
||||
# S3_STORAGE_BUCKET_NAME=flowise
|
||||
# S3_STORAGE_ACCESS_KEY_ID=<your-access-key>
|
||||
# S3_STORAGE_SECRET_ACCESS_KEY=<your-secret-key>
|
||||
# S3_STORAGE_REGION=us-west-2
|
||||
# S3_ENDPOINT_URL=<custom-s3-endpoint-url>
|
||||
# S3_FORCE_PATH_STYLE=false
|
||||
# GOOGLE_CLOUD_STORAGE_CREDENTIAL=/the/keyfilename/path
|
||||
# GOOGLE_CLOUD_STORAGE_PROJ_ID=<your-gcp-project-id>
|
||||
# GOOGLE_CLOUD_STORAGE_BUCKET_NAME=<the-bucket-name>
|
||||
# GOOGLE_CLOUD_UNIFORM_BUCKET_ACCESS=true
|
||||
|
||||
|
||||
############################################################################################################
|
||||
############################################## SETTINGS ####################################################
|
||||
############################################################################################################
|
||||
|
||||
# NUMBER_OF_PROXIES= 1
|
||||
# CORS_ORIGINS=*
|
||||
# IFRAME_ORIGINS=*
|
||||
# FLOWISE_FILE_SIZE_LIMIT=50mb
|
||||
# SHOW_COMMUNITY_NODES=true
|
||||
# DISABLE_FLOWISE_TELEMETRY=true
|
||||
# DISABLED_NODES=bufferMemory,chatOpenAI (comma separated list of node names to disable)
|
||||
# Uncomment the following line to enable model list config, load the list of models from your local config file
|
||||
# see https://raw.githubusercontent.com/FlowiseAI/Flowise/main/packages/components/models.json for the format
|
||||
# MODEL_LIST_CONFIG_JSON=/your_model_list_config_file_path
|
||||
|
||||
|
||||
############################################################################################################
|
||||
############################################ AUTH PARAMETERS ###############################################
|
||||
############################################################################################################
|
||||
|
||||
# APP_URL=http://localhost:3000
|
||||
|
||||
# SMTP_HOST=smtp.host.com
|
||||
# SMTP_PORT=465
|
||||
# SMTP_USER=smtp_user
|
||||
# SMTP_PASSWORD=smtp_password
|
||||
# SMTP_SECURE=true
|
||||
# ALLOW_UNAUTHORIZED_CERTS=false
|
||||
# SENDER_EMAIL=team@example.com
|
||||
|
||||
JWT_AUTH_TOKEN_SECRET='AABBCCDDAABBCCDDAABBCCDDAABBCCDDAABBCCDD'
|
||||
JWT_REFRESH_TOKEN_SECRET='AABBCCDDAABBCCDDAABBCCDDAABBCCDDAABBCCDD'
|
||||
JWT_ISSUER='ISSUER'
|
||||
JWT_AUDIENCE='AUDIENCE'
|
||||
JWT_TOKEN_EXPIRY_IN_MINUTES=360
|
||||
JWT_REFRESH_TOKEN_EXPIRY_IN_MINUTES=43200
|
||||
# EXPIRE_AUTH_TOKENS_ON_RESTART=true # (if you need to expire all tokens on app restart)
|
||||
# EXPRESS_SESSION_SECRET=flowise
|
||||
# SECURE_COOKIES=
|
||||
|
||||
# INVITE_TOKEN_EXPIRY_IN_HOURS=24
|
||||
# PASSWORD_RESET_TOKEN_EXPIRY_IN_MINS=15
|
||||
# PASSWORD_SALT_HASH_ROUNDS=10
|
||||
# TOKEN_HASH_SECRET='popcorn'
|
||||
|
||||
# WORKSPACE_INVITE_TEMPLATE_PATH=/path/to/custom/workspace_invite.hbs
|
||||
|
||||
|
||||
############################################################################################################
|
||||
############################################# ENTERPRISE ###################################################
|
||||
############################################################################################################
|
||||
|
||||
# LICENSE_URL=
|
||||
# FLOWISE_EE_LICENSE_KEY=
|
||||
# OFFLINE=
|
||||
|
||||
|
||||
############################################################################################################
|
||||
########################################### METRICS COLLECTION #############################################
|
||||
############################################################################################################
|
||||
|
||||
# POSTHOG_PUBLIC_API_KEY=your_posthog_public_api_key
|
||||
|
||||
# ENABLE_METRICS=false
|
||||
# METRICS_PROVIDER=prometheus # prometheus | open_telemetry
|
||||
# METRICS_INCLUDE_NODE_METRICS=true # default is true
|
||||
# METRICS_SERVICE_NAME=FlowiseAI
|
||||
|
||||
# ONLY NEEDED if METRICS_PROVIDER=open_telemetry
|
||||
# METRICS_OPEN_TELEMETRY_METRIC_ENDPOINT=http://localhost:4318/v1/metrics
|
||||
# METRICS_OPEN_TELEMETRY_PROTOCOL=http # http | grpc | proto (default is http)
|
||||
# METRICS_OPEN_TELEMETRY_DEBUG=true # default is false
|
||||
|
||||
|
||||
############################################################################################################
|
||||
############################################### PROXY ######################################################
|
||||
############################################################################################################
|
||||
|
||||
# Uncomment the following lines to enable global agent proxy, see https://www.npmjs.com/package/global-agent for more details
|
||||
# GLOBAL_AGENT_HTTP_PROXY=CorporateHttpProxyUrl
|
||||
# GLOBAL_AGENT_HTTPS_PROXY=CorporateHttpsProxyUrl
|
||||
# GLOBAL_AGENT_NO_PROXY=ExceptionHostsToBypassProxyIfNeeded
|
||||
|
||||
|
||||
############################################################################################################
|
||||
########################################### QUEUE CONFIGURATION ############################################
|
||||
############################################################################################################
|
||||
|
||||
# MODE=queue #(queue | main)
|
||||
# QUEUE_NAME=flowise-queue
|
||||
# QUEUE_REDIS_EVENT_STREAM_MAX_LEN=100000
|
||||
# WORKER_CONCURRENCY=100000
|
||||
# REMOVE_ON_AGE=86400
|
||||
# REMOVE_ON_COUNT=10000
|
||||
# REDIS_URL=
|
||||
# REDIS_HOST=localhost
|
||||
# REDIS_PORT=6379
|
||||
# REDIS_USERNAME=
|
||||
# REDIS_PASSWORD=
|
||||
# REDIS_TLS=
|
||||
# REDIS_CERT=
|
||||
# REDIS_KEY=
|
||||
# REDIS_CA=
|
||||
# REDIS_KEEP_ALIVE=
|
||||
# ENABLE_BULLMQ_DASHBOARD=
|
||||
|
||||
|
||||
############################################################################################################
|
||||
############################################## SECURITY ####################################################
|
||||
############################################################################################################
|
||||
|
||||
# HTTP_DENY_LIST=
|
||||
# CUSTOM_MCP_SECURITY_CHECK=true
|
||||
# CUSTOM_MCP_PROTOCOL=sse #(stdio | sse)
|
||||
# TRUST_PROXY=true #(true | false | 1 | loopback| linklocal | uniquelocal | IP addresses | loopback, IP addresses)
|
||||
|
|
@ -0,0 +1,49 @@
|
|||
FROM node:20-alpine
|
||||
|
||||
RUN apk add --update libc6-compat python3 make g++
|
||||
# needed for pdfjs-dist
|
||||
RUN apk add --no-cache build-base cairo-dev pango-dev
|
||||
|
||||
# Install Chromium and curl for container-level health checks
|
||||
RUN apk add --no-cache chromium curl
|
||||
|
||||
#install PNPM globally
|
||||
RUN npm install -g pnpm
|
||||
|
||||
ENV PUPPETEER_SKIP_DOWNLOAD=true
|
||||
ENV PUPPETEER_EXECUTABLE_PATH=/usr/bin/chromium-browser
|
||||
|
||||
ENV NODE_OPTIONS=--max-old-space-size=8192
|
||||
|
||||
WORKDIR /usr/src
|
||||
|
||||
# Copy app source
|
||||
COPY . .
|
||||
|
||||
RUN pnpm install
|
||||
|
||||
RUN pnpm build
|
||||
|
||||
# --- Healthcheck Setup ---
|
||||
|
||||
WORKDIR /app/healthcheck
|
||||
|
||||
COPY docker/worker/healthcheck/package.json .
|
||||
|
||||
RUN npm install --omit=dev
|
||||
|
||||
COPY docker/worker/healthcheck/healthcheck.js .
|
||||
|
||||
# --- End Healthcheck Setup ---
|
||||
|
||||
# Set the main working directory back
|
||||
WORKDIR /usr/src
|
||||
|
||||
# Environment variables for port configuration
|
||||
ENV WORKER_PORT=5566
|
||||
|
||||
# Expose port (can be overridden by env var)
|
||||
EXPOSE ${WORKER_PORT}
|
||||
|
||||
# Start healthcheck in background and flowise worker in foreground
|
||||
CMD ["/bin/sh", "-c", "node /app/healthcheck/healthcheck.js & sleep 5 && pnpm run start-worker"]
|
||||
|
|
@ -0,0 +1,28 @@
|
|||
# Flowise Worker
|
||||
|
||||
By utilizing worker instances when operating in queue mode, Flowise can be scaled horizontally by adding more workers to handle increased workloads or scaled down by removing workers when demand decreases.
|
||||
|
||||
Here’s an overview of the process:
|
||||
|
||||
1. The primary Flowise instance sends an execution ID to a message broker, Redis, which maintains a queue of pending executions, allowing the next available worker to process them.
|
||||
2. A worker from the pool retrieves a message from Redis.
|
||||
The worker then starts executing the actual job.
|
||||
3. Once the execution is completed, the worker alerts the main instance that the execution is finished.
|
||||
|
||||
# How to use
|
||||
|
||||
## Setting up Main Server:
|
||||
|
||||
1. Follow [setup guide](https://github.com/FlowiseAI/Flowise/blob/main/docker/README.md)
|
||||
2. In the `.env.example`, setup all the necessary env variables for `QUEUE CONFIGURATION`
|
||||
|
||||
## Setting up Worker:
|
||||
|
||||
1. Navigate to `docker/worker` folder
|
||||
2. In the `.env.example`, setup all the necessary env variables for `QUEUE CONFIGURATION`. The env variables for the worker must match the ones for the main server. Change the `WORKER_PORT` to another available port number to listen for healthchecks. Ex: 5566
|
||||
3. `docker compose up -d`
|
||||
4. You can bring the worker container down by `docker compose stop`
|
||||
|
||||
## Entrypoint:
|
||||
|
||||
Unlike the main server image, which uses `flowise start`, the entrypoint for the worker is `pnpm run start-worker`. This is because the worker's [Dockerfile](./Dockerfile) builds the image from source files via `pnpm build`, instead of from the npm registry via `RUN npm install -g flowise`.
|
||||
|
|
@ -0,0 +1,146 @@
|
|||
# Docker Compose definition for a Flowise worker instance (queue mode).
# All values are sourced from the sibling .env file; the QUEUE CONFIGURATION
# variables must match the ones configured on the main Flowise server.
version: '3.1'

services:
    flowise:
        image: flowiseai/flowise-worker:latest
        restart: always
        environment:
            # Port the bundled healthcheck server listens on (defaults to 5566)
            - WORKER_PORT=${WORKER_PORT:-5566}

            # DATABASE
            - DATABASE_PATH=${DATABASE_PATH}
            - DATABASE_TYPE=${DATABASE_TYPE}
            - DATABASE_PORT=${DATABASE_PORT}
            - DATABASE_HOST=${DATABASE_HOST}
            - DATABASE_NAME=${DATABASE_NAME}
            - DATABASE_USER=${DATABASE_USER}
            - DATABASE_PASSWORD=${DATABASE_PASSWORD}
            - DATABASE_SSL=${DATABASE_SSL}
            - DATABASE_SSL_KEY_BASE64=${DATABASE_SSL_KEY_BASE64}

            # SECRET KEYS
            - SECRETKEY_STORAGE_TYPE=${SECRETKEY_STORAGE_TYPE}
            - SECRETKEY_PATH=${SECRETKEY_PATH}
            - FLOWISE_SECRETKEY_OVERWRITE=${FLOWISE_SECRETKEY_OVERWRITE}
            - SECRETKEY_AWS_ACCESS_KEY=${SECRETKEY_AWS_ACCESS_KEY}
            - SECRETKEY_AWS_SECRET_KEY=${SECRETKEY_AWS_SECRET_KEY}
            - SECRETKEY_AWS_REGION=${SECRETKEY_AWS_REGION}
            - SECRETKEY_AWS_NAME=${SECRETKEY_AWS_NAME}

            # LOGGING
            - DEBUG=${DEBUG}
            - LOG_PATH=${LOG_PATH}
            - LOG_LEVEL=${LOG_LEVEL}
            - LOG_SANITIZE_BODY_FIELDS=${LOG_SANITIZE_BODY_FIELDS}
            - LOG_SANITIZE_HEADER_FIELDS=${LOG_SANITIZE_HEADER_FIELDS}

            # CUSTOM TOOL/FUNCTION DEPENDENCIES
            - TOOL_FUNCTION_BUILTIN_DEP=${TOOL_FUNCTION_BUILTIN_DEP}
            - TOOL_FUNCTION_EXTERNAL_DEP=${TOOL_FUNCTION_EXTERNAL_DEP}
            - ALLOW_BUILTIN_DEP=${ALLOW_BUILTIN_DEP}

            # STORAGE
            - STORAGE_TYPE=${STORAGE_TYPE}
            - BLOB_STORAGE_PATH=${BLOB_STORAGE_PATH}
            - S3_STORAGE_BUCKET_NAME=${S3_STORAGE_BUCKET_NAME}
            - S3_STORAGE_ACCESS_KEY_ID=${S3_STORAGE_ACCESS_KEY_ID}
            - S3_STORAGE_SECRET_ACCESS_KEY=${S3_STORAGE_SECRET_ACCESS_KEY}
            - S3_STORAGE_REGION=${S3_STORAGE_REGION}
            - S3_ENDPOINT_URL=${S3_ENDPOINT_URL}
            - S3_FORCE_PATH_STYLE=${S3_FORCE_PATH_STYLE}
            - GOOGLE_CLOUD_STORAGE_CREDENTIAL=${GOOGLE_CLOUD_STORAGE_CREDENTIAL}
            - GOOGLE_CLOUD_STORAGE_PROJ_ID=${GOOGLE_CLOUD_STORAGE_PROJ_ID}
            - GOOGLE_CLOUD_STORAGE_BUCKET_NAME=${GOOGLE_CLOUD_STORAGE_BUCKET_NAME}
            - GOOGLE_CLOUD_UNIFORM_BUCKET_ACCESS=${GOOGLE_CLOUD_UNIFORM_BUCKET_ACCESS}

            # SETTINGS
            - NUMBER_OF_PROXIES=${NUMBER_OF_PROXIES}
            - CORS_ORIGINS=${CORS_ORIGINS}
            - IFRAME_ORIGINS=${IFRAME_ORIGINS}
            - FLOWISE_FILE_SIZE_LIMIT=${FLOWISE_FILE_SIZE_LIMIT}
            - SHOW_COMMUNITY_NODES=${SHOW_COMMUNITY_NODES}
            - DISABLE_FLOWISE_TELEMETRY=${DISABLE_FLOWISE_TELEMETRY}
            - DISABLED_NODES=${DISABLED_NODES}
            - MODEL_LIST_CONFIG_JSON=${MODEL_LIST_CONFIG_JSON}

            # AUTH PARAMETERS
            - APP_URL=${APP_URL}
            - JWT_AUTH_TOKEN_SECRET=${JWT_AUTH_TOKEN_SECRET}
            - JWT_REFRESH_TOKEN_SECRET=${JWT_REFRESH_TOKEN_SECRET}
            - JWT_ISSUER=${JWT_ISSUER}
            - JWT_AUDIENCE=${JWT_AUDIENCE}
            - JWT_TOKEN_EXPIRY_IN_MINUTES=${JWT_TOKEN_EXPIRY_IN_MINUTES}
            - JWT_REFRESH_TOKEN_EXPIRY_IN_MINUTES=${JWT_REFRESH_TOKEN_EXPIRY_IN_MINUTES}
            - EXPIRE_AUTH_TOKENS_ON_RESTART=${EXPIRE_AUTH_TOKENS_ON_RESTART}
            - EXPRESS_SESSION_SECRET=${EXPRESS_SESSION_SECRET}
            - PASSWORD_RESET_TOKEN_EXPIRY_IN_MINS=${PASSWORD_RESET_TOKEN_EXPIRY_IN_MINS}
            - PASSWORD_SALT_HASH_ROUNDS=${PASSWORD_SALT_HASH_ROUNDS}
            - TOKEN_HASH_SECRET=${TOKEN_HASH_SECRET}
            - SECURE_COOKIES=${SECURE_COOKIES}

            # EMAIL
            - SMTP_HOST=${SMTP_HOST}
            - SMTP_PORT=${SMTP_PORT}
            - SMTP_USER=${SMTP_USER}
            - SMTP_PASSWORD=${SMTP_PASSWORD}
            - SMTP_SECURE=${SMTP_SECURE}
            - ALLOW_UNAUTHORIZED_CERTS=${ALLOW_UNAUTHORIZED_CERTS}
            - SENDER_EMAIL=${SENDER_EMAIL}

            # ENTERPRISE
            - LICENSE_URL=${LICENSE_URL}
            - FLOWISE_EE_LICENSE_KEY=${FLOWISE_EE_LICENSE_KEY}
            - OFFLINE=${OFFLINE}
            - INVITE_TOKEN_EXPIRY_IN_HOURS=${INVITE_TOKEN_EXPIRY_IN_HOURS}
            - WORKSPACE_INVITE_TEMPLATE_PATH=${WORKSPACE_INVITE_TEMPLATE_PATH}

            # METRICS COLLECTION
            - POSTHOG_PUBLIC_API_KEY=${POSTHOG_PUBLIC_API_KEY}
            - ENABLE_METRICS=${ENABLE_METRICS}
            - METRICS_PROVIDER=${METRICS_PROVIDER}
            - METRICS_INCLUDE_NODE_METRICS=${METRICS_INCLUDE_NODE_METRICS}
            - METRICS_SERVICE_NAME=${METRICS_SERVICE_NAME}
            - METRICS_OPEN_TELEMETRY_METRIC_ENDPOINT=${METRICS_OPEN_TELEMETRY_METRIC_ENDPOINT}
            - METRICS_OPEN_TELEMETRY_PROTOCOL=${METRICS_OPEN_TELEMETRY_PROTOCOL}
            - METRICS_OPEN_TELEMETRY_DEBUG=${METRICS_OPEN_TELEMETRY_DEBUG}

            # PROXY
            - GLOBAL_AGENT_HTTP_PROXY=${GLOBAL_AGENT_HTTP_PROXY}
            - GLOBAL_AGENT_HTTPS_PROXY=${GLOBAL_AGENT_HTTPS_PROXY}
            - GLOBAL_AGENT_NO_PROXY=${GLOBAL_AGENT_NO_PROXY}

            # QUEUE CONFIGURATION — must mirror the main server's values
            - MODE=${MODE}
            - QUEUE_NAME=${QUEUE_NAME}
            - QUEUE_REDIS_EVENT_STREAM_MAX_LEN=${QUEUE_REDIS_EVENT_STREAM_MAX_LEN}
            - WORKER_CONCURRENCY=${WORKER_CONCURRENCY}
            - REMOVE_ON_AGE=${REMOVE_ON_AGE}
            - REMOVE_ON_COUNT=${REMOVE_ON_COUNT}
            - REDIS_URL=${REDIS_URL}
            - REDIS_HOST=${REDIS_HOST}
            - REDIS_PORT=${REDIS_PORT}
            - REDIS_USERNAME=${REDIS_USERNAME}
            - REDIS_PASSWORD=${REDIS_PASSWORD}
            - REDIS_TLS=${REDIS_TLS}
            - REDIS_CERT=${REDIS_CERT}
            - REDIS_KEY=${REDIS_KEY}
            - REDIS_CA=${REDIS_CA}
            - REDIS_KEEP_ALIVE=${REDIS_KEEP_ALIVE}
            - ENABLE_BULLMQ_DASHBOARD=${ENABLE_BULLMQ_DASHBOARD}

            # SECURITY
            - CUSTOM_MCP_SECURITY_CHECK=${CUSTOM_MCP_SECURITY_CHECK}
            - CUSTOM_MCP_PROTOCOL=${CUSTOM_MCP_PROTOCOL}
            - HTTP_DENY_LIST=${HTTP_DENY_LIST}
            - TRUST_PROXY=${TRUST_PROXY}
        # NOTE(review): unlike the environment entry above, this mapping has no
        # ":-5566" fallback — WORKER_PORT must be set in .env or the mapping is
        # empty; confirm whether a default is intended here.
        ports:
            - '${WORKER_PORT}:${WORKER_PORT}'
        healthcheck:
            # Probes the bundled healthcheck server started by the entrypoint
            test: ['CMD', 'curl', '-f', 'http://localhost:${WORKER_PORT}/healthz']
            interval: 10s
            timeout: 5s
            retries: 5
            start_period: 30s
        volumes:
            - ~/.flowise:/root/.flowise
        entrypoint: /bin/sh -c "node /app/healthcheck/healthcheck.js & sleep 5 && pnpm run start-worker"
|
||||
|
|
@ -0,0 +1,13 @@
|
|||
// Minimal HTTP server exposing GET /healthz so container orchestrators
// (the docker-compose healthcheck in this folder) can verify the worker
// container is alive. Runs alongside the Flowise worker process.
const express = require('express')

const server = express()

// Listen on the same port the compose file probes; 5566 is the fallback.
const listenPort = process.env.WORKER_PORT || 5566

// Always report healthy with a plain 200 "OK" body.
server.get('/healthz', (_req, res) => res.status(200).send('OK'))

server.listen(listenPort, () => {
    // eslint-disable-next-line no-console
    console.log(`Healthcheck server listening on port ${listenPort}`)
})
|
||||
|
|
@ -0,0 +1,13 @@
|
|||
{
|
||||
"name": "flowise-worker-healthcheck",
|
||||
"version": "1.0.0",
|
||||
"description": "Simple healthcheck server for Flowise worker",
|
||||
"main": "healthcheck.js",
|
||||
"private": true,
|
||||
"scripts": {
|
||||
"start": "node healthcheck.js"
|
||||
},
|
||||
"dependencies": {
|
||||
"express": "^4.19.2"
|
||||
}
|
||||
}
|
||||
|
|
@ -112,46 +112,41 @@ Flowise 在一个单一的单体存储库中有 3 个不同的模块。
|
|||
pnpm start
|
||||
```
|
||||
|
||||
11. 提交代码并从指向 [Flowise 主分支](https://github.com/FlowiseAI/Flowise/tree/master) 的分叉分支上提交 Pull Request。
|
||||
11. 提交代码并从指向 [Flowise 主分支](https://github.com/FlowiseAI/Flowise/tree/main) 的分叉分支上提交 Pull Request。
|
||||
|
||||
## 🌱 环境变量
|
||||
|
||||
Flowise 支持不同的环境变量来配置您的实例。您可以在 `packages/server` 文件夹中的 `.env` 文件中指定以下变量。阅读[更多信息](https://docs.flowiseai.com/environment-variables)
|
||||
|
||||
| 变量名 | 描述 | 类型 | 默认值 |
|
||||
| ---------------------------- | -------------------------------------------------------------------- | ----------------------------------------------- | ----------------------------------- |
|
||||
| PORT | Flowise 运行的 HTTP 端口 | 数字 | 3000 |
|
||||
| FLOWISE_USERNAME | 登录用户名 | 字符串 | |
|
||||
| FLOWISE_PASSWORD | 登录密码 | 字符串 | |
|
||||
| FLOWISE_FILE_SIZE_LIMIT | 上传文件大小限制 | 字符串 | 50mb |
|
||||
| DISABLE_CHATFLOW_REUSE | 强制为每次调用创建一个新的 ChatFlow,而不是重用缓存中的现有 ChatFlow | 布尔值 | |
|
||||
| DEBUG | 打印组件的日志 | 布尔值 | |
|
||||
| LOG_PATH | 存储日志文件的位置 | 字符串 | `your-path/Flowise/logs` |
|
||||
| LOG_LEVEL | 日志的不同级别 | 枚举字符串: `error`, `info`, `verbose`, `debug` | `info` |
|
||||
| APIKEY_STORAGE_TYPE | 存储 API 密钥的存储类型 | 枚举字符串: `json`, `db` | `json` |
|
||||
| APIKEY_PATH | 存储 API 密钥的位置, 当`APIKEY_STORAGE_TYPE`是`json` | 字符串 | `your-path/Flowise/packages/server` |
|
||||
| TOOL_FUNCTION_BUILTIN_DEP | 用于工具函数的 NodeJS 内置模块 | 字符串 | |
|
||||
| TOOL_FUNCTION_EXTERNAL_DEP | 用于工具函数的外部模块 | 字符串 | |
|
||||
| DATABASE_TYPE | 存储 flowise 数据的数据库类型 | 枚举字符串: `sqlite`, `mysql`, `postgres` | `sqlite` |
|
||||
| DATABASE_PATH | 数据库保存的位置(当 DATABASE_TYPE 是 sqlite 时) | 字符串 | `your-home-dir/.flowise` |
|
||||
| DATABASE_HOST | 主机 URL 或 IP 地址(当 DATABASE_TYPE 不是 sqlite 时) | 字符串 | |
|
||||
| DATABASE_PORT | 数据库端口(当 DATABASE_TYPE 不是 sqlite 时) | 字符串 | |
|
||||
| DATABASE_USERNAME | 数据库用户名(当 DATABASE_TYPE 不是 sqlite 时) | 字符串 | |
|
||||
| DATABASE_PASSWORD | 数据库密码(当 DATABASE_TYPE 不是 sqlite 时) | 字符串 | |
|
||||
| DATABASE_NAME | 数据库名称(当 DATABASE_TYPE 不是 sqlite 时) | 字符串 | |
|
||||
| SECRETKEY_PATH | 保存加密密钥(用于加密/解密凭据)的位置 | 字符串 | `your-path/Flowise/packages/server` |
|
||||
| FLOWISE_SECRETKEY_OVERWRITE | 加密密钥用于替代存储在 SECRETKEY_PATH 中的密钥 | 字符串 |
|
||||
| DISABLE_FLOWISE_TELEMETRY | 关闭遥测 | 字符串 |
|
||||
| MODEL_LIST_CONFIG_JSON | 加载模型的位置 | 字符 | `/your_model_list_config_file_path` |
|
||||
| STORAGE_TYPE | 上传文件的存储类型 | 枚举字符串: `local`, `s3` | `local` |
|
||||
| BLOB_STORAGE_PATH | 上传文件存储的本地文件夹路径, 当`STORAGE_TYPE`是`local` | 字符串 | `your-home-dir/.flowise/storage` |
|
||||
| S3_STORAGE_BUCKET_NAME | S3 存储文件夹路径, 当`STORAGE_TYPE`是`s3` | 字符串 | |
|
||||
| S3_STORAGE_ACCESS_KEY_ID | AWS 访问密钥 (Access Key) | 字符串 | |
|
||||
| S3_STORAGE_SECRET_ACCESS_KEY | AWS 密钥 (Secret Key) | 字符串 | |
|
||||
| S3_STORAGE_REGION | S3 存储地区 | 字符串 | |
|
||||
| S3_ENDPOINT_URL | S3 端点 URL | 字符串 | |
|
||||
| S3_FORCE_PATH_STYLE | 将其设置为 true 以强制请求使用路径样式寻址 | Boolean | false |
|
||||
| SHOW_COMMUNITY_NODES | 显示由社区创建的节点 | 布尔值 | |
|
||||
| 变量名 | 描述 | 类型 | 默认值 |
|
||||
|-----------------------------|---------------------------------------------------------|-------------------------------------------------|-------------------------------------|
|
||||
| `PORT` | Flowise 运行的 HTTP 端口 | 数字 | 3000 |
|
||||
| `FLOWISE_FILE_SIZE_LIMIT` | 上传文件大小限制 | 字符串 | 50mb |
|
||||
| `DEBUG` | 打印组件的日志 | 布尔值 | |
|
||||
| `LOG_PATH` | 存储日志文件的位置 | 字符串 | `your-path/Flowise/logs` |
|
||||
| `LOG_LEVEL` | 日志的不同级别 | 枚举字符串: `error`, `info`, `verbose`, `debug` | `info` |
|
||||
| `TOOL_FUNCTION_BUILTIN_DEP` | 用于工具函数的 NodeJS 内置模块 | 字符串 | |
|
||||
| `TOOL_FUNCTION_EXTERNAL_DEP`| 用于工具函数的外部模块 | 字符串 | |
|
||||
| `DATABASE_TYPE` | 存储 Flowise 数据的数据库类型 | 枚举字符串: `sqlite`, `mysql`, `postgres` | `sqlite` |
|
||||
| `DATABASE_PATH` | 数据库保存的位置(当 `DATABASE_TYPE` 是 sqlite 时) | 字符串 | `your-home-dir/.flowise` |
|
||||
| `DATABASE_HOST` | 主机 URL 或 IP 地址(当 `DATABASE_TYPE` 不是 sqlite 时)| 字符串 | |
|
||||
| `DATABASE_PORT` | 数据库端口(当 `DATABASE_TYPE` 不是 sqlite 时) | 字符串 | |
|
||||
| `DATABASE_USERNAME` | 数据库用户名(当 `DATABASE_TYPE` 不是 sqlite 时) | 字符串 | |
|
||||
| `DATABASE_PASSWORD` | 数据库密码(当 `DATABASE_TYPE` 不是 sqlite 时) | 字符串 | |
|
||||
| `DATABASE_NAME` | 数据库名称(当 `DATABASE_TYPE` 不是 sqlite 时) | 字符串 | |
|
||||
| `SECRETKEY_PATH` | 保存加密密钥(用于加密/解密凭据)的位置 | 字符串 | `your-path/Flowise/packages/server` |
|
||||
| `FLOWISE_SECRETKEY_OVERWRITE`| 加密密钥用于替代存储在 `SECRETKEY_PATH` 中的密钥 | 字符串 | |
|
||||
| `MODEL_LIST_CONFIG_JSON` | 加载模型的位置 | 字符串 | `/your_model_list_config_file_path` |
|
||||
| `STORAGE_TYPE` | 上传文件的存储类型 | 枚举字符串: `local`, `s3` | `local` |
|
||||
| `BLOB_STORAGE_PATH` | 本地上传文件存储路径(当 `STORAGE_TYPE` 为 `local`) | 字符串 | `your-home-dir/.flowise/storage` |
|
||||
| `S3_STORAGE_BUCKET_NAME` | S3 存储文件夹路径(当 `STORAGE_TYPE` 为 `s3`) | 字符串 | |
|
||||
| `S3_STORAGE_ACCESS_KEY_ID` | AWS 访问密钥 (Access Key) | 字符串 | |
|
||||
| `S3_STORAGE_SECRET_ACCESS_KEY` | AWS 密钥 (Secret Key) | 字符串 | |
|
||||
| `S3_STORAGE_REGION` | S3 存储地区 | 字符串 | |
|
||||
| `S3_ENDPOINT_URL` | S3 端点 URL | 字符串 | |
|
||||
| `S3_FORCE_PATH_STYLE` | 设置为 true 以强制请求使用路径样式寻址 | 布尔值 | false |
|
||||
| `SHOW_COMMUNITY_NODES` | 显示由社区创建的节点 | 布尔值 | |
|
||||
| `DISABLED_NODES` | 从界面中隐藏节点(以逗号分隔的节点名称列表) | 字符串 | |
|
||||
|
||||
您也可以在使用 `npx` 时指定环境变量。例如:
|
||||
|
||||
|
|
|
|||
|
|
@ -1,8 +1,9 @@
|
|||
<!-- markdownlint-disable MD030 -->
|
||||
|
||||
<img width="100%" src="https://github.com/FlowiseAI/Flowise/blob/main/images/flowise.png?raw=true"></a>
|
||||
|
||||
# Flowise - LLM アプリを簡単に構築
|
||||
<p align="center">
|
||||
<img src="https://github.com/FlowiseAI/Flowise/blob/main/images/flowise_white.svg#gh-light-mode-only">
|
||||
<img src="https://github.com/FlowiseAI/Flowise/blob/main/images/flowise_dark.svg#gh-dark-mode-only">
|
||||
</p>
|
||||
|
||||
[](https://github.com/FlowiseAI/Flowise/releases)
|
||||
[](https://discord.gg/jbaHfsRVBW)
|
||||
|
|
@ -10,11 +11,11 @@
|
|||
[](https://star-history.com/#FlowiseAI/Flowise)
|
||||
[](https://github.com/FlowiseAI/Flowise/fork)
|
||||
|
||||
[English](../README.md) | [中文](./README-ZH.md) | 日本語 | [한국어](./README-KR.md)
|
||||
[English](../README.md) | [繁體中文](./README-TW.md) | [简体中文](./README-ZH.md) | 日本語 | [한국어](./README-KR.md)
|
||||
|
||||
<h3>ドラッグ&ドロップでカスタマイズした LLM フローを構築できる UI</h3>
|
||||
<h3>AIエージェントをビジュアルに構築</h3>
|
||||
<a href="https://github.com/FlowiseAI/Flowise">
|
||||
<img width="100%" src="https://github.com/FlowiseAI/Flowise/blob/main/images/flowise.gif?raw=true"></a>
|
||||
<img width="100%" src="https://github.com/FlowiseAI/Flowise/blob/main/images/flowise_agentflow.gif?raw=true"></a>
|
||||
|
||||
## ⚡ クイックスタート
|
||||
|
||||
|
|
@ -30,12 +31,6 @@
|
|||
npx flowise start
|
||||
```
|
||||
|
||||
ユーザー名とパスワードを入力
|
||||
|
||||
```bash
|
||||
npx flowise start --FLOWISE_USERNAME=user --FLOWISE_PASSWORD=1234
|
||||
```
|
||||
|
||||
3. [http://localhost:3000](http://localhost:3000) を開く
|
||||
|
||||
## 🐳 Docker
|
||||
|
|
@ -126,15 +121,6 @@ Flowise には、3 つの異なるモジュールが 1 つの mono リポジト
|
|||
|
||||
コードの変更は [http://localhost:8080](http://localhost:8080) に自動的にアプリをリロードします
|
||||
|
||||
## 🔒 認証
|
||||
|
||||
アプリレベルの認証を有効にするには、 `FLOWISE_USERNAME` と `FLOWISE_PASSWORD` を `packages/server` の `.env` ファイルに追加します:
|
||||
|
||||
```
|
||||
FLOWISE_USERNAME=user
|
||||
FLOWISE_PASSWORD=1234
|
||||
```
|
||||
|
||||
## 🌱 環境変数
|
||||
|
||||
Flowise は、インスタンスを設定するためのさまざまな環境変数をサポートしています。`packages/server` フォルダ内の `.env` ファイルで以下の変数を指定することができる。[続き](https://github.com/FlowiseAI/Flowise/blob/main/CONTRIBUTING.md#-env-variables)を読む
|
||||
|
|
@ -196,9 +182,9 @@ Flowise は、インスタンスを設定するためのさまざまな環境変
|
|||
<img src="https://contrib.rocks/image?repo=FlowiseAI/Flowise" />
|
||||
</a>
|
||||
|
||||
[コントリビューティングガイド](CONTRIBUTING.md)を参照してください。質問や問題があれば、[Discord](https://discord.gg/jbaHfsRVBW) までご連絡ください。
|
||||
[コントリビューティングガイド](../CONTRIBUTING.md)を参照してください。質問や問題があれば、[Discord](https://discord.gg/jbaHfsRVBW) までご連絡ください。
|
||||
[](https://star-history.com/#FlowiseAI/Flowise&Date)
|
||||
|
||||
## 📄 ライセンス
|
||||
|
||||
このリポジトリのソースコードは、[Apache License Version 2.0](LICENSE.md)の下で利用可能です。
|
||||
このリポジトリのソースコードは、[Apache License Version 2.0](../LICENSE.md)の下で利用可能です。
|
||||
|
|
|
|||
|
|
@ -1,8 +1,9 @@
|
|||
<!-- markdownlint-disable MD030 -->
|
||||
|
||||
<img width="100%" src="https://github.com/FlowiseAI/Flowise/blob/main/images/flowise.png?raw=true"></a>
|
||||
|
||||
# Flowise - 간편한 LLM 애플리케이션 제작
|
||||
<p align="center">
|
||||
<img src="https://github.com/FlowiseAI/Flowise/blob/main/images/flowise_white.svg#gh-light-mode-only">
|
||||
<img src="https://github.com/FlowiseAI/Flowise/blob/main/images/flowise_dark.svg#gh-dark-mode-only">
|
||||
</p>
|
||||
|
||||
[](https://github.com/FlowiseAI/Flowise/releases)
|
||||
[](https://discord.gg/jbaHfsRVBW)
|
||||
|
|
@ -10,11 +11,11 @@
|
|||
[](https://star-history.com/#FlowiseAI/Flowise)
|
||||
[](https://github.com/FlowiseAI/Flowise/fork)
|
||||
|
||||
[English](../README.md) | [中文](./README-ZH.md) | [日本語](./README-JA.md) | 한국어
|
||||
[English](../README.md) | [繁體中文](./README-TW.md) | [简体中文](./README-ZH.md) | [日本語](./README-JA.md) | 한국어
|
||||
|
||||
<h3>드래그 앤 드롭 UI로 맞춤형 LLM 플로우 구축하기</h3>
|
||||
<h3>AI 에이전트를 시각적으로 구축하세요</h3>
|
||||
<a href="https://github.com/FlowiseAI/Flowise">
|
||||
<img width="100%" src="https://github.com/FlowiseAI/Flowise/blob/main/images/flowise.gif?raw=true"></a>
|
||||
<img width="100%" src="https://github.com/FlowiseAI/Flowise/blob/main/images/flowise_agentflow.gif?raw=true"></a>
|
||||
|
||||
## ⚡빠른 시작 가이드
|
||||
|
||||
|
|
@ -30,12 +31,6 @@
|
|||
npx flowise start
|
||||
```
|
||||
|
||||
사용자 이름과 비밀번호로 시작하기
|
||||
|
||||
```bash
|
||||
npx flowise start --FLOWISE_USERNAME=user --FLOWISE_PASSWORD=1234
|
||||
```
|
||||
|
||||
3. [http://localhost:3000](http://localhost:3000) URL 열기
|
||||
|
||||
## 🐳 도커(Docker)를 활용하여 시작하기
|
||||
|
|
@ -126,15 +121,6 @@ Flowise는 단일 리포지토리에 3개의 서로 다른 모듈이 있습니
|
|||
|
||||
코드가 변경되면 [http://localhost:8080](http://localhost:8080)에서 자동으로 애플리케이션을 새로고침 합니다.
|
||||
|
||||
## 🔒 인증
|
||||
|
||||
애플리케이션 수준의 인증을 사용하려면 `packages/server`의 `.env` 파일에 `FLOWISE_USERNAME` 및 `FLOWISE_PASSWORD`를 추가합니다:
|
||||
|
||||
```
|
||||
FLOWISE_USERNAME=user
|
||||
FLOWISE_PASSWORD=1234
|
||||
```
|
||||
|
||||
## 🌱 환경 변수
|
||||
|
||||
Flowise는 인스턴스 구성을 위한 다양한 환경 변수를 지원합니다. `packages/server` 폴더 내 `.env` 파일에 다양한 환경 변수를 지정할 수 있습니다. [자세히 보기](https://github.com/FlowiseAI/Flowise/blob/main/CONTRIBUTING.md#-env-variables)
|
||||
|
|
@ -196,9 +182,9 @@ Flowise는 인스턴스 구성을 위한 다양한 환경 변수를 지원합니
|
|||
<img src="https://contrib.rocks/image?repo=FlowiseAI/Flowise" />
|
||||
</a>
|
||||
|
||||
[contributing guide](CONTRIBUTING.md)를 살펴보세요. 디스코드 [Discord](https://discord.gg/jbaHfsRVBW) 채널에서도 이슈나 질의응답을 진행하실 수 있습니다.
|
||||
[contributing guide](../CONTRIBUTING.md)를 살펴보세요. 디스코드 [Discord](https://discord.gg/jbaHfsRVBW) 채널에서도 이슈나 질의응답을 진행하실 수 있습니다.
|
||||
[](https://star-history.com/#FlowiseAI/Flowise&Date)
|
||||
|
||||
## 📄 라이센스
|
||||
|
||||
본 리포지토리의 소스코드는 [Apache License Version 2.0](LICENSE.md) 라이센스가 적용됩니다.
|
||||
본 리포지토리의 소스코드는 [Apache License Version 2.0](../LICENSE.md) 라이센스가 적용됩니다.
|
||||
|
|
|
|||
|
|
@ -0,0 +1,202 @@
|
|||
<!-- markdownlint-disable MD030 -->
|
||||
|
||||
<p align="center">
|
||||
<img src="https://github.com/FlowiseAI/Flowise/blob/main/images/flowise_white.svg#gh-light-mode-only">
|
||||
<img src="https://github.com/FlowiseAI/Flowise/blob/main/images/flowise_dark.svg#gh-dark-mode-only">
|
||||
</p>
|
||||
|
||||
[](https://github.com/FlowiseAI/Flowise/releases)
|
||||
[](https://discord.gg/jbaHfsRVBW)
|
||||
[](https://twitter.com/FlowiseAI)
|
||||
[](https://star-history.com/#FlowiseAI/Flowise)
|
||||
[](https://github.com/FlowiseAI/Flowise/fork)
|
||||
|
||||
[English](../README.md) | 繁體中文 | [简体中文](./README-ZH.md) | [日本語](./README-JA.md) | [한국어](./README-KR.md)
|
||||
|
||||
<h3>可視化建置 AI/LLM 流程</h3>
|
||||
<a href="https://github.com/FlowiseAI/Flowise">
|
||||
<img width="100%" src="https://github.com/FlowiseAI/Flowise/blob/main/images/flowise_agentflow.gif?raw=true"></a>
|
||||
|
||||
## ⚡ 快速開始
|
||||
|
||||
下載並安裝 [NodeJS](https://nodejs.org/en/download) >= 18.15.0
|
||||
|
||||
1. 安裝 Flowise
|
||||
```bash
|
||||
npm install -g flowise
|
||||
```
|
||||
2. 啟動 Flowise
|
||||
|
||||
```bash
|
||||
npx flowise start
|
||||
```
|
||||
|
||||
3. 打開 [http://localhost:3000](http://localhost:3000)
|
||||
|
||||
## 🐳 Docker
|
||||
|
||||
### Docker Compose
|
||||
|
||||
1. 複製 Flowise 專案
|
||||
2. 進入專案根目錄的 `docker` 資料夾
|
||||
3. 複製 `.env.example` 文件,貼到相同位置,並重新命名為 `.env` 文件
|
||||
4. `docker compose up -d`
|
||||
5. 打開 [http://localhost:3000](http://localhost:3000)
|
||||
6. 您可以透過 `docker compose stop` 停止容器
|
||||
|
||||
### Docker 映像
|
||||
|
||||
1. 本地建置映像:
|
||||
```bash
|
||||
docker build --no-cache -t flowise .
|
||||
```
|
||||
2. 運行映像:
|
||||
|
||||
```bash
|
||||
docker run -d --name flowise -p 3000:3000 flowise
|
||||
```
|
||||
|
||||
3. 停止映像:
|
||||
```bash
|
||||
docker stop flowise
|
||||
```
|
||||
|
||||
## 👨💻 開發者
|
||||
|
||||
Flowise 在單個 mono 儲存庫中有 3 個不同的模組。
|
||||
|
||||
- `server`: 提供 API 邏輯的 Node 後端
|
||||
- `ui`: React 前端
|
||||
- `components`: 第三方節點集成
|
||||
- `api-documentation`: 從 express 自動生成的 swagger-ui API 文檔
|
||||
|
||||
### 先決條件
|
||||
|
||||
- 安裝 [PNPM](https://pnpm.io/installation)
|
||||
```bash
|
||||
npm i -g pnpm
|
||||
```
|
||||
|
||||
### 設置
|
||||
|
||||
1. 複製儲存庫
|
||||
|
||||
```bash
|
||||
git clone https://github.com/FlowiseAI/Flowise.git
|
||||
```
|
||||
|
||||
2. 進入儲存庫文件夾
|
||||
|
||||
```bash
|
||||
cd Flowise
|
||||
```
|
||||
|
||||
3. 安裝所有模組的所有依賴項:
|
||||
|
||||
```bash
|
||||
pnpm install
|
||||
```
|
||||
|
||||
4. 建置所有程式碼:
|
||||
|
||||
```bash
|
||||
pnpm build
|
||||
```
|
||||
|
||||
<details>
|
||||
<summary>Exit code 134 (JavaScript heap out of memory)</summary>
|
||||
如果在運行上述 `build` 腳本時遇到此錯誤,請嘗試增加 Node.js 中的 Heap 記憶體大小並重新運行腳本:
|
||||
|
||||
export NODE_OPTIONS="--max-old-space-size=4096"
|
||||
pnpm build
|
||||
|
||||
</details>
|
||||
|
||||
5. 啟動應用:
|
||||
|
||||
```bash
|
||||
pnpm start
|
||||
```
|
||||
|
||||
您現在可以開啟 [http://localhost:3000](http://localhost:3000)
|
||||
|
||||
6. 對於開發建置:
|
||||
|
||||
- 在 `packages/ui` 中創建 `.env` 文件並指定 `VITE_PORT`(參考 `.env.example`)
|
||||
- 在 `packages/server` 中創建 `.env` 文件並指定 `PORT`(參考 `.env.example`)
|
||||
- 運行
|
||||
|
||||
```bash
|
||||
pnpm dev
|
||||
```
|
||||
|
||||
任何程式碼更改都會自動重新加載應用程式 [http://localhost:8080](http://localhost:8080)
|
||||
|
||||
## 🌱 環境變數
|
||||
|
||||
Flowise 支持不同的環境變數來配置您的實例。您可以在 `packages/server` 文件夾中的 `.env` 文件中指定以下變數。閱讀 [更多](https://github.com/FlowiseAI/Flowise/blob/main/CONTRIBUTING.md#-env-variables)
|
||||
|
||||
## 📖 文檔
|
||||
|
||||
[Flowise 文檔](https://docs.flowiseai.com/)
|
||||
|
||||
## 🌐 自行架設
|
||||
|
||||
在您現有的基礎設施中部署 Flowise,我們支持各種自行架設選項 [部署](https://docs.flowiseai.com/configuration/deployment)
|
||||
|
||||
- [AWS](https://docs.flowiseai.com/configuration/deployment/aws)
|
||||
- [Azure](https://docs.flowiseai.com/configuration/deployment/azure)
|
||||
- [Digital Ocean](https://docs.flowiseai.com/configuration/deployment/digital-ocean)
|
||||
- [GCP](https://docs.flowiseai.com/configuration/deployment/gcp)
|
||||
- [阿里雲](https://computenest.console.aliyun.com/service/instance/create/default?type=user&ServiceName=Flowise社区版)
|
||||
- <details>
|
||||
<summary>其他</summary>
|
||||
|
||||
- [Railway](https://docs.flowiseai.com/configuration/deployment/railway)
|
||||
|
||||
[](https://railway.app/template/pn4G8S?referralCode=WVNPD9)
|
||||
|
||||
- [Render](https://docs.flowiseai.com/configuration/deployment/render)
|
||||
|
||||
[](https://docs.flowiseai.com/configuration/deployment/render)
|
||||
|
||||
- [HuggingFace Spaces](https://docs.flowiseai.com/deployment/hugging-face)
|
||||
|
||||
<a href="https://huggingface.co/spaces/FlowiseAI/Flowise"><img src="https://huggingface.co/datasets/huggingface/badges/raw/main/open-in-hf-spaces-sm.svg" alt="HuggingFace Spaces"></a>
|
||||
|
||||
- [Elestio](https://elest.io/open-source/flowiseai)
|
||||
|
||||
[](https://elest.io/open-source/flowiseai)
|
||||
|
||||
- [Sealos](https://cloud.sealos.io/?openapp=system-template%3FtemplateName%3Dflowise)
|
||||
|
||||
[](https://cloud.sealos.io/?openapp=system-template%3FtemplateName%3Dflowise)
|
||||
|
||||
- [RepoCloud](https://repocloud.io/details/?app_id=29)
|
||||
|
||||
[](https://repocloud.io/details/?app_id=29)
|
||||
|
||||
</details>
|
||||
|
||||
## ☁️ Flowise 雲端平台
|
||||
|
||||
[開始使用 Flowise 雲端平台](https://flowiseai.com/)
|
||||
|
||||
## 🙋 支持
|
||||
|
||||
隨時在 [討論](https://github.com/FlowiseAI/Flowise/discussions) 中提出任何問題、提出問題和請求新功能
|
||||
|
||||
## 🙌 貢獻
|
||||
|
||||
感謝這些出色的貢獻者
|
||||
|
||||
<a href="https://github.com/FlowiseAI/Flowise/graphs/contributors">
|
||||
<img src="https://contrib.rocks/image?repo=FlowiseAI/Flowise" />
|
||||
</a>
|
||||
|
||||
請參閱 [貢獻指南](../CONTRIBUTING.md)。如果您有任何問題或問題,請透過 [Discord](https://discord.gg/jbaHfsRVBW) 與我們聯繫。
|
||||
[](https://star-history.com/#FlowiseAI/Flowise&Date)
|
||||
|
||||
## 📄 許可證
|
||||
|
||||
此儲存庫中的原始碼根據 [Apache 2.0 授權條款](../LICENSE.md) 授權使用。
|
||||
|
|
@ -1,8 +1,9 @@
|
|||
<!-- markdownlint-disable MD030 -->
|
||||
|
||||
<img width="100%" src="https://github.com/FlowiseAI/Flowise/blob/main/images/flowise.png?raw=true"></a>
|
||||
|
||||
# Flowise - 轻松构建 LLM 应用程序
|
||||
<p align="center">
|
||||
<img src="https://github.com/FlowiseAI/Flowise/blob/main/images/flowise_white.svg#gh-light-mode-only">
|
||||
<img src="https://github.com/FlowiseAI/Flowise/blob/main/images/flowise_dark.svg#gh-dark-mode-only">
|
||||
</p>
|
||||
|
||||
[](https://github.com/FlowiseAI/Flowise/releases)
|
||||
[](https://discord.gg/jbaHfsRVBW)
|
||||
|
|
@ -10,11 +11,11 @@
|
|||
[](https://star-history.com/#FlowiseAI/Flowise)
|
||||
[](https://github.com/FlowiseAI/Flowise/fork)
|
||||
|
||||
[English](../README.md) | 中文 | [日本語](./README-JA.md) | [한국어](./README-KR.md)
|
||||
[English](../README.md) | [繁體中文](./README-TW.md) | 简体中文 | [日本語](./README-JA.md) | [한국어](./README-KR.md)
|
||||
|
||||
<h3>拖放界面构建定制化的LLM流程</h3>
|
||||
<h3>可视化构建 AI/LLM 流程</h3>
|
||||
<a href="https://github.com/FlowiseAI/Flowise">
|
||||
<img width="100%" src="https://github.com/FlowiseAI/Flowise/blob/main/images/flowise.gif?raw=true"></a>
|
||||
<img width="100%" src="https://github.com/FlowiseAI/Flowise/blob/main/images/flowise_agentflow.gif?raw=true"></a>
|
||||
|
||||
## ⚡ 快速入门
|
||||
|
||||
|
|
@ -30,12 +31,6 @@
|
|||
npx flowise start
|
||||
```
|
||||
|
||||
使用用户名和密码
|
||||
|
||||
```bash
|
||||
npx flowise start --FLOWISE_USERNAME=user --FLOWISE_PASSWORD=1234
|
||||
```
|
||||
|
||||
3. 打开 [http://localhost:3000](http://localhost:3000)
|
||||
|
||||
## 🐳 Docker
|
||||
|
|
@ -126,15 +121,6 @@ Flowise 在一个单一的代码库中有 3 个不同的模块。
|
|||
|
||||
任何代码更改都会自动重新加载应用程序,访问 [http://localhost:8080](http://localhost:8080)
|
||||
|
||||
## 🔒 认证
|
||||
|
||||
要启用应用程序级身份验证,在 `packages/server` 的 `.env` 文件中添加 `FLOWISE_USERNAME` 和 `FLOWISE_PASSWORD`:
|
||||
|
||||
```
|
||||
FLOWISE_USERNAME=user
|
||||
FLOWISE_PASSWORD=1234
|
||||
```
|
||||
|
||||
## 🌱 环境变量
|
||||
|
||||
Flowise 支持不同的环境变量来配置您的实例。您可以在 `packages/server` 文件夹中的 `.env` 文件中指定以下变量。了解更多信息,请阅读[文档](https://github.com/FlowiseAI/Flowise/blob/main/CONTRIBUTING.md#-env-variables)
|
||||
|
|
@ -170,9 +156,9 @@ Flowise 支持不同的环境变量来配置您的实例。您可以在 `package
|
|||
|
||||
[](https://elest.io/open-source/flowiseai)
|
||||
|
||||
- [Sealos](https://cloud.sealos.io/?openapp=system-template%3FtemplateName%3Dflowise)
|
||||
- [Sealos](https://template.sealos.io/deploy?templateName=flowise)
|
||||
|
||||
[](https://cloud.sealos.io/?openapp=system-template%3FtemplateName%3Dflowise)
|
||||
[](https://template.sealos.io/deploy?templateName=flowise)
|
||||
|
||||
- [RepoCloud](https://repocloud.io/details/?app_id=29)
|
||||
|
||||
|
|
@ -196,8 +182,8 @@ Flowise 支持不同的环境变量来配置您的实例。您可以在 `package
|
|||
<img src="https://contrib.rocks/image?repo=FlowiseAI/Flowise" />
|
||||
</a>
|
||||
|
||||
参见[贡献指南](CONTRIBUTING.md)。如果您有任何问题或问题,请在[Discord](https://discord.gg/jbaHfsRVBW)上与我们联系。
|
||||
参见[贡献指南](CONTRIBUTING-ZH.md)。如果您有任何问题或问题,请在[Discord](https://discord.gg/jbaHfsRVBW)上与我们联系。
|
||||
|
||||
## 📄 许可证
|
||||
|
||||
此代码库中的源代码在[Apache License Version 2.0 许可证](LICENSE.md)下提供。
|
||||
此代码库中的源代码在[Apache License Version 2.0 许可证](../LICENSE.md)下提供。
|
||||
|
|
|
|||
Binary file not shown.
|
After Width: | Height: | Size: 14 MiB |
File diff suppressed because one or more lines are too long
|
After Width: | Height: | Size: 40 KiB |
File diff suppressed because one or more lines are too long
|
After Width: | Height: | Size: 40 KiB |
|
|
@ -1,6 +1,7 @@
|
|||
version: "2"
|
||||
services:
|
||||
otel-collector:
|
||||
read_only: true
|
||||
image: otel/opentelemetry-collector-contrib
|
||||
command: ["--config=/etc/otelcol-contrib/config.yaml", "--feature-gates=-exporter.datadogexporter.DisableAPMStats", "${OTELCOL_ARGS}"]
|
||||
volumes:
|
||||
|
|
|
|||
41
package.json
41
package.json
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "flowise",
|
||||
"version": "2.1.5",
|
||||
"version": "3.0.11",
|
||||
"private": true,
|
||||
"homepage": "https://flowiseai.com",
|
||||
"workspaces": [
|
||||
|
|
@ -13,10 +13,17 @@
|
|||
"scripts": {
|
||||
"build": "turbo run build",
|
||||
"build-force": "pnpm clean && turbo run build --force",
|
||||
"dev": "turbo run dev --parallel",
|
||||
"dev": "turbo run dev --parallel --no-cache",
|
||||
"start": "run-script-os",
|
||||
"start:windows": "cd packages/server/bin && run start",
|
||||
"start:default": "cd packages/server/bin && ./run start",
|
||||
"start-worker": "run-script-os",
|
||||
"start-worker:windows": "cd packages/server/bin && run worker",
|
||||
"start-worker:default": "cd packages/server/bin && ./run worker",
|
||||
"user": "run-script-os",
|
||||
"user:windows": "cd packages/server/bin && run user",
|
||||
"user:default": "cd packages/server/bin && ./run user",
|
||||
"test": "turbo run test",
|
||||
"clean": "pnpm --filter \"./packages/**\" clean",
|
||||
"nuke": "pnpm --filter \"./packages/**\" nuke && rimraf node_modules .turbo",
|
||||
"format": "prettier --write \"**/*.{ts,tsx,md}\"",
|
||||
|
|
@ -44,7 +51,7 @@
|
|||
"eslint-plugin-react-hooks": "^4.6.0",
|
||||
"eslint-plugin-unused-imports": "^2.0.0",
|
||||
"husky": "^8.0.1",
|
||||
"kill-port": "^2.0.1",
|
||||
"kill-port": "2.0.1",
|
||||
"lint-staged": "^13.0.3",
|
||||
"prettier": "^2.7.1",
|
||||
"pretty-quick": "^3.1.3",
|
||||
|
|
@ -59,7 +66,26 @@
|
|||
"sqlite3"
|
||||
],
|
||||
"overrides": {
|
||||
"set-value": "^3.0.3"
|
||||
"axios": "1.12.0",
|
||||
"body-parser": "2.0.2",
|
||||
"braces": "3.0.3",
|
||||
"cross-spawn": "7.0.6",
|
||||
"form-data": "4.0.4",
|
||||
"glob-parent": "6.0.2",
|
||||
"http-proxy-middleware": "3.0.3",
|
||||
"json5": "2.2.3",
|
||||
"nth-check": "2.1.1",
|
||||
"path-to-regexp": "0.1.12",
|
||||
"prismjs": "1.29.0",
|
||||
"rollup": "4.45.0",
|
||||
"semver": "7.7.1",
|
||||
"set-value": "4.1.0",
|
||||
"solid-js": "1.9.7",
|
||||
"tar-fs": "3.1.0",
|
||||
"unset-value": "2.0.1",
|
||||
"webpack-dev-middleware": "7.4.2",
|
||||
"ws": "8.18.3",
|
||||
"xlsx": "https://cdn.sheetjs.com/xlsx-0.20.3/xlsx-0.20.3.tgz"
|
||||
}
|
||||
},
|
||||
"engines": {
|
||||
|
|
@ -67,10 +93,11 @@
|
|||
"pnpm": ">=9"
|
||||
},
|
||||
"resolutions": {
|
||||
"@google/generative-ai": "^0.15.0",
|
||||
"@langchain/core": "0.2.18",
|
||||
"@google/generative-ai": "^0.24.0",
|
||||
"@grpc/grpc-js": "^1.10.10",
|
||||
"@langchain/core": "0.3.61",
|
||||
"@qdrant/openapi-typescript-fetch": "1.2.6",
|
||||
"openai": "4.57.3",
|
||||
"openai": "4.96.0",
|
||||
"protobufjs": "7.4.0"
|
||||
},
|
||||
"eslintIgnore": [
|
||||
|
|
|
|||
|
|
@ -1,11 +1,10 @@
|
|||
{
|
||||
"name": "flowise-api",
|
||||
"version": "1.0.0",
|
||||
"version": "1.0.3",
|
||||
"description": "Flowise API documentation server",
|
||||
"scripts": {
|
||||
"build": "tsc",
|
||||
"start": "node dist/index.js",
|
||||
"dev": "concurrently \"tsc-watch --noClear -p ./tsconfig.json\" \"nodemon\"",
|
||||
"lint": "eslint . --ext ts,tsx --report-unused-disable-directives --max-warnings 0"
|
||||
},
|
||||
"license": "SEE LICENSE IN LICENSE.md",
|
||||
|
|
|
|||
File diff suppressed because it is too large
Load Diff
|
|
@ -6,7 +6,7 @@
|
|||
|
||||
Flowise 的应用集成。包含节点和凭据。
|
||||
|
||||

|
||||

|
||||
|
||||
安装:
|
||||
|
||||
|
|
|
|||
|
|
@ -6,7 +6,7 @@ English | [中文](./README-ZH.md)
|
|||
|
||||
Apps integration for Flowise. Contain Nodes and Credentials.
|
||||
|
||||

|
||||

|
||||
|
||||
Install:
|
||||
|
||||
|
|
|
|||
|
|
@ -0,0 +1,23 @@
|
|||
import { INodeParams, INodeCredential } from '../src/Interface'
|
||||
|
||||
class AgentflowApi implements INodeCredential {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
inputs: INodeParams[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'Agentflow API'
|
||||
this.name = 'agentflowApi'
|
||||
this.version = 1.0
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Agentflow Api Key',
|
||||
name: 'agentflowApiKey',
|
||||
type: 'password'
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { credClass: AgentflowApi }
|
||||
|
|
@ -0,0 +1,39 @@
|
|||
import { INodeParams, INodeCredential } from '../src/Interface'
|
||||
|
||||
class ArizeApi implements INodeCredential {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
inputs: INodeParams[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'Arize API'
|
||||
this.name = 'arizeApi'
|
||||
this.version = 1.0
|
||||
this.description =
|
||||
'Refer to <a target="_blank" href="https://docs.arize.com/arize">official guide</a> on how to get API keys on Arize.'
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'API Key',
|
||||
name: 'arizeApiKey',
|
||||
type: 'password',
|
||||
placeholder: '<ARIZE_API_KEY>'
|
||||
},
|
||||
{
|
||||
label: 'Space ID',
|
||||
name: 'arizeSpaceId',
|
||||
type: 'string',
|
||||
placeholder: '<ARIZE_SPACE_ID>'
|
||||
},
|
||||
{
|
||||
label: 'Endpoint',
|
||||
name: 'arizeEndpoint',
|
||||
type: 'string',
|
||||
default: 'https://otlp.arize.com'
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { credClass: ArizeApi }
|
||||
|
|
@ -0,0 +1,39 @@
|
|||
import { INodeParams, INodeCredential } from '../src/Interface'
|
||||
|
||||
class AzureCognitiveServices implements INodeCredential {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
inputs: INodeParams[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'Azure Cognitive Services'
|
||||
this.name = 'azureCognitiveServices'
|
||||
this.version = 1.0
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Azure Subscription Key',
|
||||
name: 'azureSubscriptionKey',
|
||||
type: 'password',
|
||||
description: 'Your Azure Cognitive Services subscription key'
|
||||
},
|
||||
{
|
||||
label: 'Service Region',
|
||||
name: 'serviceRegion',
|
||||
type: 'string',
|
||||
description: 'The Azure service region (e.g., "westus", "eastus")',
|
||||
placeholder: 'westus'
|
||||
},
|
||||
{
|
||||
label: 'API Version',
|
||||
name: 'apiVersion',
|
||||
type: 'string',
|
||||
description: 'The API version to use (e.g., "2024-05-15-preview")',
|
||||
placeholder: '2024-05-15-preview',
|
||||
default: '2024-05-15-preview'
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { credClass: AzureCognitiveServices }
|
||||
|
|
@ -1,28 +1,28 @@
|
|||
import { INodeParams, INodeCredential } from '../src/Interface'
|
||||
|
||||
class BaiduApi implements INodeCredential {
|
||||
class BaiduQianfanApi implements INodeCredential {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
inputs: INodeParams[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'Baidu API'
|
||||
this.name = 'baiduApi'
|
||||
this.version = 1.0
|
||||
this.label = 'Baidu Qianfan API'
|
||||
this.name = 'baiduQianfanApi'
|
||||
this.version = 2.0
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Baidu Api Key',
|
||||
name: 'baiduApiKey',
|
||||
type: 'password'
|
||||
label: 'Qianfan Access Key',
|
||||
name: 'qianfanAccessKey',
|
||||
type: 'string'
|
||||
},
|
||||
{
|
||||
label: 'Baidu Secret Key',
|
||||
name: 'baiduSecretKey',
|
||||
label: 'Qianfan Secret Key',
|
||||
name: 'qianfanSecretKey',
|
||||
type: 'password'
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { credClass: BaiduApi }
|
||||
module.exports = { credClass: BaiduQianfanApi }
|
||||
|
|
|
|||
|
|
@ -16,6 +16,16 @@ class ChromaApi implements INodeCredential {
|
|||
label: 'Chroma Api Key',
|
||||
name: 'chromaApiKey',
|
||||
type: 'password'
|
||||
},
|
||||
{
|
||||
label: 'Chroma Tenant',
|
||||
name: 'chromaTenant',
|
||||
type: 'string'
|
||||
},
|
||||
{
|
||||
label: 'Chroma Database',
|
||||
name: 'chromaDatabase',
|
||||
type: 'string'
|
||||
}
|
||||
]
|
||||
}
|
||||
|
|
|
|||
|
|
@ -0,0 +1,23 @@
|
|||
import { INodeCredential, INodeParams } from '../src/Interface'
|
||||
|
||||
class CometApi implements INodeCredential {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
inputs: INodeParams[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'Comet API'
|
||||
this.name = 'cometApi'
|
||||
this.version = 1.0
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Comet API Key',
|
||||
name: 'cometApiKey',
|
||||
type: 'password'
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { credClass: CometApi }
|
||||
|
|
@ -0,0 +1,23 @@
|
|||
import { INodeParams, INodeCredential } from '../src/Interface'
|
||||
|
||||
class ComposioApi implements INodeCredential {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
inputs: INodeParams[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'Composio API'
|
||||
this.name = 'composioApi'
|
||||
this.version = 1.0
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Composio API Key',
|
||||
name: 'composioApi',
|
||||
type: 'password'
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { credClass: ComposioApi }
|
||||
|
|
@ -0,0 +1,23 @@
|
|||
import { INodeCredential, INodeParams } from '../src/Interface'
|
||||
|
||||
class DeepseekApi implements INodeCredential {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
inputs: INodeParams[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'DeepseekAI API'
|
||||
this.name = 'deepseekApi'
|
||||
this.version = 1.0
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'DeepseekAI API Key',
|
||||
name: 'deepseekApiKey',
|
||||
type: 'password'
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { credClass: DeepseekApi }
|
||||
|
|
@ -0,0 +1,26 @@
|
|||
import { INodeParams, INodeCredential } from '../src/Interface'
|
||||
|
||||
class ElevenLabsApi implements INodeCredential {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
inputs: INodeParams[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'Eleven Labs API'
|
||||
this.name = 'elevenLabsApi'
|
||||
this.version = 1.0
|
||||
this.description =
|
||||
'Sign up for a Eleven Labs account and <a target="_blank" href="https://elevenlabs.io/app/settings/api-keys">create an API Key</a>.'
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Eleven Labs API Key',
|
||||
name: 'elevenLabsApiKey',
|
||||
type: 'password'
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { credClass: ElevenLabsApi }
|
||||
|
|
@ -0,0 +1,63 @@
|
|||
import { INodeParams, INodeCredential } from '../src/Interface'
|
||||
const scopes = [
|
||||
'https://www.googleapis.com/auth/gmail.readonly',
|
||||
'https://www.googleapis.com/auth/gmail.compose',
|
||||
'https://www.googleapis.com/auth/gmail.modify',
|
||||
'https://www.googleapis.com/auth/gmail.labels'
|
||||
]
|
||||
|
||||
class GmailOAuth2 implements INodeCredential {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
inputs: INodeParams[]
|
||||
description: string
|
||||
|
||||
constructor() {
|
||||
this.label = 'Gmail OAuth2'
|
||||
this.name = 'gmailOAuth2'
|
||||
this.version = 1.0
|
||||
this.description =
|
||||
'You can find the setup instructions <a target="_blank" href="https://docs.flowiseai.com/integrations/langchain/tools/gmail">here</a>'
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Authorization URL',
|
||||
name: 'authorizationUrl',
|
||||
type: 'string',
|
||||
default: 'https://accounts.google.com/o/oauth2/v2/auth'
|
||||
},
|
||||
{
|
||||
label: 'Access Token URL',
|
||||
name: 'accessTokenUrl',
|
||||
type: 'string',
|
||||
default: 'https://oauth2.googleapis.com/token'
|
||||
},
|
||||
{
|
||||
label: 'Client ID',
|
||||
name: 'clientId',
|
||||
type: 'string'
|
||||
},
|
||||
{
|
||||
label: 'Client Secret',
|
||||
name: 'clientSecret',
|
||||
type: 'password'
|
||||
},
|
||||
{
|
||||
label: 'Additional Parameters',
|
||||
name: 'additionalParameters',
|
||||
type: 'string',
|
||||
default: 'access_type=offline&prompt=consent',
|
||||
hidden: true
|
||||
},
|
||||
{
|
||||
label: 'Scope',
|
||||
name: 'scope',
|
||||
type: 'string',
|
||||
hidden: true,
|
||||
default: scopes.join(' ')
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { credClass: GmailOAuth2 }
|
||||
|
|
@ -0,0 +1,58 @@
|
|||
import { INodeParams, INodeCredential } from '../src/Interface'
|
||||
const scopes = ['https://www.googleapis.com/auth/calendar', 'https://www.googleapis.com/auth/calendar.events']
|
||||
|
||||
class GoogleCalendarOAuth2 implements INodeCredential {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
inputs: INodeParams[]
|
||||
description: string
|
||||
|
||||
constructor() {
|
||||
this.label = 'Google Calendar OAuth2'
|
||||
this.name = 'googleCalendarOAuth2'
|
||||
this.version = 1.0
|
||||
this.description =
|
||||
'You can find the setup instructions <a target="_blank" href="https://docs.flowiseai.com/integrations/langchain/tools/google-calendar">here</a>'
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Authorization URL',
|
||||
name: 'authorizationUrl',
|
||||
type: 'string',
|
||||
default: 'https://accounts.google.com/o/oauth2/v2/auth'
|
||||
},
|
||||
{
|
||||
label: 'Access Token URL',
|
||||
name: 'accessTokenUrl',
|
||||
type: 'string',
|
||||
default: 'https://oauth2.googleapis.com/token'
|
||||
},
|
||||
{
|
||||
label: 'Client ID',
|
||||
name: 'clientId',
|
||||
type: 'string'
|
||||
},
|
||||
{
|
||||
label: 'Client Secret',
|
||||
name: 'clientSecret',
|
||||
type: 'password'
|
||||
},
|
||||
{
|
||||
label: 'Additional Parameters',
|
||||
name: 'additionalParameters',
|
||||
type: 'string',
|
||||
default: 'access_type=offline&prompt=consent',
|
||||
hidden: true
|
||||
},
|
||||
{
|
||||
label: 'Scope',
|
||||
name: 'scope',
|
||||
type: 'string',
|
||||
hidden: true,
|
||||
default: scopes.join(' ')
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { credClass: GoogleCalendarOAuth2 }
|
||||
|
|
@ -0,0 +1,62 @@
|
|||
import { INodeParams, INodeCredential } from '../src/Interface'
|
||||
const scopes = [
|
||||
'https://www.googleapis.com/auth/documents',
|
||||
'https://www.googleapis.com/auth/drive',
|
||||
'https://www.googleapis.com/auth/drive.file'
|
||||
]
|
||||
|
||||
class GoogleDocsOAuth2 implements INodeCredential {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
inputs: INodeParams[]
|
||||
description: string
|
||||
|
||||
constructor() {
|
||||
this.label = 'Google Docs OAuth2'
|
||||
this.name = 'googleDocsOAuth2'
|
||||
this.version = 1.0
|
||||
this.description =
|
||||
'You can find the setup instructions <a target="_blank" href="https://docs.flowiseai.com/integrations/langchain/tools/google-sheets">here</a>'
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Authorization URL',
|
||||
name: 'authorizationUrl',
|
||||
type: 'string',
|
||||
default: 'https://accounts.google.com/o/oauth2/v2/auth'
|
||||
},
|
||||
{
|
||||
label: 'Access Token URL',
|
||||
name: 'accessTokenUrl',
|
||||
type: 'string',
|
||||
default: 'https://oauth2.googleapis.com/token'
|
||||
},
|
||||
{
|
||||
label: 'Client ID',
|
||||
name: 'clientId',
|
||||
type: 'string'
|
||||
},
|
||||
{
|
||||
label: 'Client Secret',
|
||||
name: 'clientSecret',
|
||||
type: 'password'
|
||||
},
|
||||
{
|
||||
label: 'Additional Parameters',
|
||||
name: 'additionalParameters',
|
||||
type: 'string',
|
||||
default: 'access_type=offline&prompt=consent',
|
||||
hidden: true
|
||||
},
|
||||
{
|
||||
label: 'Scope',
|
||||
name: 'scope',
|
||||
type: 'string',
|
||||
hidden: true,
|
||||
default: scopes.join(' ')
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { credClass: GoogleDocsOAuth2 }
|
||||
|
|
@ -0,0 +1,62 @@
|
|||
import { INodeParams, INodeCredential } from '../src/Interface'
|
||||
const scopes = [
|
||||
'https://www.googleapis.com/auth/drive',
|
||||
'https://www.googleapis.com/auth/drive.appdata',
|
||||
'https://www.googleapis.com/auth/drive.photos.readonly'
|
||||
]
|
||||
|
||||
class GoogleDriveOAuth2 implements INodeCredential {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
inputs: INodeParams[]
|
||||
description: string
|
||||
|
||||
constructor() {
|
||||
this.label = 'Google Drive OAuth2'
|
||||
this.name = 'googleDriveOAuth2'
|
||||
this.version = 1.0
|
||||
this.description =
|
||||
'You can find the setup instructions <a target="_blank" href="https://docs.flowiseai.com/integrations/langchain/tools/google-drive">here</a>'
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Authorization URL',
|
||||
name: 'authorizationUrl',
|
||||
type: 'string',
|
||||
default: 'https://accounts.google.com/o/oauth2/v2/auth'
|
||||
},
|
||||
{
|
||||
label: 'Access Token URL',
|
||||
name: 'accessTokenUrl',
|
||||
type: 'string',
|
||||
default: 'https://oauth2.googleapis.com/token'
|
||||
},
|
||||
{
|
||||
label: 'Client ID',
|
||||
name: 'clientId',
|
||||
type: 'string'
|
||||
},
|
||||
{
|
||||
label: 'Client Secret',
|
||||
name: 'clientSecret',
|
||||
type: 'password'
|
||||
},
|
||||
{
|
||||
label: 'Additional Parameters',
|
||||
name: 'additionalParameters',
|
||||
type: 'string',
|
||||
default: 'access_type=offline&prompt=consent',
|
||||
hidden: true
|
||||
},
|
||||
{
|
||||
label: 'Scope',
|
||||
name: 'scope',
|
||||
type: 'string',
|
||||
hidden: true,
|
||||
default: scopes.join(' ')
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { credClass: GoogleDriveOAuth2 }
|
||||
|
|
@ -0,0 +1,62 @@
|
|||
import { INodeParams, INodeCredential } from '../src/Interface'
|
||||
const scopes = [
|
||||
'https://www.googleapis.com/auth/drive.file',
|
||||
'https://www.googleapis.com/auth/spreadsheets',
|
||||
'https://www.googleapis.com/auth/drive.metadata'
|
||||
]
|
||||
|
||||
class GoogleSheetsOAuth2 implements INodeCredential {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
inputs: INodeParams[]
|
||||
description: string
|
||||
|
||||
constructor() {
|
||||
this.label = 'Google Sheets OAuth2'
|
||||
this.name = 'googleSheetsOAuth2'
|
||||
this.version = 1.0
|
||||
this.description =
|
||||
'You can find the setup instructions <a target="_blank" href="https://docs.flowiseai.com/integrations/langchain/tools/google-sheets">here</a>'
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Authorization URL',
|
||||
name: 'authorizationUrl',
|
||||
type: 'string',
|
||||
default: 'https://accounts.google.com/o/oauth2/v2/auth'
|
||||
},
|
||||
{
|
||||
label: 'Access Token URL',
|
||||
name: 'accessTokenUrl',
|
||||
type: 'string',
|
||||
default: 'https://oauth2.googleapis.com/token'
|
||||
},
|
||||
{
|
||||
label: 'Client ID',
|
||||
name: 'clientId',
|
||||
type: 'string'
|
||||
},
|
||||
{
|
||||
label: 'Client Secret',
|
||||
name: 'clientSecret',
|
||||
type: 'password'
|
||||
},
|
||||
{
|
||||
label: 'Additional Parameters',
|
||||
name: 'additionalParameters',
|
||||
type: 'string',
|
||||
default: 'access_type=offline&prompt=consent',
|
||||
hidden: true
|
||||
},
|
||||
{
|
||||
label: 'Scope',
|
||||
name: 'scope',
|
||||
type: 'string',
|
||||
hidden: true,
|
||||
default: scopes.join(' ')
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { credClass: GoogleSheetsOAuth2 }
|
||||
|
|
@ -0,0 +1,28 @@
|
|||
import { INodeParams, INodeCredential } from '../src/Interface'
|
||||
|
||||
class HTTPApiKeyCredential implements INodeCredential {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
inputs: INodeParams[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'HTTP Api Key'
|
||||
this.name = 'httpApiKey'
|
||||
this.version = 1.0
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Key',
|
||||
name: 'key',
|
||||
type: 'string'
|
||||
},
|
||||
{
|
||||
label: 'Value',
|
||||
name: 'value',
|
||||
type: 'password'
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { credClass: HTTPApiKeyCredential }
|
||||
|
|
@ -0,0 +1,28 @@
|
|||
import { INodeParams, INodeCredential } from '../src/Interface'
|
||||
|
||||
class HttpBasicAuthCredential implements INodeCredential {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
inputs: INodeParams[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'HTTP Basic Auth'
|
||||
this.name = 'httpBasicAuth'
|
||||
this.version = 1.0
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Basic Auth Username',
|
||||
name: 'basicAuthUsername',
|
||||
type: 'string'
|
||||
},
|
||||
{
|
||||
label: 'Basic Auth Password',
|
||||
name: 'basicAuthPassword',
|
||||
type: 'password'
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { credClass: HttpBasicAuthCredential }
|
||||
|
|
@ -0,0 +1,23 @@
|
|||
import { INodeParams, INodeCredential } from '../src/Interface'
|
||||
|
||||
class HTTPBearerTokenCredential implements INodeCredential {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
inputs: INodeParams[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'HTTP Bearer Token'
|
||||
this.name = 'httpBearerToken'
|
||||
this.version = 1.0
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Token',
|
||||
name: 'token',
|
||||
type: 'password'
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { credClass: HTTPBearerTokenCredential }
|
||||
|
|
@ -0,0 +1,69 @@
|
|||
import { INodeParams, INodeCredential } from '../src/Interface'
|
||||
|
||||
class IBMWatsonxCredential implements INodeCredential {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
inputs: INodeParams[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'IBM Watsonx'
|
||||
this.name = 'ibmWatsonx'
|
||||
this.version = 1.0
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Version',
|
||||
name: 'version',
|
||||
type: 'string',
|
||||
placeholder: 'YYYY-MM-DD'
|
||||
},
|
||||
{
|
||||
label: 'Service URL',
|
||||
name: 'serviceUrl',
|
||||
type: 'string',
|
||||
placeholder: '<SERVICE_URL>'
|
||||
},
|
||||
{
|
||||
label: 'Project ID',
|
||||
name: 'projectId',
|
||||
type: 'string',
|
||||
placeholder: '<PROJECT_ID>'
|
||||
},
|
||||
{
|
||||
label: 'Watsonx AI Auth Type',
|
||||
name: 'watsonxAIAuthType',
|
||||
type: 'options',
|
||||
options: [
|
||||
{
|
||||
label: 'IAM',
|
||||
name: 'iam'
|
||||
},
|
||||
{
|
||||
label: 'Bearer Token',
|
||||
name: 'bearertoken'
|
||||
}
|
||||
],
|
||||
default: 'iam'
|
||||
},
|
||||
{
|
||||
label: 'Watsonx AI IAM API Key',
|
||||
name: 'watsonxAIApikey',
|
||||
type: 'password',
|
||||
description: 'API Key for Watsonx AI when using IAM',
|
||||
placeholder: '<YOUR-APIKEY>',
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Watsonx AI Bearer Token',
|
||||
name: 'watsonxAIBearerToken',
|
||||
type: 'password',
|
||||
description: 'Bearer Token for Watsonx AI when using Bearer Token',
|
||||
placeholder: '<YOUR-BEARER-TOKEN>',
|
||||
optional: true
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { credClass: IBMWatsonxCredential }
|
||||
|
|
@ -0,0 +1,33 @@
|
|||
import { INodeParams, INodeCredential } from '../src/Interface'
|
||||
|
||||
class JiraApi implements INodeCredential {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
inputs: INodeParams[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'Jira API'
|
||||
this.name = 'jiraApi'
|
||||
this.version = 1.0
|
||||
this.description =
|
||||
'Refer to <a target="_blank" href="https://support.atlassian.com/atlassian-account/docs/manage-api-tokens-for-your-atlassian-account/">official guide</a> on how to get accessToken on Github'
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'User Name',
|
||||
name: 'username',
|
||||
type: 'string',
|
||||
placeholder: 'username@example.com'
|
||||
},
|
||||
{
|
||||
label: 'Access Token',
|
||||
name: 'accessToken',
|
||||
type: 'password',
|
||||
placeholder: '<JIRA_ACCESS_TOKEN>'
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { credClass: JiraApi }
|
||||
|
|
@ -0,0 +1,23 @@
|
|||
import { INodeParams, INodeCredential } from '../src/Interface'
|
||||
|
||||
class LitellmApi implements INodeCredential {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
inputs: INodeParams[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'Litellm API'
|
||||
this.name = 'litellmApi'
|
||||
this.version = 1.0
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'API Key',
|
||||
name: 'litellmApiKey',
|
||||
type: 'password'
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { credClass: LitellmApi }
|
||||
|
|
@ -0,0 +1,27 @@
|
|||
import { INodeParams, INodeCredential } from '../src/Interface'
|
||||
|
||||
class Mem0MemoryApi implements INodeCredential {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
inputs: INodeParams[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'Mem0 Memory API'
|
||||
this.name = 'mem0MemoryApi'
|
||||
this.version = 1.0
|
||||
this.description =
|
||||
'Visit <a target="_blank" href="https://app.mem0.ai/settings/api-keys">Mem0 Platform</a> to get your API credentials'
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'API Key',
|
||||
name: 'apiKey',
|
||||
type: 'password',
|
||||
description: 'API Key from Mem0 dashboard'
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { credClass: Mem0MemoryApi }
|
||||
|
|
@ -0,0 +1,66 @@
|
|||
import { INodeParams, INodeCredential } from '../src/Interface'
|
||||
|
||||
const scopes = [
|
||||
'openid',
|
||||
'offline_access',
|
||||
'Contacts.Read',
|
||||
'Contacts.ReadWrite',
|
||||
'Calendars.Read',
|
||||
'Calendars.Read.Shared',
|
||||
'Calendars.ReadWrite',
|
||||
'Mail.Read',
|
||||
'Mail.ReadWrite',
|
||||
'Mail.ReadWrite.Shared',
|
||||
'Mail.Send',
|
||||
'Mail.Send.Shared',
|
||||
'MailboxSettings.Read'
|
||||
]
|
||||
|
||||
class MsoftOutlookOAuth2 implements INodeCredential {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
inputs: INodeParams[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'Microsoft Outlook OAuth2'
|
||||
this.name = 'microsoftOutlookOAuth2'
|
||||
this.version = 1.0
|
||||
this.description =
|
||||
'You can find the setup instructions <a target="_blank" href="https://docs.flowiseai.com/integrations/langchain/tools/microsoft-outlook">here</a>'
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Authorization URL',
|
||||
name: 'authorizationUrl',
|
||||
type: 'string',
|
||||
default: 'https://login.microsoftonline.com/<tenantId>/oauth2/v2.0/authorize'
|
||||
},
|
||||
{
|
||||
label: 'Access Token URL',
|
||||
name: 'accessTokenUrl',
|
||||
type: 'string',
|
||||
default: 'https://login.microsoftonline.com/<tenantId>/oauth2/v2.0/token'
|
||||
},
|
||||
{
|
||||
label: 'Client ID',
|
||||
name: 'clientId',
|
||||
type: 'string'
|
||||
},
|
||||
{
|
||||
label: 'Client Secret',
|
||||
name: 'clientSecret',
|
||||
type: 'password'
|
||||
},
|
||||
{
|
||||
label: 'Scope',
|
||||
name: 'scope',
|
||||
type: 'string',
|
||||
hidden: true,
|
||||
default: scopes.join(' ')
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { credClass: MsoftOutlookOAuth2 }
|
||||
|
|
@ -0,0 +1,87 @@
|
|||
import { INodeParams, INodeCredential } from '../src/Interface'
|
||||
|
||||
// Comprehensive scopes for Microsoft Teams operations
|
||||
const scopes = [
|
||||
// Basic authentication
|
||||
'openid',
|
||||
'offline_access',
|
||||
|
||||
// User permissions
|
||||
'User.Read',
|
||||
'User.ReadWrite.All',
|
||||
|
||||
// Teams and Groups
|
||||
'Group.ReadWrite.All',
|
||||
'Team.ReadBasic.All',
|
||||
'Team.Create',
|
||||
'TeamMember.ReadWrite.All',
|
||||
|
||||
// Channels
|
||||
'Channel.ReadBasic.All',
|
||||
'Channel.Create',
|
||||
'Channel.Delete.All',
|
||||
'ChannelMember.ReadWrite.All',
|
||||
|
||||
// Chat operations
|
||||
'Chat.ReadWrite',
|
||||
'Chat.Create',
|
||||
'ChatMember.ReadWrite',
|
||||
|
||||
// Messages
|
||||
'ChatMessage.Send',
|
||||
'ChatMessage.Read',
|
||||
'ChannelMessage.Send',
|
||||
'ChannelMessage.Read.All',
|
||||
|
||||
// Reactions and advanced features
|
||||
'TeamsActivity.Send'
|
||||
]
|
||||
|
||||
class MsoftTeamsOAuth2 implements INodeCredential {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
inputs: INodeParams[]
|
||||
description: string
|
||||
|
||||
constructor() {
|
||||
this.label = 'Microsoft Teams OAuth2'
|
||||
this.name = 'microsoftTeamsOAuth2'
|
||||
this.version = 1.0
|
||||
this.description =
|
||||
'You can find the setup instructions <a target="_blank" href="https://docs.flowiseai.com/integrations/langchain/tools/microsoft-teams">here</a>'
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Authorization URL',
|
||||
name: 'authorizationUrl',
|
||||
type: 'string',
|
||||
default: 'https://login.microsoftonline.com/<tenantId>/oauth2/v2.0/authorize'
|
||||
},
|
||||
{
|
||||
label: 'Access Token URL',
|
||||
name: 'accessTokenUrl',
|
||||
type: 'string',
|
||||
default: 'https://login.microsoftonline.com/<tenantId>/oauth2/v2.0/token'
|
||||
},
|
||||
{
|
||||
label: 'Client ID',
|
||||
name: 'clientId',
|
||||
type: 'string'
|
||||
},
|
||||
{
|
||||
label: 'Client Secret',
|
||||
name: 'clientSecret',
|
||||
type: 'password'
|
||||
},
|
||||
{
|
||||
label: 'Scope',
|
||||
name: 'scope',
|
||||
type: 'string',
|
||||
hidden: true,
|
||||
default: scopes.join(' ')
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { credClass: MsoftTeamsOAuth2 }
|
||||
|
|
@ -0,0 +1,39 @@
|
|||
import { INodeParams, INodeCredential } from '../src/Interface'
|
||||
|
||||
class Neo4jApi implements INodeCredential {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
inputs: INodeParams[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'Neo4j API'
|
||||
this.name = 'neo4jApi'
|
||||
this.version = 1.0
|
||||
this.description =
|
||||
'Refer to <a target="_blank" href="https://neo4j.com/docs/operations-manual/current/authentication-authorization/">official guide</a> on Neo4j authentication'
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Neo4j URL',
|
||||
name: 'url',
|
||||
type: 'string',
|
||||
description: 'Your Neo4j instance URL (e.g., neo4j://localhost:7687)'
|
||||
},
|
||||
{
|
||||
label: 'Username',
|
||||
name: 'username',
|
||||
type: 'string',
|
||||
description: 'Neo4j database username'
|
||||
},
|
||||
{
|
||||
label: 'Password',
|
||||
name: 'password',
|
||||
type: 'password',
|
||||
description: 'Neo4j database password'
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { credClass: Neo4jApi }
|
||||
|
|
@ -0,0 +1,24 @@
|
|||
import { INodeCredential, INodeParams } from '../src/Interface'
|
||||
|
||||
class NvidiaNIMApi implements INodeCredential {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
inputs: INodeParams[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'NVIDIA NGC API Key'
|
||||
this.name = 'nvidiaNIMApi'
|
||||
this.version = 1.0
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'NVIDIA NGC API Key',
|
||||
name: 'nvidiaNIMApiKey',
|
||||
type: 'password'
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { credClass: NvidiaNIMApi }
|
||||
|
|
@ -1,6 +1,6 @@
|
|||
import { INodeParams, INodeCredential } from '../src/Interface'
|
||||
|
||||
class OpenAPIAuth implements INodeCredential {
|
||||
class OpenRouterAPIAuth implements INodeCredential {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
|
|
@ -8,18 +8,18 @@ class OpenAPIAuth implements INodeCredential {
|
|||
inputs: INodeParams[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'OpenAPI Auth Token'
|
||||
this.name = 'openAPIAuth'
|
||||
this.label = 'OpenRouter API Key'
|
||||
this.name = 'openRouterApi'
|
||||
this.version = 1.0
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'OpenAPI Token',
|
||||
name: 'openAPIToken',
|
||||
label: 'OpenRouter API Key',
|
||||
name: 'openRouterApiKey',
|
||||
type: 'password',
|
||||
description: 'Auth Token. For example: Bearer <TOKEN>'
|
||||
description: 'API Key'
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { credClass: OpenAPIAuth }
|
||||
module.exports = { credClass: OpenRouterAPIAuth }
|
||||
|
|
@ -0,0 +1,39 @@
|
|||
import { INodeParams, INodeCredential } from '../src/Interface'
|
||||
|
||||
class OpikApi implements INodeCredential {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
inputs: INodeParams[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'Opik API'
|
||||
this.name = 'opikApi'
|
||||
this.version = 1.0
|
||||
this.description =
|
||||
'Refer to <a target="_blank" href="https://www.comet.com/docs/opik/tracing/sdk_configuration">Opik documentation</a> on how to configure Opik credentials'
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'API Key',
|
||||
name: 'opikApiKey',
|
||||
type: 'password',
|
||||
placeholder: '<OPIK_API_KEY>'
|
||||
},
|
||||
{
|
||||
label: 'URL',
|
||||
name: 'opikUrl',
|
||||
type: 'string',
|
||||
placeholder: 'https://www.comet.com/opik/api'
|
||||
},
|
||||
{
|
||||
label: 'Workspace',
|
||||
name: 'opikWorkspace',
|
||||
type: 'string',
|
||||
placeholder: 'default'
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { credClass: OpikApi }
|
||||
|
|
@ -0,0 +1,30 @@
|
|||
import { INodeParams, INodeCredential } from '../src/Interface'
|
||||
|
||||
class OxylabsApiCredential implements INodeCredential {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
inputs: INodeParams[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'Oxylabs API'
|
||||
this.name = 'oxylabsApi'
|
||||
this.version = 1.0
|
||||
this.description = 'Oxylabs API credentials description, to add more info'
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Oxylabs Username',
|
||||
name: 'username',
|
||||
type: 'string'
|
||||
},
|
||||
{
|
||||
label: 'Oxylabs Password',
|
||||
name: 'password',
|
||||
type: 'password'
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { credClass: OxylabsApiCredential }
|
||||
|
|
@ -0,0 +1,27 @@
|
|||
import { INodeParams, INodeCredential } from '../src/Interface'
|
||||
|
||||
class PerplexityApi implements INodeCredential {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
inputs: INodeParams[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'Perplexity API'
|
||||
this.name = 'perplexityApi'
|
||||
this.version = 1.0
|
||||
this.description =
|
||||
'Refer to <a target="_blank" href="https://docs.perplexity.ai/docs/getting-started">official guide</a> on how to get API key'
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Perplexity API Key',
|
||||
name: 'perplexityApiKey',
|
||||
type: 'password',
|
||||
placeholder: '<PERPLEXITY_API_KEY>'
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { credClass: PerplexityApi }
|
||||
|
|
@ -0,0 +1,33 @@
|
|||
import { INodeParams, INodeCredential } from '../src/Interface'
|
||||
|
||||
class PhoenixApi implements INodeCredential {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
inputs: INodeParams[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'Phoenix API'
|
||||
this.name = 'phoenixApi'
|
||||
this.version = 1.0
|
||||
this.description =
|
||||
'Refer to <a target="_blank" href="https://docs.arize.com/phoenix">official guide</a> on how to get API keys on Phoenix.'
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'API Key',
|
||||
name: 'phoenixApiKey',
|
||||
type: 'password',
|
||||
placeholder: '<PHOENIX_API_KEY>'
|
||||
},
|
||||
{
|
||||
label: 'Endpoint',
|
||||
name: 'phoenixEndpoint',
|
||||
type: 'string',
|
||||
default: 'https://app.phoenix.arize.com'
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { credClass: PhoenixApi }
|
||||
|
|
@ -0,0 +1,25 @@
|
|||
import { INodeParams, INodeCredential } from '../src/Interface'
|
||||
|
||||
class PostgresUrl implements INodeCredential {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
inputs: INodeParams[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'Postgres URL'
|
||||
this.name = 'PostgresUrl'
|
||||
this.version = 1.0
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Postgres URL',
|
||||
name: 'postgresUrl',
|
||||
type: 'string',
|
||||
placeholder: 'postgresql://localhost/mydb'
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { credClass: PostgresUrl }
|
||||
|
|
@ -22,7 +22,7 @@ class RedisCacheApi implements INodeCredential {
|
|||
label: 'Port',
|
||||
name: 'redisCachePort',
|
||||
type: 'number',
|
||||
default: '6789'
|
||||
default: '6379'
|
||||
},
|
||||
{
|
||||
label: 'User',
|
||||
|
|
|
|||
|
|
@ -0,0 +1,23 @@
|
|||
import { INodeParams, INodeCredential } from '../src/Interface'
|
||||
|
||||
class SambanovaApi implements INodeCredential {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
inputs: INodeParams[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'Sambanova API'
|
||||
this.name = 'sambanovaApi'
|
||||
this.version = 1.0
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Sambanova Api Key',
|
||||
name: 'sambanovaApiKey',
|
||||
type: 'password'
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { credClass: SambanovaApi }
|
||||
|
|
@ -0,0 +1,32 @@
|
|||
import { INodeParams, INodeCredential } from '../src/Interface'
|
||||
|
||||
class SlackApi implements INodeCredential {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
inputs: INodeParams[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'Slack API'
|
||||
this.name = 'slackApi'
|
||||
this.version = 1.0
|
||||
this.description =
|
||||
'Refer to <a target="_blank" href="https://github.com/modelcontextprotocol/servers/tree/main/src/slack">official guide</a> on how to get botToken and teamId on Slack'
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Bot Token',
|
||||
name: 'botToken',
|
||||
type: 'password'
|
||||
},
|
||||
{
|
||||
label: 'Team ID',
|
||||
name: 'teamId',
|
||||
type: 'string',
|
||||
placeholder: '<SLACK_TEAM_ID>'
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { credClass: SlackApi }
|
||||
|
|
@ -0,0 +1,25 @@
|
|||
import { INodeParams, INodeCredential } from '../src/Interface'
|
||||
|
||||
class TavilyApi implements INodeCredential {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
inputs: INodeParams[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'Tavily API'
|
||||
this.name = 'tavilyApi'
|
||||
this.version = 1.1
|
||||
this.description = 'Tavily API is a search engine designed for LLMs and AI agents'
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Tavily Api Key',
|
||||
name: 'tavilyApiKey',
|
||||
type: 'password'
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { credClass: TavilyApi }
|
||||
|
|
@ -0,0 +1,26 @@
|
|||
import { INodeParams, INodeCredential } from '../src/Interface'
|
||||
|
||||
class TeradataBearerTokenCredential implements INodeCredential {
|
||||
label: string
|
||||
name: string
|
||||
description: string
|
||||
version: number
|
||||
inputs: INodeParams[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'Teradata Bearer Token'
|
||||
this.name = 'teradataBearerToken'
|
||||
this.version = 1.0
|
||||
this.description =
|
||||
'Refer to <a target="_blank" href="https://docs.teradata.com/r/Enterprise_IntelliFlex_VMware/Teradata-Vector-Store-User-Guide/Setting-up-Vector-Store/Importing-Modules-Required-for-Vector-Store">official guide</a> on how to get Teradata Bearer Token'
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Token',
|
||||
name: 'token',
|
||||
type: 'password'
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { credClass: TeradataBearerTokenCredential }
|
||||
|
|
@ -0,0 +1,28 @@
|
|||
import { INodeParams, INodeCredential } from '../src/Interface'
|
||||
|
||||
class TeradataTD2Credential implements INodeCredential {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
inputs: INodeParams[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'Teradata TD2 Auth'
|
||||
this.name = 'teradataTD2Auth'
|
||||
this.version = 1.0
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Teradata TD2 Auth Username',
|
||||
name: 'tdUsername',
|
||||
type: 'string'
|
||||
},
|
||||
{
|
||||
label: 'Teradata TD2 Auth Password',
|
||||
name: 'tdPassword',
|
||||
type: 'password'
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { credClass: TeradataTD2Credential }
|
||||
|
|
@ -0,0 +1,47 @@
|
|||
import { INodeParams, INodeCredential } from '../src/Interface'
|
||||
|
||||
class TeradataVectorStoreApiCredentials implements INodeCredential {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
inputs: INodeParams[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'Teradata Vector Store API Credentials'
|
||||
this.name = 'teradataVectorStoreApiCredentials'
|
||||
this.version = 1.0
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Teradata Host IP',
|
||||
name: 'tdHostIp',
|
||||
type: 'string'
|
||||
},
|
||||
{
|
||||
label: 'Username',
|
||||
name: 'tdUsername',
|
||||
type: 'string'
|
||||
},
|
||||
{
|
||||
label: 'Password',
|
||||
name: 'tdPassword',
|
||||
type: 'password'
|
||||
},
|
||||
{
|
||||
label: 'Vector_Store_Base_URL',
|
||||
name: 'baseURL',
|
||||
description: 'Teradata Vector Store Base URL',
|
||||
placeholder: `Base_URL`,
|
||||
type: 'string'
|
||||
},
|
||||
{
|
||||
label: 'JWT Token',
|
||||
name: 'jwtToken',
|
||||
type: 'password',
|
||||
description: 'Bearer token for JWT authentication',
|
||||
optional: true
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { credClass: TeradataVectorStoreApiCredentials }
|
||||
|
|
@ -0,0 +1,25 @@
|
|||
import { INodeParams, INodeCredential } from '../src/Interface'
|
||||
|
||||
class WolframAlphaApp implements INodeCredential {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
inputs: INodeParams[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'WolframAlpha App ID'
|
||||
this.name = 'wolframAlphaAppId'
|
||||
this.version = 1.0
|
||||
this.description = 'Get an App Id from <a target="_blank" href="https://developer.wolframalpha.com">Wolfram Alpha Portal</a>'
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'App ID',
|
||||
name: 'wolframAlphaAppId',
|
||||
type: 'password'
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { credClass: WolframAlphaApp }
|
||||
|
|
@ -0,0 +1,24 @@
|
|||
import { INodeParams, INodeCredential } from '../src/Interface'
|
||||
|
||||
class XaiApi implements INodeCredential {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
inputs: INodeParams[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'Xai API'
|
||||
this.name = 'xaiApi'
|
||||
this.version = 1.0
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'X AI API Key',
|
||||
name: 'xaiApiKey',
|
||||
type: 'password'
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { credClass: XaiApi }
|
||||
|
|
@ -0,0 +1,165 @@
|
|||
import { RunCollectorCallbackHandler } from '@langchain/core/tracers/run_collector'
|
||||
import { Run } from '@langchain/core/tracers/base'
|
||||
import { EvaluationRunner } from './EvaluationRunner'
|
||||
import { encoding_for_model, get_encoding } from '@dqbd/tiktoken'
|
||||
|
||||
export class EvaluationRunTracer extends RunCollectorCallbackHandler {
|
||||
evaluationRunId: string
|
||||
model: string
|
||||
|
||||
constructor(id: string) {
|
||||
super()
|
||||
this.evaluationRunId = id
|
||||
}
|
||||
|
||||
async persistRun(run: Run): Promise<void> {
|
||||
return super.persistRun(run)
|
||||
}
|
||||
|
||||
countPromptTokens = (encoding: any, run: Run): number => {
|
||||
let promptTokenCount = 0
|
||||
if (encoding) {
|
||||
if (run.inputs?.messages?.length > 0 && run.inputs?.messages[0]?.length > 0) {
|
||||
run.inputs.messages[0].map((message: any) => {
|
||||
let content = message.content
|
||||
? message.content
|
||||
: message.SystemMessage?.content
|
||||
? message.SystemMessage.content
|
||||
: message.HumanMessage?.content
|
||||
? message.HumanMessage.content
|
||||
: message.AIMessage?.content
|
||||
? message.AIMessage.content
|
||||
: undefined
|
||||
promptTokenCount += content ? encoding.encode(content).length : 0
|
||||
})
|
||||
}
|
||||
if (run.inputs?.prompts?.length > 0) {
|
||||
const content = run.inputs.prompts[0]
|
||||
promptTokenCount += content ? encoding.encode(content).length : 0
|
||||
}
|
||||
}
|
||||
return promptTokenCount
|
||||
}
|
||||
|
||||
countCompletionTokens = (encoding: any, run: Run): number => {
|
||||
let completionTokenCount = 0
|
||||
if (encoding) {
|
||||
if (run.outputs?.generations?.length > 0 && run.outputs?.generations[0]?.length > 0) {
|
||||
run.outputs?.generations[0].map((chunk: any) => {
|
||||
let content = chunk.text ? chunk.text : chunk.message?.content ? chunk.message?.content : undefined
|
||||
completionTokenCount += content ? encoding.encode(content).length : 0
|
||||
})
|
||||
}
|
||||
}
|
||||
return completionTokenCount
|
||||
}
|
||||
|
||||
extractModelName = (run: Run): string => {
|
||||
return (
|
||||
(run?.serialized as any)?.kwargs?.model ||
|
||||
(run?.serialized as any)?.kwargs?.model_name ||
|
||||
(run?.extra as any)?.metadata?.ls_model_name ||
|
||||
(run?.extra as any)?.metadata?.fw_model_name
|
||||
)
|
||||
}
|
||||
|
||||
onLLMEnd?(run: Run): void | Promise<void> {
|
||||
if (run.name) {
|
||||
let provider = run.name
|
||||
if (provider === 'BedrockChat') {
|
||||
provider = 'awsChatBedrock'
|
||||
}
|
||||
EvaluationRunner.addMetrics(
|
||||
this.evaluationRunId,
|
||||
JSON.stringify({
|
||||
provider: provider
|
||||
})
|
||||
)
|
||||
}
|
||||
|
||||
let model = this.extractModelName(run)
|
||||
if (run.outputs?.llmOutput?.tokenUsage) {
|
||||
const tokenUsage = run.outputs?.llmOutput?.tokenUsage
|
||||
if (tokenUsage) {
|
||||
const metric = {
|
||||
completionTokens: tokenUsage.completionTokens,
|
||||
promptTokens: tokenUsage.promptTokens,
|
||||
model: model,
|
||||
totalTokens: tokenUsage.totalTokens
|
||||
}
|
||||
EvaluationRunner.addMetrics(this.evaluationRunId, JSON.stringify(metric))
|
||||
}
|
||||
} else if (
|
||||
run.outputs?.generations?.length > 0 &&
|
||||
run.outputs?.generations[0].length > 0 &&
|
||||
run.outputs?.generations[0][0]?.message?.usage_metadata?.total_tokens
|
||||
) {
|
||||
const usage_metadata = run.outputs?.generations[0][0]?.message?.usage_metadata
|
||||
if (usage_metadata) {
|
||||
const metric = {
|
||||
completionTokens: usage_metadata.output_tokens,
|
||||
promptTokens: usage_metadata.input_tokens,
|
||||
model: model || this.model,
|
||||
totalTokens: usage_metadata.total_tokens
|
||||
}
|
||||
EvaluationRunner.addMetrics(this.evaluationRunId, JSON.stringify(metric))
|
||||
}
|
||||
} else {
|
||||
let encoding: any = undefined
|
||||
let promptInputTokens = 0
|
||||
let completionTokenCount = 0
|
||||
try {
|
||||
encoding = encoding_for_model(model as any)
|
||||
promptInputTokens = this.countPromptTokens(encoding, run)
|
||||
completionTokenCount = this.countCompletionTokens(encoding, run)
|
||||
} catch (e) {
|
||||
try {
|
||||
// as tiktoken will fail for non openai models, assume that is 'cl100k_base'
|
||||
encoding = get_encoding('cl100k_base')
|
||||
promptInputTokens = this.countPromptTokens(encoding, run)
|
||||
completionTokenCount = this.countCompletionTokens(encoding, run)
|
||||
} catch (e) {
|
||||
// stay silent
|
||||
}
|
||||
}
|
||||
const metric = {
|
||||
completionTokens: completionTokenCount,
|
||||
promptTokens: promptInputTokens,
|
||||
model: model,
|
||||
totalTokens: promptInputTokens + completionTokenCount
|
||||
}
|
||||
EvaluationRunner.addMetrics(this.evaluationRunId, JSON.stringify(metric))
|
||||
//cleanup
|
||||
this.model = ''
|
||||
}
|
||||
}
|
||||
|
||||
async onRunUpdate(run: Run): Promise<void> {
|
||||
const json = {
|
||||
[run.run_type]: elapsed(run)
|
||||
}
|
||||
let metric = JSON.stringify(json)
|
||||
if (metric) {
|
||||
EvaluationRunner.addMetrics(this.evaluationRunId, metric)
|
||||
}
|
||||
|
||||
if (run.run_type === 'llm') {
|
||||
let model = this.extractModelName(run)
|
||||
if (model) {
|
||||
EvaluationRunner.addMetrics(this.evaluationRunId, JSON.stringify({ model: model }))
|
||||
this.model = model
|
||||
}
|
||||
// OpenAI non streaming models
|
||||
const estimatedTokenUsage = run.outputs?.llmOutput?.estimatedTokenUsage
|
||||
if (estimatedTokenUsage && typeof estimatedTokenUsage === 'object' && Object.keys(estimatedTokenUsage).length > 0) {
|
||||
EvaluationRunner.addMetrics(this.evaluationRunId, estimatedTokenUsage)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function elapsed(run: Run) {
|
||||
if (!run.end_time) return ''
|
||||
const elapsed = run.end_time - run.start_time
|
||||
return `${elapsed.toFixed(2)}`
|
||||
}
|
||||
|
|
@ -0,0 +1,186 @@
|
|||
import { ChatMessage, LLMEndEvent, LLMStartEvent, LLMStreamEvent, MessageContentTextDetail, RetrievalEndEvent, Settings } from 'llamaindex'
|
||||
import { EvaluationRunner } from './EvaluationRunner'
|
||||
import { additionalCallbacks, ICommonObject, INodeData } from '../src'
|
||||
import { RetrievalStartEvent } from 'llamaindex/dist/type/llm/types'
|
||||
import { AgentEndEvent, AgentStartEvent } from 'llamaindex/dist/type/agent/types'
|
||||
import { encoding_for_model } from '@dqbd/tiktoken'
|
||||
import { MessageContent } from '@langchain/core/messages'
|
||||
|
||||
export class EvaluationRunTracerLlama {
|
||||
evaluationRunId: string
|
||||
static cbInit = false
|
||||
static startTimes = new Map<string, number>()
|
||||
static models = new Map<string, string>()
|
||||
static tokenCounts = new Map<string, number>()
|
||||
|
||||
constructor(id: string) {
|
||||
this.evaluationRunId = id
|
||||
EvaluationRunTracerLlama.constructCallBacks()
|
||||
}
|
||||
|
||||
static constructCallBacks = () => {
|
||||
if (!EvaluationRunTracerLlama.cbInit) {
|
||||
Settings.callbackManager.on('llm-start', (event: LLMStartEvent) => {
|
||||
const evalID = (event as any).reason.parent?.caller?.evaluationRunId || (event as any).reason.caller?.evaluationRunId
|
||||
if (!evalID) return
|
||||
const model = (event as any).reason?.caller?.model
|
||||
if (model) {
|
||||
EvaluationRunTracerLlama.models.set(evalID, model)
|
||||
try {
|
||||
const encoding = encoding_for_model(model)
|
||||
if (encoding) {
|
||||
const { messages } = event.detail.payload
|
||||
let tokenCount = messages.reduce((count: number, message: ChatMessage) => {
|
||||
return count + encoding.encode(extractText(message.content)).length
|
||||
}, 0)
|
||||
EvaluationRunTracerLlama.tokenCounts.set(evalID + '_promptTokens', tokenCount)
|
||||
EvaluationRunTracerLlama.tokenCounts.set(evalID + '_outputTokens', 0)
|
||||
}
|
||||
} catch (e) {
|
||||
// catch the error and continue to work.
|
||||
}
|
||||
}
|
||||
EvaluationRunTracerLlama.startTimes.set(evalID + '_llm', event.timeStamp)
|
||||
})
|
||||
Settings.callbackManager.on('llm-end', (event: LLMEndEvent) => {
|
||||
this.calculateAndSetMetrics(event, 'llm')
|
||||
})
|
||||
Settings.callbackManager.on('llm-stream', (event: LLMStreamEvent) => {
|
||||
const evalID = (event as any).reason.parent?.caller?.evaluationRunId || (event as any).reason.caller?.evaluationRunId
|
||||
if (!evalID) return
|
||||
const { chunk } = event.detail.payload
|
||||
const { delta } = chunk
|
||||
const model = (event as any).reason?.caller?.model
|
||||
try {
|
||||
const encoding = encoding_for_model(model)
|
||||
if (encoding) {
|
||||
let tokenCount = EvaluationRunTracerLlama.tokenCounts.get(evalID + '_outputTokens') || 0
|
||||
tokenCount += encoding.encode(extractText(delta)).length
|
||||
EvaluationRunTracerLlama.tokenCounts.set(evalID + '_outputTokens', tokenCount)
|
||||
}
|
||||
} catch (e) {
|
||||
// catch the error and continue to work.
|
||||
}
|
||||
})
|
||||
Settings.callbackManager.on('retrieve-start', (event: RetrievalStartEvent) => {
|
||||
const evalID = (event as any).reason.parent?.caller?.evaluationRunId || (event as any).reason.caller?.evaluationRunId
|
||||
if (evalID) {
|
||||
EvaluationRunTracerLlama.startTimes.set(evalID + '_retriever', event.timeStamp)
|
||||
}
|
||||
})
|
||||
Settings.callbackManager.on('retrieve-end', (event: RetrievalEndEvent) => {
|
||||
this.calculateAndSetMetrics(event, 'retriever')
|
||||
})
|
||||
Settings.callbackManager.on('agent-start', (event: AgentStartEvent) => {
|
||||
const evalID = (event as any).reason.parent?.caller?.evaluationRunId || (event as any).reason.caller?.evaluationRunId
|
||||
if (evalID) {
|
||||
EvaluationRunTracerLlama.startTimes.set(evalID + '_agent', event.timeStamp)
|
||||
}
|
||||
})
|
||||
Settings.callbackManager.on('agent-end', (event: AgentEndEvent) => {
|
||||
this.calculateAndSetMetrics(event, 'agent')
|
||||
})
|
||||
EvaluationRunTracerLlama.cbInit = true
|
||||
}
|
||||
}
|
||||
|
||||
private static calculateAndSetMetrics(event: any, label: string) {
|
||||
const evalID = event.reason.parent?.caller?.evaluationRunId || event.reason.caller?.evaluationRunId
|
||||
if (!evalID) return
|
||||
const startTime = EvaluationRunTracerLlama.startTimes.get(evalID + '_' + label) as number
|
||||
let model =
|
||||
(event as any).reason?.caller?.model || (event as any).reason?.caller?.llm?.model || EvaluationRunTracerLlama.models.get(evalID)
|
||||
|
||||
if (event.detail.payload?.response?.message && model) {
|
||||
try {
|
||||
const encoding = encoding_for_model(model)
|
||||
if (encoding) {
|
||||
let tokenCount = EvaluationRunTracerLlama.tokenCounts.get(evalID + '_outputTokens') || 0
|
||||
tokenCount += encoding.encode(event.detail.payload.response?.message?.content || '').length
|
||||
EvaluationRunTracerLlama.tokenCounts.set(evalID + '_outputTokens', tokenCount)
|
||||
}
|
||||
} catch (e) {
|
||||
// catch the error and continue to work.
|
||||
}
|
||||
}
|
||||
|
||||
// Anthropic
|
||||
if (event.detail?.payload?.response?.raw?.usage) {
|
||||
const usage = event.detail.payload.response.raw.usage
|
||||
if (usage.output_tokens) {
|
||||
const metric = {
|
||||
completionTokens: usage.output_tokens,
|
||||
promptTokens: usage.input_tokens,
|
||||
model: model,
|
||||
totalTokens: usage.input_tokens + usage.output_tokens
|
||||
}
|
||||
EvaluationRunner.addMetrics(evalID, JSON.stringify(metric))
|
||||
} else if (usage.completion_tokens) {
|
||||
const metric = {
|
||||
completionTokens: usage.completion_tokens,
|
||||
promptTokens: usage.prompt_tokens,
|
||||
model: model,
|
||||
totalTokens: usage.total_tokens
|
||||
}
|
||||
EvaluationRunner.addMetrics(evalID, JSON.stringify(metric))
|
||||
}
|
||||
} else if (event.detail?.payload?.response?.raw['amazon-bedrock-invocationMetrics']) {
|
||||
const usage = event.detail?.payload?.response?.raw['amazon-bedrock-invocationMetrics']
|
||||
const metric = {
|
||||
completionTokens: usage.outputTokenCount,
|
||||
promptTokens: usage.inputTokenCount,
|
||||
model: event.detail?.payload?.response?.raw.model,
|
||||
totalTokens: usage.inputTokenCount + usage.outputTokenCount
|
||||
}
|
||||
EvaluationRunner.addMetrics(evalID, JSON.stringify(metric))
|
||||
} else {
|
||||
const metric = {
|
||||
[label]: (event.timeStamp - startTime).toFixed(2),
|
||||
completionTokens: EvaluationRunTracerLlama.tokenCounts.get(evalID + '_outputTokens'),
|
||||
promptTokens: EvaluationRunTracerLlama.tokenCounts.get(evalID + '_promptTokens'),
|
||||
model: model || EvaluationRunTracerLlama.models.get(evalID) || '',
|
||||
totalTokens:
|
||||
(EvaluationRunTracerLlama.tokenCounts.get(evalID + '_outputTokens') || 0) +
|
||||
(EvaluationRunTracerLlama.tokenCounts.get(evalID + '_promptTokens') || 0)
|
||||
}
|
||||
EvaluationRunner.addMetrics(evalID, JSON.stringify(metric))
|
||||
}
|
||||
|
||||
//cleanup
|
||||
EvaluationRunTracerLlama.startTimes.delete(evalID + '_' + label)
|
||||
EvaluationRunTracerLlama.startTimes.delete(evalID + '_outputTokens')
|
||||
EvaluationRunTracerLlama.startTimes.delete(evalID + '_promptTokens')
|
||||
EvaluationRunTracerLlama.models.delete(evalID)
|
||||
}
|
||||
|
||||
static async injectEvaluationMetadata(nodeData: INodeData, options: ICommonObject, callerObj: any) {
|
||||
if (options.evaluationRunId && callerObj) {
|
||||
// these are needed for evaluation runs
|
||||
options.llamaIndex = true
|
||||
await additionalCallbacks(nodeData, options)
|
||||
Object.defineProperty(callerObj, 'evaluationRunId', {
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: true,
|
||||
value: options.evaluationRunId
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// from https://github.com/run-llama/LlamaIndexTS/blob/main/packages/core/src/llm/utils.ts
|
||||
export function extractText(message: MessageContent): string {
|
||||
if (typeof message !== 'string' && !Array.isArray(message)) {
|
||||
console.warn('extractText called with non-MessageContent message, this is likely a bug.')
|
||||
return `${message}`
|
||||
} else if (typeof message !== 'string' && Array.isArray(message)) {
|
||||
// message is of type MessageContentDetail[] - retrieve just the text parts and concatenate them
|
||||
// so we can pass them to the context generator
|
||||
return message
|
||||
.filter((c): c is MessageContentTextDetail => c.type === 'text')
|
||||
.map((c) => c.text)
|
||||
.join('\n\n')
|
||||
} else {
|
||||
return message
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,226 @@
|
|||
import axios from 'axios'
|
||||
import { v4 as uuidv4 } from 'uuid'
|
||||
import { ICommonObject } from '../src'
|
||||
|
||||
import { getModelConfigByModelName, MODEL_TYPE } from '../src/modelLoader'
|
||||
|
||||
export class EvaluationRunner {
|
||||
static metrics = new Map<string, string[]>()
|
||||
|
||||
static getCostMetrics = async (selectedProvider: string, selectedModel: string) => {
|
||||
let modelConfig = await getModelConfigByModelName(MODEL_TYPE.CHAT, selectedProvider, selectedModel)
|
||||
if (modelConfig) {
|
||||
if (modelConfig['cost_values']) {
|
||||
return modelConfig.cost_values
|
||||
}
|
||||
return { cost_values: modelConfig }
|
||||
} else {
|
||||
modelConfig = await getModelConfigByModelName(MODEL_TYPE.LLM, selectedProvider, selectedModel)
|
||||
if (modelConfig) {
|
||||
if (modelConfig['cost_values']) {
|
||||
return modelConfig.cost_values
|
||||
}
|
||||
return { cost_values: modelConfig }
|
||||
}
|
||||
}
|
||||
return undefined
|
||||
}
|
||||
|
||||
static async getAndDeleteMetrics(id: string) {
|
||||
const val = EvaluationRunner.metrics.get(id)
|
||||
if (val) {
|
||||
try {
|
||||
//first lets get the provider and model
|
||||
let selectedModel = undefined
|
||||
let selectedProvider = undefined
|
||||
if (val && val.length > 0) {
|
||||
let modelName = ''
|
||||
let providerName = ''
|
||||
for (let i = 0; i < val.length; i++) {
|
||||
const metric = val[i]
|
||||
if (typeof metric === 'object') {
|
||||
modelName = metric['model']
|
||||
providerName = metric['provider']
|
||||
} else {
|
||||
modelName = JSON.parse(metric)['model']
|
||||
providerName = JSON.parse(metric)['provider']
|
||||
}
|
||||
|
||||
if (modelName) {
|
||||
selectedModel = modelName
|
||||
}
|
||||
if (providerName) {
|
||||
selectedProvider = providerName
|
||||
}
|
||||
}
|
||||
}
|
||||
if (selectedProvider && selectedModel) {
|
||||
const modelConfig = await EvaluationRunner.getCostMetrics(selectedProvider, selectedModel)
|
||||
if (modelConfig) {
|
||||
val.push(JSON.stringify({ cost_values: modelConfig }))
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
//stay silent
|
||||
}
|
||||
}
|
||||
EvaluationRunner.metrics.delete(id)
|
||||
return val
|
||||
}
|
||||
|
||||
static addMetrics(id: string, metric: string) {
|
||||
if (EvaluationRunner.metrics.has(id)) {
|
||||
EvaluationRunner.metrics.get(id)?.push(metric)
|
||||
} else {
|
||||
EvaluationRunner.metrics.set(id, [metric])
|
||||
}
|
||||
}
|
||||
|
||||
baseURL = ''
|
||||
|
||||
constructor(baseURL: string) {
|
||||
this.baseURL = baseURL
|
||||
}
|
||||
|
||||
getChatflowApiKey(chatflowId: string, apiKeys: { chatflowId: string; apiKey: string }[] = []) {
|
||||
return apiKeys.find((item) => item.chatflowId === chatflowId)?.apiKey || ''
|
||||
}
|
||||
|
||||
public async runEvaluations(data: ICommonObject) {
|
||||
const chatflowIds = JSON.parse(data.chatflowId)
|
||||
const returnData: ICommonObject = {}
|
||||
returnData.evaluationId = data.evaluationId
|
||||
returnData.runDate = new Date()
|
||||
returnData.rows = []
|
||||
for (let i = 0; i < data.dataset.rows.length; i++) {
|
||||
returnData.rows.push({
|
||||
input: data.dataset.rows[i].input,
|
||||
expectedOutput: data.dataset.rows[i].output,
|
||||
itemNo: data.dataset.rows[i].sequenceNo,
|
||||
evaluations: [],
|
||||
status: 'pending'
|
||||
})
|
||||
}
|
||||
for (let i = 0; i < chatflowIds.length; i++) {
|
||||
const chatflowId = chatflowIds[i]
|
||||
await this.evaluateChatflow(chatflowId, this.getChatflowApiKey(chatflowId, data.apiKeys), data, returnData)
|
||||
}
|
||||
return returnData
|
||||
}
|
||||
|
||||
async evaluateChatflow(chatflowId: string, apiKey: string, data: any, returnData: any) {
|
||||
for (let i = 0; i < data.dataset.rows.length; i++) {
|
||||
const item = data.dataset.rows[i]
|
||||
const uuid = uuidv4()
|
||||
|
||||
const headers: any = {
|
||||
'X-Request-ID': uuid,
|
||||
'X-Flowise-Evaluation': 'true'
|
||||
}
|
||||
if (apiKey) {
|
||||
headers['Authorization'] = `Bearer ${apiKey}`
|
||||
}
|
||||
let axiosConfig = {
|
||||
headers: headers
|
||||
}
|
||||
let startTime = performance.now()
|
||||
const runData: any = {}
|
||||
runData.chatflowId = chatflowId
|
||||
runData.startTime = startTime
|
||||
const postData: any = { question: item.input, evaluationRunId: uuid, evaluation: true }
|
||||
if (data.sessionId) {
|
||||
postData.overrideConfig = { sessionId: data.sessionId }
|
||||
}
|
||||
try {
|
||||
let response = await axios.post(`${this.baseURL}/api/v1/prediction/${chatflowId}`, postData, axiosConfig)
|
||||
let agentFlowMetrics: any[] = []
|
||||
if (response?.data?.agentFlowExecutedData) {
|
||||
for (let i = 0; i < response.data.agentFlowExecutedData.length; i++) {
|
||||
const agentFlowExecutedData = response.data.agentFlowExecutedData[i]
|
||||
const input_tokens = agentFlowExecutedData?.data?.output?.usageMetadata?.input_tokens || 0
|
||||
const output_tokens = agentFlowExecutedData?.data?.output?.usageMetadata?.output_tokens || 0
|
||||
const total_tokens =
|
||||
agentFlowExecutedData?.data?.output?.usageMetadata?.total_tokens || input_tokens + output_tokens
|
||||
const metrics: any = {
|
||||
promptTokens: input_tokens,
|
||||
completionTokens: output_tokens,
|
||||
totalTokens: total_tokens,
|
||||
provider:
|
||||
agentFlowExecutedData.data?.input?.llmModelConfig?.llmModel ||
|
||||
agentFlowExecutedData.data?.input?.agentModelConfig?.agentModel,
|
||||
model:
|
||||
agentFlowExecutedData.data?.input?.llmModelConfig?.modelName ||
|
||||
agentFlowExecutedData.data?.input?.agentModelConfig?.modelName,
|
||||
nodeLabel: agentFlowExecutedData?.nodeLabel,
|
||||
nodeId: agentFlowExecutedData?.nodeId
|
||||
}
|
||||
if (metrics.provider && metrics.model) {
|
||||
const modelConfig = await EvaluationRunner.getCostMetrics(metrics.provider, metrics.model)
|
||||
if (modelConfig) {
|
||||
metrics.cost_values = {
|
||||
input_cost: (modelConfig.cost_values.input_cost || 0) * (input_tokens / 1000),
|
||||
output_cost: (modelConfig.cost_values.output_cost || 0) * (output_tokens / 1000)
|
||||
}
|
||||
metrics.cost_values.total_cost = metrics.cost_values.input_cost + metrics.cost_values.output_cost
|
||||
}
|
||||
}
|
||||
agentFlowMetrics.push(metrics)
|
||||
}
|
||||
}
|
||||
const endTime = performance.now()
|
||||
const timeTaken = (endTime - startTime).toFixed(2)
|
||||
if (response?.data?.metrics) {
|
||||
runData.metrics = response.data.metrics
|
||||
runData.metrics.push({
|
||||
apiLatency: timeTaken
|
||||
})
|
||||
} else {
|
||||
runData.metrics = [
|
||||
{
|
||||
apiLatency: timeTaken
|
||||
}
|
||||
]
|
||||
}
|
||||
if (agentFlowMetrics.length > 0) {
|
||||
runData.nested_metrics = agentFlowMetrics
|
||||
}
|
||||
runData.status = 'complete'
|
||||
let resultText = ''
|
||||
if (response.data.text) resultText = response.data.text
|
||||
else if (response.data.json) resultText = '```json\n' + JSON.stringify(response.data.json, null, 2)
|
||||
else resultText = JSON.stringify(response.data, null, 2)
|
||||
|
||||
runData.actualOutput = resultText
|
||||
runData.latency = timeTaken
|
||||
runData.error = ''
|
||||
} catch (error: any) {
|
||||
runData.status = 'error'
|
||||
runData.actualOutput = ''
|
||||
runData.error = error?.response?.data?.message
|
||||
? error.response.data.message
|
||||
: error?.message
|
||||
? error.message
|
||||
: 'Unknown error'
|
||||
try {
|
||||
if (runData.error.indexOf('-') > -1) {
|
||||
// if there is a dash, remove all content before
|
||||
runData.error = 'Error: ' + runData.error.substr(runData.error.indexOf('-') + 1).trim()
|
||||
}
|
||||
} catch (error) {
|
||||
//stay silent
|
||||
}
|
||||
const endTime = performance.now()
|
||||
const timeTaken = (endTime - startTime).toFixed(2)
|
||||
runData.metrics = [
|
||||
{
|
||||
apiLatency: timeTaken
|
||||
}
|
||||
]
|
||||
runData.latency = timeTaken
|
||||
}
|
||||
runData.uuid = uuid
|
||||
returnData.rows[i].evaluations.push(runData)
|
||||
}
|
||||
return returnData
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,15 @@
|
|||
module.exports = {
|
||||
preset: 'ts-jest',
|
||||
testEnvironment: 'node',
|
||||
roots: ['<rootDir>/nodes'],
|
||||
transform: {
|
||||
'^.+\\.tsx?$': 'ts-jest'
|
||||
},
|
||||
testRegex: '(/__tests__/.*|(\\.|/)(test|spec))\\.tsx?$',
|
||||
moduleFileExtensions: ['ts', 'tsx', 'js', 'jsx', 'json', 'node'],
|
||||
verbose: true,
|
||||
testPathIgnorePatterns: ['/node_modules/', '/dist/'],
|
||||
moduleNameMapper: {
|
||||
'^../../../src/(.*)$': '<rootDir>/src/$1'
|
||||
}
|
||||
}
|
||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
|
|
@ -0,0 +1,351 @@
|
|||
import { CommonType, ICommonObject, ICondition, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
|
||||
import removeMarkdown from 'remove-markdown'
|
||||
|
||||
class Condition_Agentflow implements INode {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
color: string
|
||||
tags: string[]
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
outputs: INodeOutputsValue[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'Condition'
|
||||
this.name = 'conditionAgentflow'
|
||||
this.version = 1.0
|
||||
this.type = 'Condition'
|
||||
this.category = 'Agent Flows'
|
||||
this.description = `Split flows based on If Else conditions`
|
||||
this.baseClasses = [this.type]
|
||||
this.color = '#FFB938'
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Conditions',
|
||||
name: 'conditions',
|
||||
type: 'array',
|
||||
description: 'Values to compare',
|
||||
acceptVariable: true,
|
||||
default: [
|
||||
{
|
||||
type: 'string',
|
||||
value1: '',
|
||||
operation: 'equal',
|
||||
value2: ''
|
||||
}
|
||||
],
|
||||
array: [
|
||||
{
|
||||
label: 'Type',
|
||||
name: 'type',
|
||||
type: 'options',
|
||||
options: [
|
||||
{
|
||||
label: 'String',
|
||||
name: 'string'
|
||||
},
|
||||
{
|
||||
label: 'Number',
|
||||
name: 'number'
|
||||
},
|
||||
{
|
||||
label: 'Boolean',
|
||||
name: 'boolean'
|
||||
}
|
||||
],
|
||||
default: 'string'
|
||||
},
|
||||
/////////////////////////////////////// STRING ////////////////////////////////////////
|
||||
{
|
||||
label: 'Value 1',
|
||||
name: 'value1',
|
||||
type: 'string',
|
||||
default: '',
|
||||
description: 'First value to be compared with',
|
||||
acceptVariable: true,
|
||||
show: {
|
||||
'conditions[$index].type': 'string'
|
||||
}
|
||||
},
|
||||
{
|
||||
label: 'Operation',
|
||||
name: 'operation',
|
||||
type: 'options',
|
||||
options: [
|
||||
{
|
||||
label: 'Contains',
|
||||
name: 'contains'
|
||||
},
|
||||
{
|
||||
label: 'Ends With',
|
||||
name: 'endsWith'
|
||||
},
|
||||
{
|
||||
label: 'Equal',
|
||||
name: 'equal'
|
||||
},
|
||||
{
|
||||
label: 'Not Contains',
|
||||
name: 'notContains'
|
||||
},
|
||||
{
|
||||
label: 'Not Equal',
|
||||
name: 'notEqual'
|
||||
},
|
||||
{
|
||||
label: 'Regex',
|
||||
name: 'regex'
|
||||
},
|
||||
{
|
||||
label: 'Starts With',
|
||||
name: 'startsWith'
|
||||
},
|
||||
{
|
||||
label: 'Is Empty',
|
||||
name: 'isEmpty'
|
||||
},
|
||||
{
|
||||
label: 'Not Empty',
|
||||
name: 'notEmpty'
|
||||
}
|
||||
],
|
||||
default: 'equal',
|
||||
description: 'Type of operation',
|
||||
show: {
|
||||
'conditions[$index].type': 'string'
|
||||
}
|
||||
},
|
||||
{
|
||||
label: 'Value 2',
|
||||
name: 'value2',
|
||||
type: 'string',
|
||||
default: '',
|
||||
description: 'Second value to be compared with',
|
||||
acceptVariable: true,
|
||||
show: {
|
||||
'conditions[$index].type': 'string'
|
||||
},
|
||||
hide: {
|
||||
'conditions[$index].operation': ['isEmpty', 'notEmpty']
|
||||
}
|
||||
},
|
||||
/////////////////////////////////////// NUMBER ////////////////////////////////////////
|
||||
{
|
||||
label: 'Value 1',
|
||||
name: 'value1',
|
||||
type: 'number',
|
||||
default: '',
|
||||
description: 'First value to be compared with',
|
||||
acceptVariable: true,
|
||||
show: {
|
||||
'conditions[$index].type': 'number'
|
||||
}
|
||||
},
|
||||
{
|
||||
label: 'Operation',
|
||||
name: 'operation',
|
||||
type: 'options',
|
||||
options: [
|
||||
{
|
||||
label: 'Smaller',
|
||||
name: 'smaller'
|
||||
},
|
||||
{
|
||||
label: 'Smaller Equal',
|
||||
name: 'smallerEqual'
|
||||
},
|
||||
{
|
||||
label: 'Equal',
|
||||
name: 'equal'
|
||||
},
|
||||
{
|
||||
label: 'Not Equal',
|
||||
name: 'notEqual'
|
||||
},
|
||||
{
|
||||
label: 'Larger',
|
||||
name: 'larger'
|
||||
},
|
||||
{
|
||||
label: 'Larger Equal',
|
||||
name: 'largerEqual'
|
||||
},
|
||||
{
|
||||
label: 'Is Empty',
|
||||
name: 'isEmpty'
|
||||
},
|
||||
{
|
||||
label: 'Not Empty',
|
||||
name: 'notEmpty'
|
||||
}
|
||||
],
|
||||
default: 'equal',
|
||||
description: 'Type of operation',
|
||||
show: {
|
||||
'conditions[$index].type': 'number'
|
||||
}
|
||||
},
|
||||
{
|
||||
label: 'Value 2',
|
||||
name: 'value2',
|
||||
type: 'number',
|
||||
default: 0,
|
||||
description: 'Second value to be compared with',
|
||||
acceptVariable: true,
|
||||
show: {
|
||||
'conditions[$index].type': 'number'
|
||||
}
|
||||
},
|
||||
/////////////////////////////////////// BOOLEAN ////////////////////////////////////////
|
||||
{
|
||||
label: 'Value 1',
|
||||
name: 'value1',
|
||||
type: 'boolean',
|
||||
default: false,
|
||||
description: 'First value to be compared with',
|
||||
show: {
|
||||
'conditions[$index].type': 'boolean'
|
||||
}
|
||||
},
|
||||
{
|
||||
label: 'Operation',
|
||||
name: 'operation',
|
||||
type: 'options',
|
||||
options: [
|
||||
{
|
||||
label: 'Equal',
|
||||
name: 'equal'
|
||||
},
|
||||
{
|
||||
label: 'Not Equal',
|
||||
name: 'notEqual'
|
||||
}
|
||||
],
|
||||
default: 'equal',
|
||||
description: 'Type of operation',
|
||||
show: {
|
||||
'conditions[$index].type': 'boolean'
|
||||
}
|
||||
},
|
||||
{
|
||||
label: 'Value 2',
|
||||
name: 'value2',
|
||||
type: 'boolean',
|
||||
default: false,
|
||||
description: 'Second value to be compared with',
|
||||
show: {
|
||||
'conditions[$index].type': 'boolean'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
this.outputs = [
|
||||
{
|
||||
label: '0',
|
||||
name: '0',
|
||||
description: 'Condition 0'
|
||||
},
|
||||
{
|
||||
label: '1',
|
||||
name: '1',
|
||||
description: 'Else'
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
async run(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
|
||||
const state = options.agentflowRuntime?.state as ICommonObject
|
||||
|
||||
const compareOperationFunctions: {
|
||||
[key: string]: (value1: CommonType, value2: CommonType) => boolean
|
||||
} = {
|
||||
contains: (value1: CommonType, value2: CommonType) => (value1 || '').toString().includes((value2 || '').toString()),
|
||||
notContains: (value1: CommonType, value2: CommonType) => !(value1 || '').toString().includes((value2 || '').toString()),
|
||||
endsWith: (value1: CommonType, value2: CommonType) => (value1 as string).endsWith(value2 as string),
|
||||
equal: (value1: CommonType, value2: CommonType) => value1 === value2,
|
||||
notEqual: (value1: CommonType, value2: CommonType) => value1 !== value2,
|
||||
larger: (value1: CommonType, value2: CommonType) => (Number(value1) || 0) > (Number(value2) || 0),
|
||||
largerEqual: (value1: CommonType, value2: CommonType) => (Number(value1) || 0) >= (Number(value2) || 0),
|
||||
smaller: (value1: CommonType, value2: CommonType) => (Number(value1) || 0) < (Number(value2) || 0),
|
||||
smallerEqual: (value1: CommonType, value2: CommonType) => (Number(value1) || 0) <= (Number(value2) || 0),
|
||||
startsWith: (value1: CommonType, value2: CommonType) => (value1 as string).startsWith(value2 as string),
|
||||
isEmpty: (value1: CommonType) => [undefined, null, ''].includes(value1 as string),
|
||||
notEmpty: (value1: CommonType) => ![undefined, null, ''].includes(value1 as string)
|
||||
}
|
||||
|
||||
const _conditions = nodeData.inputs?.conditions
|
||||
const conditions: ICondition[] = typeof _conditions === 'string' ? JSON.parse(_conditions) : _conditions
|
||||
const initialConditions = { ...conditions }
|
||||
|
||||
for (const condition of conditions) {
|
||||
const _value1 = condition.value1
|
||||
const _value2 = condition.value2
|
||||
const operation = condition.operation
|
||||
|
||||
let value1: CommonType
|
||||
let value2: CommonType
|
||||
|
||||
switch (condition.type) {
|
||||
case 'boolean':
|
||||
value1 = _value1
|
||||
value2 = _value2
|
||||
break
|
||||
case 'number':
|
||||
value1 = parseFloat(_value1 as string) || 0
|
||||
value2 = parseFloat(_value2 as string) || 0
|
||||
break
|
||||
default: // string
|
||||
value1 = removeMarkdown((_value1 as string) || '')
|
||||
value2 = removeMarkdown((_value2 as string) || '')
|
||||
}
|
||||
|
||||
const compareOperationResult = compareOperationFunctions[operation](value1, value2)
|
||||
if (compareOperationResult) {
|
||||
// find the matching condition
|
||||
const conditionIndex = conditions.findIndex((c) => JSON.stringify(c) === JSON.stringify(condition))
|
||||
// add isFulfilled to the condition
|
||||
if (conditionIndex > -1) {
|
||||
conditions[conditionIndex] = { ...condition, isFulfilled: true }
|
||||
}
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
// If no condition is fulfilled, add isFulfilled to the ELSE condition
|
||||
const dummyElseConditionData = {
|
||||
type: 'string',
|
||||
value1: '',
|
||||
operation: 'equal',
|
||||
value2: ''
|
||||
}
|
||||
if (!conditions.some((c) => c.isFulfilled)) {
|
||||
conditions.push({
|
||||
...dummyElseConditionData,
|
||||
isFulfilled: true
|
||||
})
|
||||
} else {
|
||||
conditions.push({
|
||||
...dummyElseConditionData,
|
||||
isFulfilled: false
|
||||
})
|
||||
}
|
||||
|
||||
const returnOutput = {
|
||||
id: nodeData.id,
|
||||
name: this.name,
|
||||
input: { conditions: initialConditions },
|
||||
output: { conditions },
|
||||
state
|
||||
}
|
||||
|
||||
return returnOutput
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { nodeClass: Condition_Agentflow }
|
||||
|
|
@ -0,0 +1,614 @@
|
|||
import { AnalyticHandler } from '../../../src/handler'
|
||||
import { ICommonObject, INode, INodeData, INodeOptionsValue, INodeOutputsValue, INodeParams } from '../../../src/Interface'
|
||||
import { AIMessageChunk, BaseMessageLike } from '@langchain/core/messages'
|
||||
import {
|
||||
getPastChatHistoryImageMessages,
|
||||
getUniqueImageMessages,
|
||||
processMessagesWithImages,
|
||||
replaceBase64ImagesWithFileReferences
|
||||
} from '../utils'
|
||||
import { CONDITION_AGENT_SYSTEM_PROMPT, DEFAULT_SUMMARIZER_TEMPLATE } from '../prompt'
|
||||
import { BaseChatModel } from '@langchain/core/language_models/chat_models'
|
||||
|
||||
/**
 * Agentflow node that routes the flow into one of several branches by asking
 * an LLM which user-defined scenario best matches the input.
 *
 * The LLM is prompted with few-shot examples to answer with a JSON object of
 * the form {"output": "<scenario text>"}; the answer is matched (case-
 * insensitively, exact match) against the configured scenarios to decide which
 * output branch is fulfilled.
 */
class ConditionAgent_Agentflow implements INode {
    label: string
    name: string
    version: number
    description: string
    type: string
    icon: string
    category: string
    color: string
    tags: string[]
    baseClasses: string[]
    inputs: INodeParams[]
    outputs: INodeOutputsValue[]

    constructor() {
        this.label = 'Condition Agent'
        this.name = 'conditionAgentAgentflow'
        this.version = 1.1
        this.type = 'ConditionAgent'
        this.category = 'Agent Flows'
        this.description = `Utilize an agent to split flows based on dynamic conditions`
        this.baseClasses = [this.type]
        this.color = '#ff8fab'
        this.inputs = [
            {
                label: 'Model',
                name: 'conditionAgentModel',
                type: 'asyncOptions',
                loadMethod: 'listModels',
                loadConfig: true
            },
            {
                label: 'Instructions',
                name: 'conditionAgentInstructions',
                type: 'string',
                description: 'A general instructions of what the condition agent should do',
                rows: 4,
                acceptVariable: true,
                placeholder: 'Determine if the user is interested in learning about AI'
            },
            {
                label: 'Input',
                name: 'conditionAgentInput',
                type: 'string',
                description: 'Input to be used for the condition agent',
                rows: 4,
                acceptVariable: true,
                default: '<p><span class="variable" data-type="mention" data-id="question" data-label="question">{{ question }}</span> </p>'
            },
            {
                label: 'Scenarios',
                name: 'conditionAgentScenarios',
                description: 'Define the scenarios that will be used as the conditions to split the flow',
                type: 'array',
                array: [
                    {
                        label: 'Scenario',
                        name: 'scenario',
                        type: 'string',
                        placeholder: 'User is asking for a pizza'
                    }
                ],
                default: [
                    {
                        scenario: ''
                    },
                    {
                        scenario: ''
                    }
                ]
            },
            {
                label: 'Override System Prompt',
                name: 'conditionAgentOverrideSystemPrompt',
                type: 'boolean',
                description: 'Override initial system prompt for Condition Agent',
                optional: true
            },
            {
                label: 'Node System Prompt',
                name: 'conditionAgentSystemPrompt',
                type: 'string',
                rows: 4,
                optional: true,
                acceptVariable: true,
                default: CONDITION_AGENT_SYSTEM_PROMPT,
                description: 'Expert use only. Modifying this can significantly alter agent behavior. Leave default if unsure',
                show: {
                    conditionAgentOverrideSystemPrompt: true
                }
            }
            // NOTE(review): the memory-related inputs below are intentionally
            // disabled, yet run() still reads conditionAgentEnableMemory and
            // conditionAgentMemoryType — those are expected to be undefined at
            // runtime until this block is re-enabled. Confirm intent.
            /*{
                label: 'Enable Memory',
                name: 'conditionAgentEnableMemory',
                type: 'boolean',
                description: 'Enable memory for the conversation thread',
                default: true,
                optional: true
            },
            {
                label: 'Memory Type',
                name: 'conditionAgentMemoryType',
                type: 'options',
                options: [
                    {
                        label: 'All Messages',
                        name: 'allMessages',
                        description: 'Retrieve all messages from the conversation'
                    },
                    {
                        label: 'Window Size',
                        name: 'windowSize',
                        description: 'Uses a fixed window size to surface the last N messages'
                    },
                    {
                        label: 'Conversation Summary',
                        name: 'conversationSummary',
                        description: 'Summarizes the whole conversation'
                    },
                    {
                        label: 'Conversation Summary Buffer',
                        name: 'conversationSummaryBuffer',
                        description: 'Summarize conversations once token limit is reached. Default to 2000'
                    }
                ],
                optional: true,
                default: 'allMessages',
                show: {
                    conditionAgentEnableMemory: true
                }
            },
            {
                label: 'Window Size',
                name: 'conditionAgentMemoryWindowSize',
                type: 'number',
                default: '20',
                description: 'Uses a fixed window size to surface the last N messages',
                show: {
                    conditionAgentMemoryType: 'windowSize'
                }
            },
            {
                label: 'Max Token Limit',
                name: 'conditionAgentMemoryMaxTokenLimit',
                type: 'number',
                default: '2000',
                description: 'Summarize conversations once token limit is reached. Default to 2000',
                show: {
                    conditionAgentMemoryType: 'conversationSummaryBuffer'
                }
            }*/
        ]
        this.outputs = [
            {
                label: '0',
                name: '0',
                description: 'Condition 0'
            },
            {
                label: '1',
                name: '1',
                description: 'Else'
            }
        ]
    }

    //@ts-ignore
    loadMethods = {
        /**
         * Lists available Chat Model nodes for the async 'Model' dropdown.
         * LlamaIndex-tagged chat models are excluded.
         */
        async listModels(_: INodeData, options: ICommonObject): Promise<INodeOptionsValue[]> {
            const componentNodes = options.componentNodes as {
                [key: string]: INode
            }

            const returnOptions: INodeOptionsValue[] = []
            for (const nodeName in componentNodes) {
                const componentNode = componentNodes[nodeName]
                if (componentNode.category === 'Chat Models') {
                    if (componentNode.tags?.includes('LlamaIndex')) {
                        continue
                    }
                    returnOptions.push({
                        label: componentNode.label,
                        name: nodeName,
                        imageSrc: componentNode.icon
                    })
                }
            }
            return returnOptions
        }
    }

    /**
     * Extracts and parses the first JSON object found in an LLM response that
     * may be wrapped in markdown code fences (```json ... ```), backticks, or
     * bare braces.
     *
     * @param jsonString - raw LLM output text
     * @returns the parsed JSON value
     * @throws Error when no JSON block is found or the extracted text is invalid JSON
     */
    private parseJsonMarkdown(jsonString: string): any {
        // Strip whitespace
        jsonString = jsonString.trim()
        // Candidate opening/closing delimiters, checked most-specific first.
        const starts = ['```json', '```', '``', '`', '{']
        const ends = ['```', '``', '`', '}']

        let startIndex = -1
        let endIndex = -1

        // Find start of JSON
        for (const s of starts) {
            startIndex = jsonString.indexOf(s)
            if (startIndex !== -1) {
                // Skip past the fence itself, unless the match is the '{' literal.
                if (jsonString[startIndex] !== '{') {
                    startIndex += s.length
                }
                break
            }
        }

        // Find end of JSON
        if (startIndex !== -1) {
            for (const e of ends) {
                endIndex = jsonString.lastIndexOf(e, jsonString.length)
                if (endIndex !== -1) {
                    // Keep a closing '}' inside the slice; fences are excluded.
                    if (jsonString[endIndex] === '}') {
                        endIndex += 1
                    }
                    break
                }
            }
        }

        if (startIndex !== -1 && endIndex !== -1 && startIndex < endIndex) {
            const extractedContent = jsonString.slice(startIndex, endIndex).trim()
            try {
                return JSON.parse(extractedContent)
            } catch (error) {
                throw new Error(`Invalid JSON object. Error: ${error}`)
            }
        }

        throw new Error('Could not find JSON block in the output.')
    }

    /**
     * Executes the condition agent: builds a few-shot prompt, invokes the
     * configured chat model, parses its JSON answer, and maps it onto the
     * configured scenarios to decide which branch is fulfilled.
     *
     * @param nodeData - node configuration and inputs
     * @param question - the incoming user question (fallback input)
     * @param options - runtime options (component nodes, analytics, abort signal, chat history, uploads)
     * @returns node output with per-scenario `conditions`, raw LLM `content`, timing metadata, state, and chat history
     * @throws Error when the model is missing, scenarios are missing, the LLM
     *         response cannot be parsed, or the underlying call fails/aborts
     */
    async run(nodeData: INodeData, question: string, options: ICommonObject): Promise<any> {
        let llmIds: ICommonObject | undefined
        let analyticHandlers = options.analyticHandlers as AnalyticHandler

        try {
            const abortController = options.abortController as AbortController

            // Extract input parameters
            const model = nodeData.inputs?.conditionAgentModel as string
            const modelConfig = nodeData.inputs?.conditionAgentModelConfig as ICommonObject
            if (!model) {
                throw new Error('Model is required')
            }
            const conditionAgentInput = nodeData.inputs?.conditionAgentInput as string
            let input = conditionAgentInput || question
            const conditionAgentInstructions = nodeData.inputs?.conditionAgentInstructions as string
            const conditionAgentSystemPrompt = nodeData.inputs?.conditionAgentSystemPrompt as string
            const conditionAgentOverrideSystemPrompt = nodeData.inputs?.conditionAgentOverrideSystemPrompt as boolean
            let systemPrompt = CONDITION_AGENT_SYSTEM_PROMPT
            if (conditionAgentSystemPrompt && conditionAgentOverrideSystemPrompt) {
                systemPrompt = conditionAgentSystemPrompt
            }

            // Extract memory and configuration options
            // NOTE(review): these inputs are commented out in the constructor, so
            // they are normally undefined here — memory handling is effectively
            // disabled until that block is restored.
            const enableMemory = nodeData.inputs?.conditionAgentEnableMemory as boolean
            const memoryType = nodeData.inputs?.conditionAgentMemoryType as string
            const _conditionAgentScenarios = nodeData.inputs?.conditionAgentScenarios as { scenario: string }[]

            // Extract runtime state and history
            const state = options.agentflowRuntime?.state as ICommonObject
            const pastChatHistory = (options.pastChatHistory as BaseMessageLike[]) ?? []
            const runtimeChatHistory = (options.agentflowRuntime?.chatHistory as BaseMessageLike[]) ?? []

            // Initialize the LLM model instance by dynamically importing the
            // selected chat-model component and instantiating it with the
            // merged node + model configuration.
            const nodeInstanceFilePath = options.componentNodes[model].filePath as string
            const nodeModule = await import(nodeInstanceFilePath)
            const newLLMNodeInstance = new nodeModule.nodeClass()
            const newNodeData = {
                ...nodeData,
                credential: modelConfig['FLOWISE_CREDENTIAL_ID'],
                inputs: {
                    ...nodeData.inputs,
                    ...modelConfig
                }
            }
            let llmNodeInstance = (await newLLMNodeInstance.init(newNodeData, '', options)) as BaseChatModel

            const isStructuredOutput =
                _conditionAgentScenarios && Array.isArray(_conditionAgentScenarios) && _conditionAgentScenarios.length > 0
            if (!isStructuredOutput) {
                throw new Error('Scenarios are required')
            }

            // Prepare messages array: system prompt plus a one-shot example
            // demonstrating the expected {"output": "..."} JSON answer format.
            const messages: BaseMessageLike[] = [
                {
                    role: 'system',
                    content: systemPrompt
                },
                {
                    role: 'user',
                    content: `{"input": "Hello", "scenarios": ["user is asking about AI", "user is not asking about AI"], "instruction": "Your task is to check if the user is asking about AI."}`
                },
                {
                    role: 'assistant',
                    content: `\`\`\`json\n{"output": "user is not asking about AI"}\n\`\`\``
                }
            ]
            // Use to store messages with image file references as we do not want to store the base64 data into database
            let runtimeImageMessagesWithFileRef: BaseMessageLike[] = []
            // Use to keep track of past messages with image file references
            let pastImageMessagesWithFileRef: BaseMessageLike[] = []

            // Build the JSON payload sent to the LLM.
            // NOTE(review): `input` and `conditionAgentInstructions` are interpolated
            // without JSON.stringify, so quotes/newlines in them yield invalid JSON
            // in the prompt — verify the model tolerates this or escape upstream.
            input = `{"input": ${input}, "scenarios": ${JSON.stringify(
                _conditionAgentScenarios.map((scenario) => scenario.scenario)
            )}, "instruction": ${conditionAgentInstructions}}`

            // Handle memory management if enabled
            if (enableMemory) {
                await this.handleMemory({
                    messages,
                    memoryType,
                    pastChatHistory,
                    runtimeChatHistory,
                    llmNodeInstance,
                    nodeData,
                    input,
                    abortController,
                    options,
                    modelConfig,
                    runtimeImageMessagesWithFileRef,
                    pastImageMessagesWithFileRef
                })
            } else {
                /*
                 * If this is the first node:
                 * - Add images to messages if exist
                 */
                if (!runtimeChatHistory.length && options.uploads) {
                    const imageContents = await getUniqueImageMessages(options, messages, modelConfig)
                    if (imageContents) {
                        const { imageMessageWithBase64, imageMessageWithFileRef } = imageContents
                        messages.push(imageMessageWithBase64)
                        runtimeImageMessagesWithFileRef.push(imageMessageWithFileRef)
                    }
                }
                messages.push({
                    role: 'user',
                    content: input
                })
            }

            // Initialize response and determine if streaming is possible
            let response: AIMessageChunk = new AIMessageChunk('')

            // Start analytics
            if (analyticHandlers && options.parentTraceIds) {
                const llmLabel = options?.componentNodes?.[model]?.label || model
                llmIds = await analyticHandlers.onLLMStart(llmLabel, messages, options.parentTraceIds)
            }

            // Track execution time
            const startTime = Date.now()

            response = await llmNodeInstance.invoke(messages, { signal: abortController?.signal })

            // Calculate execution time
            const endTime = Date.now()
            const timeDelta = endTime - startTime

            // End analytics tracking
            if (analyticHandlers && llmIds) {
                await analyticHandlers.onLLMEnd(
                    llmIds,
                    typeof response.content === 'string' ? response.content : JSON.stringify(response.content)
                )
            }

            // Parse the scenario name the LLM selected from its JSON answer.
            let calledOutputName: string
            try {
                const parsedResponse = this.parseJsonMarkdown(response.content as string)
                if (!parsedResponse.output || typeof parsedResponse.output !== 'string') {
                    throw new Error('LLM response is missing the "output" key or it is not a string.')
                }
                calledOutputName = parsedResponse.output
            } catch (error) {
                throw new Error(
                    `Failed to parse a valid scenario from the LLM's response. Please check if the model is capable of following JSON output instructions. Raw LLM Response: "${
                        response.content as string
                    }"`
                )
            }

            // Clean up empty inputs
            for (const key in nodeData.inputs) {
                if (nodeData.inputs[key] === '') {
                    delete nodeData.inputs[key]
                }
            }

            // Find the first exact match (case-insensitive) between the LLM's
            // answer and the configured scenario texts; -1 means none matched.
            const matchedScenarioIndex = _conditionAgentScenarios.findIndex(
                (scenario) => calledOutputName.toLowerCase() === scenario.scenario.toLowerCase()
            )

            const conditions = _conditionAgentScenarios.map((scenario, index) => {
                return {
                    output: scenario.scenario,
                    isFulfilled: index === matchedScenarioIndex
                }
            })

            // Replace the actual messages array with one that includes the file references for images instead of base64 data
            const messagesWithFileReferences = replaceBase64ImagesWithFileReferences(
                messages,
                runtimeImageMessagesWithFileRef,
                pastImageMessagesWithFileRef
            )

            // Only add to runtime chat history if this is the first node
            const inputMessages = []
            if (!runtimeChatHistory.length) {
                if (runtimeImageMessagesWithFileRef.length) {
                    inputMessages.push(...runtimeImageMessagesWithFileRef)
                }
                if (input && typeof input === 'string') {
                    // NOTE(review): pushes the original `question`, not the JSON-wrapped
                    // `input`, so stored history shows the raw user text — confirm intended.
                    inputMessages.push({ role: 'user', content: question })
                }
            }

            const returnOutput = {
                id: nodeData.id,
                name: this.name,
                input: { messages: messagesWithFileReferences },
                output: {
                    conditions,
                    content: typeof response.content === 'string' ? response.content : JSON.stringify(response.content),
                    timeMetadata: {
                        start: startTime,
                        end: endTime,
                        delta: timeDelta
                    }
                },
                state,
                chatHistory: [...inputMessages]
            }

            return returnOutput
        } catch (error) {
            // Report the failure to analytics before rethrowing.
            if (options.analyticHandlers && llmIds) {
                await options.analyticHandlers.onLLMError(llmIds, error instanceof Error ? error.message : String(error))
            }

            // Propagate aborts unchanged so callers can distinguish cancellation.
            if (error instanceof Error && error.message === 'Aborted') {
                throw error
            }
            throw new Error(`Error in Condition Agent node: ${error instanceof Error ? error.message : String(error)}`)
        }
    }

    /**
     * Handles memory management based on the specified memory type
     */
    private async handleMemory({
        messages,
        memoryType,
        pastChatHistory,
        runtimeChatHistory,
        llmNodeInstance,
        nodeData,
        input,
        abortController,
        options,
        modelConfig,
        runtimeImageMessagesWithFileRef,
        pastImageMessagesWithFileRef
    }: {
        messages: BaseMessageLike[]
        memoryType: string
        pastChatHistory: BaseMessageLike[]
        runtimeChatHistory: BaseMessageLike[]
        llmNodeInstance: BaseChatModel
        nodeData: INodeData
        input: string
        abortController: AbortController
        options: ICommonObject
        modelConfig: ICommonObject
        runtimeImageMessagesWithFileRef: BaseMessageLike[]
        pastImageMessagesWithFileRef: BaseMessageLike[]
    }): Promise<void> {
        // Resolve image file references in past history, tracking transformed messages.
        const { updatedPastMessages, transformedPastMessages } = await getPastChatHistoryImageMessages(pastChatHistory, options)
        pastChatHistory = updatedPastMessages
        pastImageMessagesWithFileRef.push(...transformedPastMessages)

        let pastMessages = [...pastChatHistory, ...runtimeChatHistory]
        if (!runtimeChatHistory.length) {
            /*
             * If this is the first node:
             * - Add images to messages if exist
             */
            if (options.uploads) {
                const imageContents = await getUniqueImageMessages(options, messages, modelConfig)
                if (imageContents) {
                    const { imageMessageWithBase64, imageMessageWithFileRef } = imageContents
                    pastMessages.push(imageMessageWithBase64)
                    runtimeImageMessagesWithFileRef.push(imageMessageWithFileRef)
                }
            }
        }
        const { updatedMessages, transformedMessages } = await processMessagesWithImages(pastMessages, options)
        pastMessages = updatedMessages
        pastImageMessagesWithFileRef.push(...transformedMessages)

        if (pastMessages.length > 0) {
            if (memoryType === 'windowSize') {
                // Window memory: Keep the last N messages
                // (N * 2 to cover both user and assistant turns per exchange)
                const windowSize = nodeData.inputs?.conditionAgentMemoryWindowSize as number
                const windowedMessages = pastMessages.slice(-windowSize * 2)
                messages.push(...windowedMessages)
            } else if (memoryType === 'conversationSummary') {
                // Summary memory: Summarize all past messages
                const summary = await llmNodeInstance.invoke(
                    [
                        {
                            role: 'user',
                            content: DEFAULT_SUMMARIZER_TEMPLATE.replace(
                                '{conversation}',
                                pastMessages.map((msg: any) => `${msg.role}: ${msg.content}`).join('\n')
                            )
                        }
                    ],
                    { signal: abortController?.signal }
                )
                messages.push({ role: 'assistant', content: summary.content as string })
            } else if (memoryType === 'conversationSummaryBuffer') {
                // Summary buffer: Summarize messages that exceed token limit
                await this.handleSummaryBuffer(messages, pastMessages, llmNodeInstance, nodeData, abortController)
            } else {
                // Default: Use all messages
                messages.push(...pastMessages)
            }
        }

        // Finally append the current turn's input as the latest user message.
        messages.push({
            role: 'user',
            content: input
        })
    }

    /**
     * Handles conversation summary buffer memory type
     */
    private async handleSummaryBuffer(
        messages: BaseMessageLike[],
        pastMessages: BaseMessageLike[],
        llmNodeInstance: BaseChatModel,
        nodeData: INodeData,
        abortController: AbortController
    ): Promise<void> {
        const maxTokenLimit = (nodeData.inputs?.conditionAgentMemoryMaxTokenLimit as number) || 2000

        // Convert past messages to a format suitable for token counting
        const messagesString = pastMessages.map((msg: any) => `${msg.role}: ${msg.content}`).join('\n')
        const tokenCount = await llmNodeInstance.getNumTokens(messagesString)

        if (tokenCount > maxTokenLimit) {
            // Calculate how many messages to summarize (messages that exceed the token limit)
            let currBufferLength = tokenCount
            const messagesToSummarize = []
            const remainingMessages = [...pastMessages]

            // Remove messages from the beginning until we're under the token limit
            while (currBufferLength > maxTokenLimit && remainingMessages.length > 0) {
                const poppedMessage = remainingMessages.shift()
                if (poppedMessage) {
                    messagesToSummarize.push(poppedMessage)
                    // Recalculate token count for remaining messages
                    const remainingMessagesString = remainingMessages.map((msg: any) => `${msg.role}: ${msg.content}`).join('\n')
                    currBufferLength = await llmNodeInstance.getNumTokens(remainingMessagesString)
                }
            }

            // Summarize the messages that were removed
            const messagesToSummarizeString = messagesToSummarize.map((msg: any) => `${msg.role}: ${msg.content}`).join('\n')

            const summary = await llmNodeInstance.invoke(
                [
                    {
                        role: 'user',
                        content: DEFAULT_SUMMARIZER_TEMPLATE.replace('{conversation}', messagesToSummarizeString)
                    }
                ],
                { signal: abortController?.signal }
            )

            // Add summary as a system message at the beginning, then add remaining messages
            messages.push({ role: 'system', content: `Previous conversation summary: ${summary.content}` })
            messages.push(...remainingMessages)
        } else {
            // If under token limit, use all messages
            messages.push(...pastMessages)
        }
    }
}

module.exports = { nodeClass: ConditionAgent_Agentflow }
|
||||
|
|
@ -0,0 +1,219 @@
|
|||
import { DataSource } from 'typeorm'
|
||||
import {
|
||||
ICommonObject,
|
||||
IDatabaseEntity,
|
||||
INode,
|
||||
INodeData,
|
||||
INodeOptionsValue,
|
||||
INodeParams,
|
||||
IServerSideEventStreamer
|
||||
} from '../../../src/Interface'
|
||||
import { getVars, executeJavaScriptCode, createCodeExecutionSandbox, processTemplateVariables } from '../../../src/utils'
|
||||
import { updateFlowState } from '../utils'
|
||||
|
||||
// Shape of one entry in the Custom Function node's 'Input Variables' array
// input; per the node description, each variable is exposed inside the
// user-supplied function with a `$` prefix (e.g. $foo).
interface ICustomFunctionInputVariables {
    variableName: string
    variableValue: string
}
|
||||
|
||||
// Example snippet shown in the code editor for the Custom Function node.
// FIX: the weather API URL contained the mangled character `¤` where the
// query string `&current_weather=true` had been corrupted by HTML-entity
// decoding (`&curren` -> `¤`); restored the intended parameter.
const exampleFunc = `/*
* You can use any libraries imported in Flowise
* You can use properties specified in Input Variables with the prefix $. For example: $foo
* You can get default flow config: $flow.sessionId, $flow.chatId, $flow.chatflowId, $flow.input, $flow.state
* You can get global variables: $vars.<variable-name>
* Must return a string value at the end of function
*/

const fetch = require('node-fetch');
const url = 'https://api.open-meteo.com/v1/forecast?latitude=52.52&longitude=13.41&current_weather=true';
const options = {
    method: 'GET',
    headers: {
        'Content-Type': 'application/json'
    }
};
try {
    const response = await fetch(url, options);
    const text = await response.text();
    return text;
} catch (error) {
    console.error(error);
    return '';
}`
|
||||
|
||||
class CustomFunction_Agentflow implements INode {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
color: string
|
||||
hideOutput: boolean
|
||||
hint: string
|
||||
baseClasses: string[]
|
||||
documentation?: string
|
||||
credential: INodeParams
|
||||
inputs: INodeParams[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'Custom Function'
|
||||
this.name = 'customFunctionAgentflow'
|
||||
this.version = 1.1
|
||||
this.type = 'CustomFunction'
|
||||
this.category = 'Agent Flows'
|
||||
this.description = 'Execute custom function'
|
||||
this.baseClasses = [this.type]
|
||||
this.color = '#E4B7FF'
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Input Variables',
|
||||
name: 'customFunctionInputVariables',
|
||||
description: 'Input variables can be used in the function with prefix $. For example: $foo',
|
||||
type: 'array',
|
||||
optional: true,
|
||||
acceptVariable: true,
|
||||
array: [
|
||||
{
|
||||
label: 'Variable Name',
|
||||
name: 'variableName',
|
||||
type: 'string'
|
||||
},
|
||||
{
|
||||
label: 'Variable Value',
|
||||
name: 'variableValue',
|
||||
type: 'string',
|
||||
acceptVariable: true
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
label: 'Javascript Function',
|
||||
name: 'customFunctionJavascriptFunction',
|
||||
type: 'code',
|
||||
codeExample: exampleFunc,
|
||||
description: 'The function to execute. Must return a string or an object that can be converted to a string.'
|
||||
},
|
||||
{
|
||||
label: 'Update Flow State',
|
||||
name: 'customFunctionUpdateState',
|
||||
description: 'Update runtime state during the execution of the workflow',
|
||||
type: 'array',
|
||||
optional: true,
|
||||
acceptVariable: true,
|
||||
array: [
|
||||
{
|
||||
label: 'Key',
|
||||
name: 'key',
|
||||
type: 'asyncOptions',
|
||||
loadMethod: 'listRuntimeStateKeys'
|
||||
},
|
||||
{
|
||||
label: 'Value',
|
||||
name: 'value',
|
||||
type: 'string',
|
||||
acceptVariable: true,
|
||||
acceptNodeOutputAsVariable: true
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
//@ts-ignore
|
||||
loadMethods = {
|
||||
async listRuntimeStateKeys(_: INodeData, options: ICommonObject): Promise<INodeOptionsValue[]> {
|
||||
const previousNodes = options.previousNodes as ICommonObject[]
|
||||
const startAgentflowNode = previousNodes.find((node) => node.name === 'startAgentflow')
|
||||
const state = startAgentflowNode?.inputs?.startState as ICommonObject[]
|
||||
return state.map((item) => ({ label: item.key, name: item.key }))
|
||||
}
|
||||
}
|
||||
|
||||
async run(nodeData: INodeData, input: string, options: ICommonObject): Promise<any> {
|
||||
const javascriptFunction = nodeData.inputs?.customFunctionJavascriptFunction as string
|
||||
const functionInputVariables = (nodeData.inputs?.customFunctionInputVariables as ICustomFunctionInputVariables[]) ?? []
|
||||
const _customFunctionUpdateState = nodeData.inputs?.customFunctionUpdateState
|
||||
|
||||
const state = options.agentflowRuntime?.state as ICommonObject
|
||||
const chatId = options.chatId as string
|
||||
const isLastNode = options.isLastNode as boolean
|
||||
const isStreamable = isLastNode && options.sseStreamer !== undefined
|
||||
|
||||
const appDataSource = options.appDataSource as DataSource
|
||||
const databaseEntities = options.databaseEntities as IDatabaseEntity
|
||||
|
||||
const variables = await getVars(appDataSource, databaseEntities, nodeData, options)
|
||||
const flow = {
|
||||
input,
|
||||
state,
|
||||
chatflowId: options.chatflowid,
|
||||
sessionId: options.sessionId,
|
||||
chatId: options.chatId,
|
||||
rawOutput: options.postProcessing?.rawOutput || '',
|
||||
chatHistory: options.postProcessing?.chatHistory || [],
|
||||
sourceDocuments: options.postProcessing?.sourceDocuments,
|
||||
usedTools: options.postProcessing?.usedTools,
|
||||
artifacts: options.postProcessing?.artifacts,
|
||||
fileAnnotations: options.postProcessing?.fileAnnotations
|
||||
}
|
||||
|
||||
// Create additional sandbox variables for custom function inputs
|
||||
const additionalSandbox: ICommonObject = {}
|
||||
for (const item of functionInputVariables) {
|
||||
const variableName = item.variableName
|
||||
const variableValue = item.variableValue
|
||||
additionalSandbox[`$${variableName}`] = variableValue
|
||||
}
|
||||
|
||||
const sandbox = createCodeExecutionSandbox(input, variables, flow, additionalSandbox)
|
||||
|
||||
// Setup streaming function if needed
|
||||
const streamOutput = isStreamable
|
||||
? (output: string) => {
|
||||
const sseStreamer: IServerSideEventStreamer = options.sseStreamer
|
||||
sseStreamer.streamTokenEvent(chatId, output)
|
||||
}
|
||||
: undefined
|
||||
|
||||
try {
|
||||
const response = await executeJavaScriptCode(javascriptFunction, sandbox, {
|
||||
libraries: ['axios'],
|
||||
streamOutput
|
||||
})
|
||||
|
||||
let finalOutput = response
|
||||
if (typeof response === 'object') {
|
||||
finalOutput = JSON.stringify(response, null, 2)
|
||||
}
|
||||
|
||||
// Update flow state if needed
|
||||
let newState = { ...state }
|
||||
if (_customFunctionUpdateState && Array.isArray(_customFunctionUpdateState) && _customFunctionUpdateState.length > 0) {
|
||||
newState = updateFlowState(state, _customFunctionUpdateState)
|
||||
}
|
||||
|
||||
newState = processTemplateVariables(newState, finalOutput)
|
||||
|
||||
const returnOutput = {
|
||||
id: nodeData.id,
|
||||
name: this.name,
|
||||
input: {
|
||||
inputVariables: functionInputVariables,
|
||||
code: javascriptFunction
|
||||
},
|
||||
output: {
|
||||
content: finalOutput
|
||||
},
|
||||
state: newState
|
||||
}
|
||||
|
||||
return returnOutput
|
||||
} catch (e) {
|
||||
throw new Error(e)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { nodeClass: CustomFunction_Agentflow }
|
||||
|
|
@ -0,0 +1,67 @@
|
|||
import { ICommonObject, INode, INodeData, INodeParams, IServerSideEventStreamer } from '../../../src/Interface'
|
||||
|
||||
class DirectReply_Agentflow implements INode {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
color: string
|
||||
hideOutput: boolean
|
||||
hint: string
|
||||
baseClasses: string[]
|
||||
documentation?: string
|
||||
credential: INodeParams
|
||||
inputs: INodeParams[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'Direct Reply'
|
||||
this.name = 'directReplyAgentflow'
|
||||
this.version = 1.0
|
||||
this.type = 'DirectReply'
|
||||
this.category = 'Agent Flows'
|
||||
this.description = 'Directly reply to the user with a message'
|
||||
this.baseClasses = [this.type]
|
||||
this.color = '#4DDBBB'
|
||||
this.hideOutput = true
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Message',
|
||||
name: 'directReplyMessage',
|
||||
type: 'string',
|
||||
rows: 4,
|
||||
acceptVariable: true
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
async run(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
|
||||
const directReplyMessage = nodeData.inputs?.directReplyMessage as string
|
||||
|
||||
const state = options.agentflowRuntime?.state as ICommonObject
|
||||
const chatId = options.chatId as string
|
||||
const isLastNode = options.isLastNode as boolean
|
||||
const isStreamable = isLastNode && options.sseStreamer !== undefined
|
||||
|
||||
if (isStreamable) {
|
||||
const sseStreamer: IServerSideEventStreamer = options.sseStreamer
|
||||
sseStreamer.streamTokenEvent(chatId, directReplyMessage)
|
||||
}
|
||||
|
||||
const returnOutput = {
|
||||
id: nodeData.id,
|
||||
name: this.name,
|
||||
input: {},
|
||||
output: {
|
||||
content: directReplyMessage
|
||||
},
|
||||
state
|
||||
}
|
||||
|
||||
return returnOutput
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { nodeClass: DirectReply_Agentflow }
|
||||
|
|
@ -0,0 +1,296 @@
|
|||
import {
|
||||
ICommonObject,
|
||||
IDatabaseEntity,
|
||||
INode,
|
||||
INodeData,
|
||||
INodeOptionsValue,
|
||||
INodeParams,
|
||||
IServerSideEventStreamer
|
||||
} from '../../../src/Interface'
|
||||
import axios, { AxiosRequestConfig } from 'axios'
|
||||
import { getCredentialData, getCredentialParam, processTemplateVariables, parseJsonBody } from '../../../src/utils'
|
||||
import { DataSource } from 'typeorm'
|
||||
import { BaseMessageLike } from '@langchain/core/messages'
|
||||
import { updateFlowState } from '../utils'
|
||||
|
||||
class ExecuteFlow_Agentflow implements INode {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
color: string
|
||||
baseClasses: string[]
|
||||
documentation?: string
|
||||
credential: INodeParams
|
||||
inputs: INodeParams[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'Execute Flow'
|
||||
this.name = 'executeFlowAgentflow'
|
||||
this.version = 1.2
|
||||
this.type = 'ExecuteFlow'
|
||||
this.category = 'Agent Flows'
|
||||
this.description = 'Execute another flow'
|
||||
this.baseClasses = [this.type]
|
||||
this.color = '#a3b18a'
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
type: 'credential',
|
||||
credentialNames: ['chatflowApi'],
|
||||
optional: true
|
||||
}
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Select Flow',
|
||||
name: 'executeFlowSelectedFlow',
|
||||
type: 'asyncOptions',
|
||||
loadMethod: 'listFlows'
|
||||
},
|
||||
{
|
||||
label: 'Input',
|
||||
name: 'executeFlowInput',
|
||||
type: 'string',
|
||||
rows: 4,
|
||||
acceptVariable: true
|
||||
},
|
||||
{
|
||||
label: 'Override Config',
|
||||
name: 'executeFlowOverrideConfig',
|
||||
description: 'Override the config passed to the flow',
|
||||
type: 'json',
|
||||
optional: true,
|
||||
acceptVariable: true
|
||||
},
|
||||
{
|
||||
label: 'Base URL',
|
||||
name: 'executeFlowBaseURL',
|
||||
type: 'string',
|
||||
description:
|
||||
'Base URL to Flowise. By default, it is the URL of the incoming request. Useful when you need to execute flow through an alternative route.',
|
||||
placeholder: 'http://localhost:3000',
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Return Response As',
|
||||
name: 'executeFlowReturnResponseAs',
|
||||
type: 'options',
|
||||
options: [
|
||||
{
|
||||
label: 'User Message',
|
||||
name: 'userMessage'
|
||||
},
|
||||
{
|
||||
label: 'Assistant Message',
|
||||
name: 'assistantMessage'
|
||||
}
|
||||
],
|
||||
default: 'userMessage'
|
||||
},
|
||||
{
|
||||
label: 'Update Flow State',
|
||||
name: 'executeFlowUpdateState',
|
||||
description: 'Update runtime state during the execution of the workflow',
|
||||
type: 'array',
|
||||
optional: true,
|
||||
acceptVariable: true,
|
||||
array: [
|
||||
{
|
||||
label: 'Key',
|
||||
name: 'key',
|
||||
type: 'asyncOptions',
|
||||
loadMethod: 'listRuntimeStateKeys'
|
||||
},
|
||||
{
|
||||
label: 'Value',
|
||||
name: 'value',
|
||||
type: 'string',
|
||||
acceptVariable: true,
|
||||
acceptNodeOutputAsVariable: true
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
//@ts-ignore
|
||||
loadMethods = {
|
||||
async listFlows(_: INodeData, options: ICommonObject): Promise<INodeOptionsValue[]> {
|
||||
const returnData: INodeOptionsValue[] = []
|
||||
|
||||
const appDataSource = options.appDataSource as DataSource
|
||||
const databaseEntities = options.databaseEntities as IDatabaseEntity
|
||||
if (appDataSource === undefined || !appDataSource) {
|
||||
return returnData
|
||||
}
|
||||
|
||||
const searchOptions = options.searchOptions || {}
|
||||
const chatflows = await appDataSource.getRepository(databaseEntities['ChatFlow']).findBy(searchOptions)
|
||||
|
||||
for (let i = 0; i < chatflows.length; i += 1) {
|
||||
let cfType = 'Chatflow'
|
||||
if (chatflows[i].type === 'AGENTFLOW') {
|
||||
cfType = 'Agentflow V2'
|
||||
} else if (chatflows[i].type === 'MULTIAGENT') {
|
||||
cfType = 'Agentflow V1'
|
||||
}
|
||||
const data = {
|
||||
label: chatflows[i].name,
|
||||
name: chatflows[i].id,
|
||||
description: cfType
|
||||
} as INodeOptionsValue
|
||||
returnData.push(data)
|
||||
}
|
||||
|
||||
// order by label
|
||||
return returnData.sort((a, b) => a.label.localeCompare(b.label))
|
||||
},
|
||||
async listRuntimeStateKeys(_: INodeData, options: ICommonObject): Promise<INodeOptionsValue[]> {
|
||||
const previousNodes = options.previousNodes as ICommonObject[]
|
||||
const startAgentflowNode = previousNodes.find((node) => node.name === 'startAgentflow')
|
||||
const state = startAgentflowNode?.inputs?.startState as ICommonObject[]
|
||||
return state.map((item) => ({ label: item.key, name: item.key }))
|
||||
}
|
||||
}
|
||||
|
||||
async run(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
|
||||
const baseURL = (nodeData.inputs?.executeFlowBaseURL as string) || (options.baseURL as string)
|
||||
const selectedFlowId = nodeData.inputs?.executeFlowSelectedFlow as string
|
||||
const flowInput = nodeData.inputs?.executeFlowInput as string
|
||||
const returnResponseAs = nodeData.inputs?.executeFlowReturnResponseAs as string
|
||||
const _executeFlowUpdateState = nodeData.inputs?.executeFlowUpdateState
|
||||
|
||||
let overrideConfig = nodeData.inputs?.executeFlowOverrideConfig
|
||||
if (typeof overrideConfig === 'string' && overrideConfig.startsWith('{') && overrideConfig.endsWith('}')) {
|
||||
try {
|
||||
overrideConfig = parseJsonBody(overrideConfig)
|
||||
} catch (parseError) {
|
||||
throw new Error(`Invalid JSON in executeFlowOverrideConfig: ${parseError.message}`)
|
||||
}
|
||||
}
|
||||
|
||||
const state = options.agentflowRuntime?.state as ICommonObject
|
||||
const runtimeChatHistory = (options.agentflowRuntime?.chatHistory as BaseMessageLike[]) ?? []
|
||||
const isLastNode = options.isLastNode as boolean
|
||||
const sseStreamer: IServerSideEventStreamer | undefined = options.sseStreamer
|
||||
|
||||
try {
|
||||
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
|
||||
const chatflowApiKey = getCredentialParam('chatflowApiKey', credentialData, nodeData)
|
||||
|
||||
if (selectedFlowId === options.chatflowid) throw new Error('Cannot call the same agentflow!')
|
||||
|
||||
let headers: Record<string, string> = {
|
||||
'Content-Type': 'application/json',
|
||||
'flowise-tool': 'true'
|
||||
}
|
||||
if (chatflowApiKey) headers = { ...headers, Authorization: `Bearer ${chatflowApiKey}` }
|
||||
|
||||
const finalUrl = `${baseURL}/api/v1/prediction/${selectedFlowId}`
|
||||
const requestConfig: AxiosRequestConfig = {
|
||||
method: 'POST',
|
||||
url: finalUrl,
|
||||
headers,
|
||||
data: {
|
||||
question: flowInput,
|
||||
chatId: options.chatId,
|
||||
overrideConfig
|
||||
}
|
||||
}
|
||||
|
||||
const response = await axios(requestConfig)
|
||||
|
||||
let resultText = ''
|
||||
if (response.data.text) resultText = response.data.text
|
||||
else if (response.data.json) resultText = '```json\n' + JSON.stringify(response.data.json, null, 2)
|
||||
else resultText = JSON.stringify(response.data, null, 2)
|
||||
|
||||
if (isLastNode && sseStreamer) {
|
||||
sseStreamer.streamTokenEvent(options.chatId, resultText)
|
||||
}
|
||||
|
||||
// Update flow state if needed
|
||||
let newState = { ...state }
|
||||
if (_executeFlowUpdateState && Array.isArray(_executeFlowUpdateState) && _executeFlowUpdateState.length > 0) {
|
||||
newState = updateFlowState(state, _executeFlowUpdateState)
|
||||
}
|
||||
|
||||
// Process template variables in state
|
||||
newState = processTemplateVariables(newState, resultText)
|
||||
|
||||
// Only add to runtime chat history if this is the first node
|
||||
const inputMessages = []
|
||||
if (!runtimeChatHistory.length) {
|
||||
inputMessages.push({ role: 'user', content: flowInput })
|
||||
}
|
||||
|
||||
let returnRole = 'user'
|
||||
if (returnResponseAs === 'assistantMessage') {
|
||||
returnRole = 'assistant'
|
||||
}
|
||||
|
||||
const returnOutput = {
|
||||
id: nodeData.id,
|
||||
name: this.name,
|
||||
input: {
|
||||
messages: [
|
||||
{
|
||||
role: 'user',
|
||||
content: flowInput
|
||||
}
|
||||
]
|
||||
},
|
||||
output: {
|
||||
content: resultText
|
||||
},
|
||||
state: newState,
|
||||
chatHistory: [
|
||||
...inputMessages,
|
||||
{
|
||||
role: returnRole,
|
||||
content: resultText,
|
||||
name: nodeData?.label ? nodeData?.label.toLowerCase().replace(/\s/g, '_').trim() : nodeData?.id
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
return returnOutput
|
||||
} catch (error) {
|
||||
console.error('ExecuteFlow Error:', error)
|
||||
|
||||
// Format error response
|
||||
const errorResponse: any = {
|
||||
id: nodeData.id,
|
||||
name: this.name,
|
||||
input: {
|
||||
messages: [
|
||||
{
|
||||
role: 'user',
|
||||
content: flowInput
|
||||
}
|
||||
]
|
||||
},
|
||||
error: {
|
||||
name: error.name || 'Error',
|
||||
message: error.message || 'An error occurred during the execution of the flow'
|
||||
},
|
||||
state
|
||||
}
|
||||
|
||||
// Add more error details if available
|
||||
if (error.response) {
|
||||
errorResponse.error.status = error.response.status
|
||||
errorResponse.error.statusText = error.response.statusText
|
||||
errorResponse.error.data = error.response.data
|
||||
errorResponse.error.headers = error.response.headers
|
||||
}
|
||||
|
||||
throw new Error(error)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { nodeClass: ExecuteFlow_Agentflow }
|
||||
|
|
@ -0,0 +1,380 @@
|
|||
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
|
||||
import { AxiosRequestConfig, Method, ResponseType } from 'axios'
|
||||
import FormData from 'form-data'
|
||||
import * as querystring from 'querystring'
|
||||
import { getCredentialData, getCredentialParam, parseJsonBody } from '../../../src/utils'
|
||||
import { secureAxiosRequest } from '../../../src/httpSecurity'
|
||||
|
||||
class HTTP_Agentflow implements INode {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
color: string
|
||||
baseClasses: string[]
|
||||
documentation?: string
|
||||
credential: INodeParams
|
||||
inputs: INodeParams[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'HTTP'
|
||||
this.name = 'httpAgentflow'
|
||||
this.version = 1.1
|
||||
this.type = 'HTTP'
|
||||
this.category = 'Agent Flows'
|
||||
this.description = 'Send a HTTP request'
|
||||
this.baseClasses = [this.type]
|
||||
this.color = '#FF7F7F'
|
||||
this.credential = {
|
||||
label: 'HTTP Credential',
|
||||
name: 'credential',
|
||||
type: 'credential',
|
||||
credentialNames: ['httpBasicAuth', 'httpBearerToken', 'httpApiKey'],
|
||||
optional: true
|
||||
}
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Method',
|
||||
name: 'method',
|
||||
type: 'options',
|
||||
options: [
|
||||
{
|
||||
label: 'GET',
|
||||
name: 'GET'
|
||||
},
|
||||
{
|
||||
label: 'POST',
|
||||
name: 'POST'
|
||||
},
|
||||
{
|
||||
label: 'PUT',
|
||||
name: 'PUT'
|
||||
},
|
||||
{
|
||||
label: 'DELETE',
|
||||
name: 'DELETE'
|
||||
},
|
||||
{
|
||||
label: 'PATCH',
|
||||
name: 'PATCH'
|
||||
}
|
||||
],
|
||||
default: 'GET'
|
||||
},
|
||||
{
|
||||
label: 'URL',
|
||||
name: 'url',
|
||||
type: 'string',
|
||||
acceptVariable: true
|
||||
},
|
||||
{
|
||||
label: 'Headers',
|
||||
name: 'headers',
|
||||
type: 'array',
|
||||
acceptVariable: true,
|
||||
array: [
|
||||
{
|
||||
label: 'Key',
|
||||
name: 'key',
|
||||
type: 'string',
|
||||
default: ''
|
||||
},
|
||||
{
|
||||
label: 'Value',
|
||||
name: 'value',
|
||||
type: 'string',
|
||||
default: '',
|
||||
acceptVariable: true
|
||||
}
|
||||
],
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Query Params',
|
||||
name: 'queryParams',
|
||||
type: 'array',
|
||||
acceptVariable: true,
|
||||
array: [
|
||||
{
|
||||
label: 'Key',
|
||||
name: 'key',
|
||||
type: 'string',
|
||||
default: ''
|
||||
},
|
||||
{
|
||||
label: 'Value',
|
||||
name: 'value',
|
||||
type: 'string',
|
||||
default: '',
|
||||
acceptVariable: true
|
||||
}
|
||||
],
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Body Type',
|
||||
name: 'bodyType',
|
||||
type: 'options',
|
||||
options: [
|
||||
{
|
||||
label: 'JSON',
|
||||
name: 'json'
|
||||
},
|
||||
{
|
||||
label: 'Raw',
|
||||
name: 'raw'
|
||||
},
|
||||
{
|
||||
label: 'Form Data',
|
||||
name: 'formData'
|
||||
},
|
||||
{
|
||||
label: 'x-www-form-urlencoded',
|
||||
name: 'xWwwFormUrlencoded'
|
||||
}
|
||||
],
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Body',
|
||||
name: 'body',
|
||||
type: 'string',
|
||||
acceptVariable: true,
|
||||
rows: 4,
|
||||
show: {
|
||||
bodyType: ['raw', 'json']
|
||||
},
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Body',
|
||||
name: 'body',
|
||||
type: 'array',
|
||||
acceptVariable: true,
|
||||
show: {
|
||||
bodyType: ['xWwwFormUrlencoded', 'formData']
|
||||
},
|
||||
array: [
|
||||
{
|
||||
label: 'Key',
|
||||
name: 'key',
|
||||
type: 'string',
|
||||
default: ''
|
||||
},
|
||||
{
|
||||
label: 'Value',
|
||||
name: 'value',
|
||||
type: 'string',
|
||||
default: '',
|
||||
acceptVariable: true
|
||||
}
|
||||
],
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Response Type',
|
||||
name: 'responseType',
|
||||
type: 'options',
|
||||
options: [
|
||||
{
|
||||
label: 'JSON',
|
||||
name: 'json'
|
||||
},
|
||||
{
|
||||
label: 'Text',
|
||||
name: 'text'
|
||||
},
|
||||
{
|
||||
label: 'Array Buffer',
|
||||
name: 'arraybuffer'
|
||||
},
|
||||
{
|
||||
label: 'Raw (Base64)',
|
||||
name: 'base64'
|
||||
}
|
||||
],
|
||||
optional: true
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
async run(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
|
||||
const method = nodeData.inputs?.method as 'GET' | 'POST' | 'PUT' | 'DELETE' | 'PATCH'
|
||||
const url = nodeData.inputs?.url as string
|
||||
const headers = nodeData.inputs?.headers as ICommonObject
|
||||
const queryParams = nodeData.inputs?.queryParams as ICommonObject
|
||||
const bodyType = nodeData.inputs?.bodyType as 'json' | 'raw' | 'formData' | 'xWwwFormUrlencoded'
|
||||
const body = nodeData.inputs?.body as ICommonObject | string | ICommonObject[]
|
||||
const responseType = nodeData.inputs?.responseType as 'json' | 'text' | 'arraybuffer' | 'base64'
|
||||
|
||||
const state = options.agentflowRuntime?.state as ICommonObject
|
||||
|
||||
try {
|
||||
// Prepare headers
|
||||
const requestHeaders: Record<string, string> = {}
|
||||
|
||||
// Add headers from inputs
|
||||
if (headers && Array.isArray(headers)) {
|
||||
for (const header of headers) {
|
||||
if (header.key && header.value) {
|
||||
requestHeaders[header.key] = header.value
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Add credentials if provided
|
||||
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
|
||||
if (credentialData && Object.keys(credentialData).length !== 0) {
|
||||
const basicAuthUsername = getCredentialParam('basicAuthUsername', credentialData, nodeData)
|
||||
const basicAuthPassword = getCredentialParam('basicAuthPassword', credentialData, nodeData)
|
||||
const bearerToken = getCredentialParam('token', credentialData, nodeData)
|
||||
const apiKeyName = getCredentialParam('key', credentialData, nodeData)
|
||||
const apiKeyValue = getCredentialParam('value', credentialData, nodeData)
|
||||
|
||||
// Determine which type of auth to use based on available credentials
|
||||
if (basicAuthUsername || basicAuthPassword) {
|
||||
// Basic Auth
|
||||
const auth = Buffer.from(`${basicAuthUsername}:${basicAuthPassword}`).toString('base64')
|
||||
requestHeaders['Authorization'] = `Basic ${auth}`
|
||||
} else if (bearerToken) {
|
||||
// Bearer Token
|
||||
requestHeaders['Authorization'] = `Bearer ${bearerToken}`
|
||||
} else if (apiKeyName && apiKeyValue) {
|
||||
// API Key in header
|
||||
requestHeaders[apiKeyName] = apiKeyValue
|
||||
}
|
||||
}
|
||||
|
||||
// Prepare query parameters
|
||||
let queryString = ''
|
||||
if (queryParams && Array.isArray(queryParams)) {
|
||||
const params = new URLSearchParams()
|
||||
for (const param of queryParams) {
|
||||
if (param.key && param.value) {
|
||||
params.append(param.key, param.value)
|
||||
}
|
||||
}
|
||||
queryString = params.toString()
|
||||
}
|
||||
|
||||
// Build final URL with query parameters
|
||||
const finalUrl = queryString ? `${url}${url.includes('?') ? '&' : '?'}${queryString}` : url
|
||||
|
||||
// Prepare request config
|
||||
const requestConfig: AxiosRequestConfig = {
|
||||
method: method as Method,
|
||||
url: finalUrl,
|
||||
headers: requestHeaders,
|
||||
responseType: (responseType || 'json') as ResponseType
|
||||
}
|
||||
|
||||
// Handle request body based on body type
|
||||
if (method !== 'GET' && body) {
|
||||
switch (bodyType) {
|
||||
case 'json': {
|
||||
requestConfig.data = typeof body === 'string' ? parseJsonBody(body) : body
|
||||
requestHeaders['Content-Type'] = 'application/json'
|
||||
break
|
||||
}
|
||||
case 'raw':
|
||||
requestConfig.data = body
|
||||
break
|
||||
case 'formData': {
|
||||
const formData = new FormData()
|
||||
if (Array.isArray(body) && body.length > 0) {
|
||||
for (const item of body) {
|
||||
formData.append(item.key, item.value)
|
||||
}
|
||||
}
|
||||
requestConfig.data = formData
|
||||
break
|
||||
}
|
||||
case 'xWwwFormUrlencoded':
|
||||
requestConfig.data = querystring.stringify(typeof body === 'string' ? parseJsonBody(body) : body)
|
||||
requestHeaders['Content-Type'] = 'application/x-www-form-urlencoded'
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
// Make the secure HTTP request that validates all URLs in redirect chains
|
||||
const response = await secureAxiosRequest(requestConfig)
|
||||
|
||||
// Process response based on response type
|
||||
let responseData
|
||||
if (responseType === 'base64' && response.data) {
|
||||
responseData = Buffer.from(response.data, 'binary').toString('base64')
|
||||
} else {
|
||||
responseData = response.data
|
||||
}
|
||||
|
||||
const returnOutput = {
|
||||
id: nodeData.id,
|
||||
name: this.name,
|
||||
input: {
|
||||
http: {
|
||||
method,
|
||||
url,
|
||||
headers,
|
||||
queryParams,
|
||||
bodyType,
|
||||
body,
|
||||
responseType
|
||||
}
|
||||
},
|
||||
output: {
|
||||
http: {
|
||||
data: responseData,
|
||||
status: response.status,
|
||||
statusText: response.statusText,
|
||||
headers: response.headers
|
||||
}
|
||||
},
|
||||
state
|
||||
}
|
||||
|
||||
return returnOutput
|
||||
} catch (error) {
|
||||
console.error('HTTP Request Error:', error)
|
||||
|
||||
const errorMessage =
|
||||
error.response?.data?.message || error.response?.data?.error || error.message || 'An error occurred during the HTTP request'
|
||||
|
||||
// Format error response
|
||||
const errorResponse: any = {
|
||||
id: nodeData.id,
|
||||
name: this.name,
|
||||
input: {
|
||||
http: {
|
||||
method,
|
||||
url,
|
||||
headers,
|
||||
queryParams,
|
||||
bodyType,
|
||||
body,
|
||||
responseType
|
||||
}
|
||||
},
|
||||
error: {
|
||||
name: error.name || 'Error',
|
||||
message: errorMessage
|
||||
},
|
||||
state
|
||||
}
|
||||
|
||||
// Add more error details if available
|
||||
if (error.response) {
|
||||
errorResponse.error.status = error.response.status
|
||||
errorResponse.error.statusText = error.response.statusText
|
||||
errorResponse.error.data = error.response.data
|
||||
errorResponse.error.headers = error.response.headers
|
||||
}
|
||||
|
||||
throw new Error(errorMessage)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { nodeClass: HTTP_Agentflow }
|
||||
|
|
@ -0,0 +1,274 @@
|
|||
import { BaseChatModel } from '@langchain/core/language_models/chat_models'
|
||||
import {
|
||||
ICommonObject,
|
||||
ICondition,
|
||||
IHumanInput,
|
||||
INode,
|
||||
INodeData,
|
||||
INodeOptionsValue,
|
||||
INodeOutputsValue,
|
||||
INodeParams,
|
||||
IServerSideEventStreamer
|
||||
} from '../../../src/Interface'
|
||||
import { AIMessageChunk, BaseMessageLike } from '@langchain/core/messages'
|
||||
import { DEFAULT_HUMAN_INPUT_DESCRIPTION, DEFAULT_HUMAN_INPUT_DESCRIPTION_HTML } from '../prompt'
|
||||
|
||||
class HumanInput_Agentflow implements INode {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
color: string
|
||||
baseClasses: string[]
|
||||
documentation?: string
|
||||
credential: INodeParams
|
||||
inputs: INodeParams[]
|
||||
outputs: INodeOutputsValue[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'Human Input'
|
||||
this.name = 'humanInputAgentflow'
|
||||
this.version = 1.0
|
||||
this.type = 'HumanInput'
|
||||
this.category = 'Agent Flows'
|
||||
this.description = 'Request human input, approval or rejection during execution'
|
||||
this.color = '#6E6EFD'
|
||||
this.baseClasses = [this.type]
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Description Type',
|
||||
name: 'humanInputDescriptionType',
|
||||
type: 'options',
|
||||
options: [
|
||||
{
|
||||
label: 'Fixed',
|
||||
name: 'fixed',
|
||||
description: 'Specify a fixed description'
|
||||
},
|
||||
{
|
||||
label: 'Dynamic',
|
||||
name: 'dynamic',
|
||||
description: 'Use LLM to generate a description'
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
label: 'Description',
|
||||
name: 'humanInputDescription',
|
||||
type: 'string',
|
||||
placeholder: 'Are you sure you want to proceed?',
|
||||
acceptVariable: true,
|
||||
rows: 4,
|
||||
show: {
|
||||
humanInputDescriptionType: 'fixed'
|
||||
}
|
||||
},
|
||||
{
|
||||
label: 'Model',
|
||||
name: 'humanInputModel',
|
||||
type: 'asyncOptions',
|
||||
loadMethod: 'listModels',
|
||||
loadConfig: true,
|
||||
show: {
|
||||
humanInputDescriptionType: 'dynamic'
|
||||
}
|
||||
},
|
||||
{
|
||||
label: 'Prompt',
|
||||
name: 'humanInputModelPrompt',
|
||||
type: 'string',
|
||||
default: DEFAULT_HUMAN_INPUT_DESCRIPTION_HTML,
|
||||
acceptVariable: true,
|
||||
generateInstruction: true,
|
||||
rows: 4,
|
||||
show: {
|
||||
humanInputDescriptionType: 'dynamic'
|
||||
}
|
||||
},
|
||||
{
|
||||
label: 'Enable Feedback',
|
||||
name: 'humanInputEnableFeedback',
|
||||
type: 'boolean',
|
||||
default: true
|
||||
}
|
||||
]
|
||||
this.outputs = [
|
||||
{
|
||||
label: 'Proceed',
|
||||
name: 'proceed'
|
||||
},
|
||||
{
|
||||
label: 'Reject',
|
||||
name: 'reject'
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
//@ts-ignore
|
||||
loadMethods = {
|
||||
async listModels(_: INodeData, options: ICommonObject): Promise<INodeOptionsValue[]> {
|
||||
const componentNodes = options.componentNodes as {
|
||||
[key: string]: INode
|
||||
}
|
||||
|
||||
const returnOptions: INodeOptionsValue[] = []
|
||||
for (const nodeName in componentNodes) {
|
||||
const componentNode = componentNodes[nodeName]
|
||||
if (componentNode.category === 'Chat Models') {
|
||||
if (componentNode.tags?.includes('LlamaIndex')) {
|
||||
continue
|
||||
}
|
||||
returnOptions.push({
|
||||
label: componentNode.label,
|
||||
name: nodeName,
|
||||
imageSrc: componentNode.icon
|
||||
})
|
||||
}
|
||||
}
|
||||
return returnOptions
|
||||
}
|
||||
}
|
||||
|
||||
    /**
     * Pauses the flow for human review, or resolves a previously submitted review.
     *
     * Two phases, selected by whether `nodeData.inputs.humanInput` is present:
     * - Present (resume phase): builds 'proceed'/'reject' condition outcomes, marks
     *   the one matching the submitted `humanInput.type` as fulfilled, and returns
     *   them (plus any feedback, when feedback is enabled).
     * - Absent (prompt phase): produces the description shown to the human — either
     *   the fixed text input or text generated by the configured chat model — and
     *   streams it over SSE when a streamer is available.
     */
    async run(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
        // humanInput may arrive as a JSON string (e.g. over the API) or as an object
        const _humanInput = nodeData.inputs?.humanInput
        const humanInput: IHumanInput = typeof _humanInput === 'string' ? JSON.parse(_humanInput) : _humanInput

        const humanInputEnableFeedback = nodeData.inputs?.humanInputEnableFeedback as boolean
        let humanInputDescriptionType = nodeData.inputs?.humanInputDescriptionType as string
        const model = nodeData.inputs?.humanInputModel as string
        const modelConfig = nodeData.inputs?.humanInputModelConfig as ICommonObject
        const _humanInputModelPrompt = nodeData.inputs?.humanInputModelPrompt as string
        // Fall back to the default prompt when none was configured
        const humanInputModelPrompt = _humanInputModelPrompt ? _humanInputModelPrompt : DEFAULT_HUMAN_INPUT_DESCRIPTION

        // Extract runtime state and history
        const state = options.agentflowRuntime?.state as ICommonObject
        const pastChatHistory = (options.pastChatHistory as BaseMessageLike[]) ?? []
        const runtimeChatHistory = (options.agentflowRuntime?.chatHistory as BaseMessageLike[]) ?? []

        const chatId = options.chatId as string
        const isStreamable = options.sseStreamer !== undefined

        if (humanInput) {
            // Resume phase: one outcome per output anchor ('proceed' / 'reject')
            const outcomes: Partial<ICondition>[] & Partial<IHumanInput>[] = [
                {
                    type: 'proceed',
                    startNodeId: humanInput?.startNodeId,
                    // Feedback is only propagated when the feedback toggle is on
                    feedback: humanInputEnableFeedback && humanInput?.feedback ? humanInput.feedback : undefined,
                    isFulfilled: false
                },
                {
                    type: 'reject',
                    startNodeId: humanInput?.startNodeId,
                    feedback: humanInputEnableFeedback && humanInput?.feedback ? humanInput.feedback : undefined,
                    isFulfilled: false
                }
            ]

            // Only one outcome can be fulfilled at a time
            switch (humanInput?.type) {
                case 'proceed':
                    outcomes[0].isFulfilled = true
                    break
                case 'reject':
                    outcomes[1].isFulfilled = true
                    break
            }

            // Record the human's response (feedback text, or the bare choice) as a user turn
            const messages = [
                ...pastChatHistory,
                ...runtimeChatHistory,
                {
                    role: 'user',
                    content: humanInput.feedback || humanInput.type
                }
            ]
            const input = { ...humanInput, messages }
            const output = { conditions: outcomes }

            const nodeOutput = {
                id: nodeData.id,
                name: this.name,
                input,
                output,
                state
            }

            // Persist the feedback into chat history only when it exists
            if (humanInput.feedback) {
                ;(nodeOutput as any).chatHistory = [{ role: 'user', content: humanInput.feedback }]
            }

            return nodeOutput
        } else {
            // Prompt phase: build the description shown to the human reviewer
            let humanInputDescription = ''

            if (humanInputDescriptionType === 'fixed') {
                humanInputDescription = (nodeData.inputs?.humanInputDescription as string) || 'Do you want to proceed?'
                const messages = [...pastChatHistory, ...runtimeChatHistory]
                // Find the last message in the messages array
                const lastMessage = messages.length > 0 ? (messages[messages.length - 1] as any).content || '' : ''
                // Prefix the prompt with the most recent message for context
                humanInputDescription = `${lastMessage}\n\n${humanInputDescription}`
                if (isStreamable) {
                    const sseStreamer: IServerSideEventStreamer = options.sseStreamer as IServerSideEventStreamer
                    sseStreamer.streamTokenEvent(chatId, humanInputDescription)
                }
            } else {
                // 'dynamic': generate the description with the configured chat model.
                // NOTE(review): when model/modelConfig are missing the description
                // silently stays '' — confirm that is intended.
                if (model && modelConfig) {
                    // Dynamically load and initialize the selected chat-model component
                    const nodeInstanceFilePath = options.componentNodes[model].filePath as string
                    const nodeModule = await import(nodeInstanceFilePath)
                    const newNodeInstance = new nodeModule.nodeClass()
                    const newNodeData = {
                        ...nodeData,
                        credential: modelConfig['FLOWISE_CREDENTIAL_ID'],
                        inputs: {
                            ...nodeData.inputs,
                            ...modelConfig
                        }
                    }
                    const llmNodeInstance = (await newNodeInstance.init(newNodeData, '', options)) as BaseChatModel
                    const messages = [
                        ...pastChatHistory,
                        ...runtimeChatHistory,
                        {
                            role: 'user',
                            content: humanInputModelPrompt || DEFAULT_HUMAN_INPUT_DESCRIPTION
                        }
                    ]

                    let response: AIMessageChunk = new AIMessageChunk('')
                    if (isStreamable) {
                        // Stream tokens to the client while accumulating the full response
                        const sseStreamer: IServerSideEventStreamer = options.sseStreamer as IServerSideEventStreamer
                        for await (const chunk of await llmNodeInstance.stream(messages)) {
                            const content = typeof chunk === 'string' ? chunk : chunk.content.toString()
                            sseStreamer.streamTokenEvent(chatId, content)

                            const messageChunk = typeof chunk === 'string' ? new AIMessageChunk(chunk) : chunk
                            response = response.concat(messageChunk)
                        }
                        humanInputDescription = response.content as string
                    } else {
                        const response = await llmNodeInstance.invoke(messages)
                        humanInputDescription = response.content as string
                    }
                }
            }

            const input = { messages: [...pastChatHistory, ...runtimeChatHistory], humanInputEnableFeedback }
            const output = { content: humanInputDescription }
            const nodeOutput = {
                id: nodeData.id,
                name: this.name,
                input,
                output,
                state,
                // The generated description is recorded as an assistant turn
                chatHistory: [{ role: 'assistant', content: humanInputDescription }]
            }

            return nodeOutput
        }
    }
|
||||
}
|
||||
|
||||
// Register the node class for Flowise's component loader
module.exports = { nodeClass: HumanInput_Agentflow }
|
||||
|
|
@ -0,0 +1,17 @@
|
|||
/**
 * A single chat message configured on an Agentflow LLM/Agent node.
 */
export interface ILLMMessage {
    role: 'system' | 'assistant' | 'user' | 'tool' | 'developer'
    content: string
}

/**
 * One key of a JSON structured-output schema requested from the LLM.
 */
export interface IStructuredOutput {
    key: string
    type: 'string' | 'stringArray' | 'number' | 'boolean' | 'enum' | 'jsonArray'
    // Comma-separated enum values; only used when type === 'enum'
    enumValues?: string
    description?: string
    // JSON schema string describing array items; only used when type === 'jsonArray'
    jsonSchema?: string
}

/**
 * A key/value entry of the flow's runtime state.
 */
export interface IFlowState {
    key: string
    value: string
}
|
||||
|
|
@ -0,0 +1,75 @@
|
|||
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
|
||||
import { parseJsonBody } from '../../../src/utils'
|
||||
|
||||
class Iteration_Agentflow implements INode {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
color: string
|
||||
baseClasses: string[]
|
||||
documentation?: string
|
||||
credential: INodeParams
|
||||
inputs: INodeParams[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'Iteration'
|
||||
this.name = 'iterationAgentflow'
|
||||
this.version = 1.0
|
||||
this.type = 'Iteration'
|
||||
this.category = 'Agent Flows'
|
||||
this.description = 'Execute the nodes within the iteration block through N iterations'
|
||||
this.baseClasses = [this.type]
|
||||
this.color = '#9C89B8'
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Array Input',
|
||||
name: 'iterationInput',
|
||||
type: 'string',
|
||||
description: 'The input array to iterate over',
|
||||
acceptVariable: true,
|
||||
rows: 4
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
async run(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
|
||||
const iterationInput = nodeData.inputs?.iterationInput
|
||||
|
||||
// Helper function to clean JSON strings with redundant backslashes
|
||||
const safeParseJson = (str: string): string => {
|
||||
try {
|
||||
return parseJsonBody(str)
|
||||
} catch {
|
||||
// Try parsing after cleaning
|
||||
return parseJsonBody(str.replace(/\\(["'[\]{}])/g, '$1'))
|
||||
}
|
||||
}
|
||||
|
||||
const iterationInputArray =
|
||||
typeof iterationInput === 'string' && iterationInput !== '' ? safeParseJson(iterationInput) : iterationInput
|
||||
|
||||
if (!iterationInputArray || !Array.isArray(iterationInputArray)) {
|
||||
throw new Error('Invalid input array')
|
||||
}
|
||||
|
||||
const state = options.agentflowRuntime?.state as ICommonObject
|
||||
|
||||
const returnOutput = {
|
||||
id: nodeData.id,
|
||||
name: this.name,
|
||||
input: {
|
||||
iterationInput: iterationInputArray
|
||||
},
|
||||
output: {},
|
||||
state
|
||||
}
|
||||
|
||||
return returnOutput
|
||||
}
|
||||
}
|
||||
|
||||
// Register the node class for Flowise's component loader
module.exports = { nodeClass: Iteration_Agentflow }
|
||||
|
|
@ -0,0 +1,949 @@
|
|||
import { BaseChatModel } from '@langchain/core/language_models/chat_models'
|
||||
import { ICommonObject, IMessage, INode, INodeData, INodeOptionsValue, INodeParams, IServerSideEventStreamer } from '../../../src/Interface'
|
||||
import { AIMessageChunk, BaseMessageLike, MessageContentText } from '@langchain/core/messages'
|
||||
import { DEFAULT_SUMMARIZER_TEMPLATE } from '../prompt'
|
||||
import { AnalyticHandler } from '../../../src/handler'
|
||||
import { ILLMMessage } from '../Interface.Agentflow'
|
||||
import {
|
||||
addImageArtifactsToMessages,
|
||||
extractArtifactsFromResponse,
|
||||
getPastChatHistoryImageMessages,
|
||||
getUniqueImageMessages,
|
||||
processMessagesWithImages,
|
||||
replaceBase64ImagesWithFileReferences,
|
||||
replaceInlineDataWithFileReferences,
|
||||
updateFlowState
|
||||
} from '../utils'
|
||||
import { processTemplateVariables, configureStructuredOutput } from '../../../src/utils'
|
||||
import { flatten } from 'lodash'
|
||||
|
||||
class LLM_Agentflow implements INode {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
color: string
|
||||
baseClasses: string[]
|
||||
documentation?: string
|
||||
credential: INodeParams
|
||||
inputs: INodeParams[]
|
||||
|
||||
    /**
     * Declares the LLM node's metadata and its full input-parameter schema
     * (model selection, message list, memory options, response role, JSON
     * structured output, and flow-state updates). Pure configuration — no logic.
     */
    constructor() {
        this.label = 'LLM'
        this.name = 'llmAgentflow'
        this.version = 1.1
        this.type = 'LLM'
        this.category = 'Agent Flows'
        this.description = 'Large language models to analyze user-provided inputs and generate responses'
        this.color = '#64B5F6'
        this.baseClasses = [this.type]
        this.inputs = [
            // Chat model to invoke; resolved via the listModels load method
            {
                label: 'Model',
                name: 'llmModel',
                type: 'asyncOptions',
                loadMethod: 'listModels',
                loadConfig: true
            },
            // Optional list of role/content messages prepended to the conversation
            {
                label: 'Messages',
                name: 'llmMessages',
                type: 'array',
                optional: true,
                acceptVariable: true,
                array: [
                    {
                        label: 'Role',
                        name: 'role',
                        type: 'options',
                        options: [
                            {
                                label: 'System',
                                name: 'system'
                            },
                            {
                                label: 'Assistant',
                                name: 'assistant'
                            },
                            {
                                label: 'Developer',
                                name: 'developer'
                            },
                            {
                                label: 'User',
                                name: 'user'
                            }
                        ]
                    },
                    {
                        label: 'Content',
                        name: 'content',
                        type: 'string',
                        acceptVariable: true,
                        generateInstruction: true,
                        rows: 4
                    }
                ]
            },
            // Memory controls: how past conversation turns are surfaced to the model
            {
                label: 'Enable Memory',
                name: 'llmEnableMemory',
                type: 'boolean',
                description: 'Enable memory for the conversation thread',
                default: true,
                optional: true
            },
            {
                label: 'Memory Type',
                name: 'llmMemoryType',
                type: 'options',
                options: [
                    {
                        label: 'All Messages',
                        name: 'allMessages',
                        description: 'Retrieve all messages from the conversation'
                    },
                    {
                        label: 'Window Size',
                        name: 'windowSize',
                        description: 'Uses a fixed window size to surface the last N messages'
                    },
                    {
                        label: 'Conversation Summary',
                        name: 'conversationSummary',
                        description: 'Summarizes the whole conversation'
                    },
                    {
                        label: 'Conversation Summary Buffer',
                        name: 'conversationSummaryBuffer',
                        description: 'Summarize conversations once token limit is reached. Default to 2000'
                    }
                ],
                optional: true,
                default: 'allMessages',
                show: {
                    llmEnableMemory: true
                }
            },
            // Shown only for the 'windowSize' memory type
            {
                label: 'Window Size',
                name: 'llmMemoryWindowSize',
                type: 'number',
                default: '20',
                description: 'Uses a fixed window size to surface the last N messages',
                show: {
                    llmMemoryType: 'windowSize'
                }
            },
            // Shown only for the 'conversationSummaryBuffer' memory type
            {
                label: 'Max Token Limit',
                name: 'llmMemoryMaxTokenLimit',
                type: 'number',
                default: '2000',
                description: 'Summarize conversations once token limit is reached. Default to 2000',
                show: {
                    llmMemoryType: 'conversationSummaryBuffer'
                }
            },
            {
                label: 'Input Message',
                name: 'llmUserMessage',
                type: 'string',
                description: 'Add an input message as user message at the end of the conversation',
                rows: 4,
                optional: true,
                acceptVariable: true,
                show: {
                    llmEnableMemory: true
                }
            },
            // Role under which this node's response is stored in chat history
            {
                label: 'Return Response As',
                name: 'llmReturnResponseAs',
                type: 'options',
                options: [
                    {
                        label: 'User Message',
                        name: 'userMessage'
                    },
                    {
                        label: 'Assistant Message',
                        name: 'assistantMessage'
                    }
                ],
                default: 'userMessage'
            },
            // Structured-output schema; each row maps to an IStructuredOutput entry
            {
                label: 'JSON Structured Output',
                name: 'llmStructuredOutput',
                description: 'Instruct the LLM to give output in a JSON structured schema',
                type: 'array',
                optional: true,
                acceptVariable: true,
                array: [
                    {
                        label: 'Key',
                        name: 'key',
                        type: 'string'
                    },
                    {
                        label: 'Type',
                        name: 'type',
                        type: 'options',
                        options: [
                            {
                                label: 'String',
                                name: 'string'
                            },
                            {
                                label: 'String Array',
                                name: 'stringArray'
                            },
                            {
                                label: 'Number',
                                name: 'number'
                            },
                            {
                                label: 'Boolean',
                                name: 'boolean'
                            },
                            {
                                label: 'Enum',
                                name: 'enum'
                            },
                            {
                                label: 'JSON Array',
                                name: 'jsonArray'
                            }
                        ]
                    },
                    // Only shown for rows whose type is 'enum'
                    {
                        label: 'Enum Values',
                        name: 'enumValues',
                        type: 'string',
                        placeholder: 'value1, value2, value3',
                        description: 'Enum values. Separated by comma',
                        optional: true,
                        show: {
                            'llmStructuredOutput[$index].type': 'enum'
                        }
                    },
                    // Only shown for rows whose type is 'jsonArray'
                    {
                        label: 'JSON Schema',
                        name: 'jsonSchema',
                        type: 'code',
                        placeholder: `{
    "answer": {
        "type": "string",
        "description": "Value of the answer"
    },
    "reason": {
        "type": "string",
        "description": "Reason for the answer"
    },
    "optional": {
        "type": "boolean"
    },
    "count": {
        "type": "number"
    },
    "children": {
        "type": "array",
        "items": {
            "type": "object",
            "properties": {
                "value": {
                    "type": "string",
                    "description": "Value of the children's answer"
                }
            }
        }
    }
}`,
                        description: 'JSON schema for the structured output',
                        optional: true,
                        hideCodeExecute: true,
                        show: {
                            'llmStructuredOutput[$index].type': 'jsonArray'
                        }
                    },
                    {
                        label: 'Description',
                        name: 'description',
                        type: 'string',
                        placeholder: 'Description of the key'
                    }
                ]
            },
            // Flow-state mutations applied after the node runs
            {
                label: 'Update Flow State',
                name: 'llmUpdateState',
                description: 'Update runtime state during the execution of the workflow',
                type: 'array',
                optional: true,
                acceptVariable: true,
                array: [
                    {
                        label: 'Key',
                        name: 'key',
                        type: 'asyncOptions',
                        loadMethod: 'listRuntimeStateKeys'
                    },
                    {
                        label: 'Value',
                        name: 'value',
                        type: 'string',
                        acceptVariable: true,
                        acceptNodeOutputAsVariable: true
                    }
                ]
            }
        ]
    }
|
||||
|
||||
//@ts-ignore
|
||||
loadMethods = {
|
||||
async listModels(_: INodeData, options: ICommonObject): Promise<INodeOptionsValue[]> {
|
||||
const componentNodes = options.componentNodes as {
|
||||
[key: string]: INode
|
||||
}
|
||||
|
||||
const returnOptions: INodeOptionsValue[] = []
|
||||
for (const nodeName in componentNodes) {
|
||||
const componentNode = componentNodes[nodeName]
|
||||
if (componentNode.category === 'Chat Models') {
|
||||
if (componentNode.tags?.includes('LlamaIndex')) {
|
||||
continue
|
||||
}
|
||||
returnOptions.push({
|
||||
label: componentNode.label,
|
||||
name: nodeName,
|
||||
imageSrc: componentNode.icon
|
||||
})
|
||||
}
|
||||
}
|
||||
return returnOptions
|
||||
},
|
||||
async listRuntimeStateKeys(_: INodeData, options: ICommonObject): Promise<INodeOptionsValue[]> {
|
||||
const previousNodes = options.previousNodes as ICommonObject[]
|
||||
const startAgentflowNode = previousNodes.find((node) => node.name === 'startAgentflow')
|
||||
const state = startAgentflowNode?.inputs?.startState as ICommonObject[]
|
||||
return state.map((item) => ({ label: item.key, name: item.key }))
|
||||
}
|
||||
}
|
||||
|
||||
    /**
     * Executes the LLM node: builds the message list (prepended history, node
     * messages, memory, uploaded images), optionally configures structured
     * output, invokes the model (streaming when this is the last node and
     * streaming is enabled), extracts artifacts/file annotations from the
     * response metadata, updates flow state, and returns the node output with
     * chat-history entries stripped of base64 image data.
     *
     * @param nodeData node configuration (model, messages, memory, state updates)
     * @param input    the incoming user input for this run
     * @param options  runtime context (chat history, SSE streamer, analytics, …)
     * @throws Error   rethrows 'Aborted' as-is; wraps any other failure
     */
    async run(nodeData: INodeData, input: string | Record<string, any>, options: ICommonObject): Promise<any> {
        let llmIds: ICommonObject | undefined
        let analyticHandlers = options.analyticHandlers as AnalyticHandler

        try {
            const abortController = options.abortController as AbortController

            // Extract input parameters
            const model = nodeData.inputs?.llmModel as string
            const modelConfig = nodeData.inputs?.llmModelConfig as ICommonObject
            if (!model) {
                throw new Error('Model is required')
            }

            // Extract memory and configuration options
            const enableMemory = nodeData.inputs?.llmEnableMemory as boolean
            const memoryType = nodeData.inputs?.llmMemoryType as string
            const userMessage = nodeData.inputs?.llmUserMessage as string
            const _llmUpdateState = nodeData.inputs?.llmUpdateState
            const _llmStructuredOutput = nodeData.inputs?.llmStructuredOutput
            const llmMessages = (nodeData.inputs?.llmMessages as unknown as ILLMMessage[]) ?? []

            // Extract runtime state and history
            const state = options.agentflowRuntime?.state as ICommonObject
            const pastChatHistory = (options.pastChatHistory as BaseMessageLike[]) ?? []
            const runtimeChatHistory = (options.agentflowRuntime?.chatHistory as BaseMessageLike[]) ?? []
            const prependedChatHistory = options.prependedChatHistory as IMessage[]
            const chatId = options.chatId as string

            // Initialize the LLM model instance by dynamically importing the component
            const nodeInstanceFilePath = options.componentNodes[model].filePath as string
            const nodeModule = await import(nodeInstanceFilePath)
            const newLLMNodeInstance = new nodeModule.nodeClass()
            const newNodeData = {
                ...nodeData,
                credential: modelConfig['FLOWISE_CREDENTIAL_ID'],
                inputs: {
                    ...nodeData.inputs,
                    ...modelConfig
                }
            }
            let llmNodeInstance = (await newLLMNodeInstance.init(newNodeData, '', options)) as BaseChatModel

            // Prepare messages array
            const messages: BaseMessageLike[] = []
            // Use to store messages with image file references as we do not want to store the base64 data into database
            let runtimeImageMessagesWithFileRef: BaseMessageLike[] = []
            // Use to keep track of past messages with image file references
            let pastImageMessagesWithFileRef: BaseMessageLike[] = []

            // Prepend history ONLY if it is the first node
            if (prependedChatHistory.length > 0 && !runtimeChatHistory.length) {
                for (const msg of prependedChatHistory) {
                    const role: string = msg.role === 'apiMessage' ? 'assistant' : 'user'
                    const content: string = msg.content ?? ''
                    messages.push({
                        role,
                        content
                    })
                }
            }

            // Node-configured messages; system messages are forced to the front
            for (const msg of llmMessages) {
                const role = msg.role
                const content = msg.content
                if (role && content) {
                    if (role === 'system') {
                        messages.unshift({ role, content })
                    } else {
                        messages.push({ role, content })
                    }
                }
            }

            // Handle memory management if enabled
            if (enableMemory) {
                await this.handleMemory({
                    messages,
                    memoryType,
                    pastChatHistory,
                    runtimeChatHistory,
                    llmNodeInstance,
                    nodeData,
                    userMessage,
                    input,
                    abortController,
                    options,
                    modelConfig,
                    runtimeImageMessagesWithFileRef,
                    pastImageMessagesWithFileRef
                })
            } else if (!runtimeChatHistory.length) {
                /*
                 * If this is the first node:
                 * - Add images to messages if exist
                 * - Add user message if it does not exist in the llmMessages array
                 */
                if (options.uploads) {
                    const imageContents = await getUniqueImageMessages(options, messages, modelConfig)
                    if (imageContents) {
                        const { imageMessageWithBase64, imageMessageWithFileRef } = imageContents
                        messages.push(imageMessageWithBase64)
                        runtimeImageMessagesWithFileRef.push(imageMessageWithFileRef)
                    }
                }

                if (input && typeof input === 'string' && !llmMessages.some((msg) => msg.role === 'user')) {
                    messages.push({
                        role: 'user',
                        content: input
                    })
                }
            }
            // llmMessages have been merged into `messages`; drop the raw input
            delete nodeData.inputs?.llmMessages

            /**
             * Add image artifacts from previous assistant responses as user messages
             * Images are converted from FILE-STORAGE::<image_path> to base 64 image_url format
             */
            await addImageArtifactsToMessages(messages, options)

            // Configure structured output if specified
            const isStructuredOutput = _llmStructuredOutput && Array.isArray(_llmStructuredOutput) && _llmStructuredOutput.length > 0
            if (isStructuredOutput) {
                llmNodeInstance = configureStructuredOutput(llmNodeInstance, _llmStructuredOutput)
            }

            // Initialize response and determine if streaming is possible.
            // Streaming requires: last node + a streamer + streaming not disabled + no structured output.
            let response: AIMessageChunk = new AIMessageChunk('')
            const isLastNode = options.isLastNode as boolean
            const isStreamable = isLastNode && options.sseStreamer !== undefined && modelConfig?.streaming !== false && !isStructuredOutput

            // Start analytics
            if (analyticHandlers && options.parentTraceIds) {
                const llmLabel = options?.componentNodes?.[model]?.label || model
                llmIds = await analyticHandlers.onLLMStart(llmLabel, messages, options.parentTraceIds)
            }

            // Track execution time
            const startTime = Date.now()
            const sseStreamer: IServerSideEventStreamer | undefined = options.sseStreamer

            /*
             * Invoke LLM
             */
            if (isStreamable) {
                response = await this.handleStreamingResponse(sseStreamer, llmNodeInstance, messages, chatId, abortController)
            } else {
                response = await llmNodeInstance.invoke(messages, { signal: abortController?.signal })

                // Stream whole response back to UI if this is the last node
                if (isLastNode && options.sseStreamer) {
                    const sseStreamer: IServerSideEventStreamer = options.sseStreamer as IServerSideEventStreamer
                    let finalResponse = ''
                    if (response.content && Array.isArray(response.content)) {
                        finalResponse = response.content.map((item: any) => item.text).join('\n')
                    } else if (response.content && typeof response.content === 'string') {
                        finalResponse = response.content
                    } else {
                        finalResponse = JSON.stringify(response, null, 2)
                    }
                    sseStreamer.streamTokenEvent(chatId, finalResponse)
                }
            }

            // Calculate execution time
            const endTime = Date.now()
            const timeDelta = endTime - startTime

            // Extract artifacts and file annotations from response metadata
            let artifacts: any[] = []
            let fileAnnotations: any[] = []
            if (response.response_metadata) {
                const {
                    artifacts: extractedArtifacts,
                    fileAnnotations: extractedFileAnnotations,
                    savedInlineImages
                } = await extractArtifactsFromResponse(response.response_metadata, newNodeData, options)

                if (extractedArtifacts.length > 0) {
                    artifacts = extractedArtifacts

                    // Stream artifacts if this is the last node
                    if (isLastNode && sseStreamer) {
                        sseStreamer.streamArtifactsEvent(chatId, artifacts)
                    }
                }

                if (extractedFileAnnotations.length > 0) {
                    fileAnnotations = extractedFileAnnotations

                    // Stream file annotations if this is the last node
                    if (isLastNode && sseStreamer) {
                        sseStreamer.streamFileAnnotationsEvent(chatId, fileAnnotations)
                    }
                }

                // Replace inlineData base64 with file references in the response
                if (savedInlineImages && savedInlineImages.length > 0) {
                    replaceInlineDataWithFileReferences(response, savedInlineImages)
                }
            }

            // Update flow state if needed
            let newState = { ...state }
            if (_llmUpdateState && Array.isArray(_llmUpdateState) && _llmUpdateState.length > 0) {
                newState = updateFlowState(state, _llmUpdateState)
            }

            // Clean up empty inputs
            for (const key in nodeData.inputs) {
                if (nodeData.inputs[key] === '') {
                    delete nodeData.inputs[key]
                }
            }

            // Prepare final response and output object
            let finalResponse = ''
            if (response.content && Array.isArray(response.content)) {
                finalResponse = response.content.map((item: any) => item.text).join('\n')
            } else if (response.content && typeof response.content === 'string') {
                finalResponse = response.content
            } else if (response.content === '') {
                // Empty response content, this could happen when there is only image data
                finalResponse = ''
            } else {
                finalResponse = JSON.stringify(response, null, 2)
            }
            const output = this.prepareOutputObject(
                response,
                finalResponse,
                startTime,
                endTime,
                timeDelta,
                isStructuredOutput,
                artifacts,
                fileAnnotations
            )

            // End analytics tracking
            if (analyticHandlers && llmIds) {
                await analyticHandlers.onLLMEnd(llmIds, finalResponse)
            }

            // Send additional streaming events if needed
            if (isStreamable) {
                this.sendStreamingEvents(options, chatId, response)
            }

            // Stream file annotations if any were extracted.
            // NOTE(review): annotations were already streamed above when extracted,
            // so this looks like it emits a second event for the same data — confirm intended.
            if (fileAnnotations.length > 0 && isLastNode && sseStreamer) {
                sseStreamer.streamFileAnnotationsEvent(chatId, fileAnnotations)
            }

            // Process template variables in state
            newState = processTemplateVariables(newState, finalResponse)

            /**
             * Remove the temporarily added image artifact messages before storing
             * This is to avoid storing the actual base64 data into database
             */
            const messagesToStore = messages.filter((msg: any) => !msg._isTemporaryImageMessage)

            // Replace the actual messages array with one that includes the file references for images instead of base64 data
            const messagesWithFileReferences = replaceBase64ImagesWithFileReferences(
                messagesToStore,
                runtimeImageMessagesWithFileRef,
                pastImageMessagesWithFileRef
            )

            // Only add to runtime chat history if this is the first node
            const inputMessages = []
            if (!runtimeChatHistory.length) {
                if (runtimeImageMessagesWithFileRef.length) {
                    inputMessages.push(...runtimeImageMessagesWithFileRef)
                }
                if (input && typeof input === 'string') {
                    if (!enableMemory) {
                        if (!llmMessages.some((msg) => msg.role === 'user')) {
                            inputMessages.push({ role: 'user', content: input })
                        } else {
                            // Surface the user messages configured on the node instead of the raw input
                            llmMessages.map((msg) => {
                                if (msg.role === 'user') {
                                    inputMessages.push({ role: 'user', content: msg.content })
                                }
                            })
                        }
                    } else {
                        inputMessages.push({ role: 'user', content: input })
                    }
                }
            }

            const returnResponseAs = nodeData.inputs?.llmReturnResponseAs as string
            let returnRole = 'user'
            if (returnResponseAs === 'assistantMessage') {
                returnRole = 'assistant'
            }

            // Prepare and return the final output
            return {
                id: nodeData.id,
                name: this.name,
                input: {
                    messages: messagesWithFileReferences,
                    ...nodeData.inputs
                },
                output,
                state: newState,
                chatHistory: [
                    ...inputMessages,

                    // LLM response
                    {
                        role: returnRole,
                        content: finalResponse,
                        // History entry is tagged with a snake_cased node label (or the node id)
                        name: nodeData?.label ? nodeData?.label.toLowerCase().replace(/\s/g, '_').trim() : nodeData?.id,
                        ...(((artifacts && artifacts.length > 0) || (fileAnnotations && fileAnnotations.length > 0)) && {
                            additional_kwargs: {
                                ...(artifacts && artifacts.length > 0 && { artifacts }),
                                ...(fileAnnotations && fileAnnotations.length > 0 && { fileAnnotations })
                            }
                        })
                    }
                ]
            }
        } catch (error) {
            if (options.analyticHandlers && llmIds) {
                await options.analyticHandlers.onLLMError(llmIds, error instanceof Error ? error.message : String(error))
            }

            // Abort is propagated unchanged so the executor can recognize it
            if (error instanceof Error && error.message === 'Aborted') {
                throw error
            }
            throw new Error(`Error in LLM node: ${error instanceof Error ? error.message : String(error)}`)
        }
    }
|
||||
|
||||
/**
|
||||
* Handles memory management based on the specified memory type
|
||||
*/
|
||||
    /**
     * Handles memory management based on the specified memory type.
     *
     * Appends past conversation turns to `messages` (mutated in place) according
     * to `memoryType`: a fixed window, an LLM-generated summary, a token-limited
     * summary buffer, or all messages (default). On the first node of a run it
     * also folds in uploaded images and the incoming input as a user turn.
     * Image base64 payloads are swapped for file references; the transformed
     * originals are collected into `pastImageMessagesWithFileRef` /
     * `runtimeImageMessagesWithFileRef` (both mutated in place) for storage.
     */
    private async handleMemory({
        messages,
        memoryType,
        pastChatHistory,
        runtimeChatHistory,
        llmNodeInstance,
        nodeData,
        userMessage,
        input,
        abortController,
        options,
        modelConfig,
        runtimeImageMessagesWithFileRef,
        pastImageMessagesWithFileRef
    }: {
        messages: BaseMessageLike[]
        memoryType: string
        pastChatHistory: BaseMessageLike[]
        runtimeChatHistory: BaseMessageLike[]
        llmNodeInstance: BaseChatModel
        nodeData: INodeData
        userMessage: string
        input: string | Record<string, any>
        abortController: AbortController
        options: ICommonObject
        modelConfig: ICommonObject
        runtimeImageMessagesWithFileRef: BaseMessageLike[]
        pastImageMessagesWithFileRef: BaseMessageLike[]
    }): Promise<void> {
        // Resolve image references in past history before merging
        const { updatedPastMessages, transformedPastMessages } = await getPastChatHistoryImageMessages(pastChatHistory, options)
        pastChatHistory = updatedPastMessages
        pastImageMessagesWithFileRef.push(...transformedPastMessages)

        let pastMessages = [...pastChatHistory, ...runtimeChatHistory]
        if (!runtimeChatHistory.length && input && typeof input === 'string') {
            /*
             * If this is the first node:
             * - Add images to messages if exist
             * - Add user message
             */
            if (options.uploads) {
                const imageContents = await getUniqueImageMessages(options, messages, modelConfig)
                if (imageContents) {
                    const { imageMessageWithBase64, imageMessageWithFileRef } = imageContents
                    pastMessages.push(imageMessageWithBase64)
                    runtimeImageMessagesWithFileRef.push(imageMessageWithFileRef)
                }
            }
            pastMessages.push({
                role: 'user',
                content: input
            })
        }
        // Swap base64 image payloads for file references across the merged history
        const { updatedMessages, transformedMessages } = await processMessagesWithImages(pastMessages, options)
        pastMessages = updatedMessages
        pastImageMessagesWithFileRef.push(...transformedMessages)

        if (pastMessages.length > 0) {
            if (memoryType === 'windowSize') {
                // Window memory: Keep the last N messages
                // (N * 2 slots, i.e. N user/assistant exchange pairs)
                const windowSize = nodeData.inputs?.llmMemoryWindowSize as number
                const windowedMessages = pastMessages.slice(-windowSize * 2)
                messages.push(...windowedMessages)
            } else if (memoryType === 'conversationSummary') {
                // Summary memory: Summarize all past messages with the same LLM
                const summary = await llmNodeInstance.invoke(
                    [
                        {
                            role: 'user',
                            content: DEFAULT_SUMMARIZER_TEMPLATE.replace(
                                '{conversation}',
                                pastMessages.map((msg: any) => `${msg.role}: ${msg.content}`).join('\n')
                            )
                        }
                    ],
                    { signal: abortController?.signal }
                )
                messages.push({ role: 'assistant', content: summary.content as string })
            } else if (memoryType === 'conversationSummaryBuffer') {
                // Summary buffer: Summarize messages that exceed token limit
                await this.handleSummaryBuffer(messages, pastMessages, llmNodeInstance, nodeData, abortController)
            } else {
                // Default: Use all messages
                messages.push(...pastMessages)
            }
        }

        // Add user message
        if (userMessage) {
            messages.push({
                role: 'user',
                content: userMessage
            })
        }
    }
|
||||
|
||||
/**
|
||||
* Handles conversation summary buffer memory type
|
||||
*/
|
||||
private async handleSummaryBuffer(
|
||||
messages: BaseMessageLike[],
|
||||
pastMessages: BaseMessageLike[],
|
||||
llmNodeInstance: BaseChatModel,
|
||||
nodeData: INodeData,
|
||||
abortController: AbortController
|
||||
): Promise<void> {
|
||||
const maxTokenLimit = (nodeData.inputs?.llmMemoryMaxTokenLimit as number) || 2000
|
||||
|
||||
// Convert past messages to a format suitable for token counting
|
||||
const messagesString = pastMessages.map((msg: any) => `${msg.role}: ${msg.content}`).join('\n')
|
||||
const tokenCount = await llmNodeInstance.getNumTokens(messagesString)
|
||||
|
||||
if (tokenCount > maxTokenLimit) {
|
||||
// Calculate how many messages to summarize (messages that exceed the token limit)
|
||||
let currBufferLength = tokenCount
|
||||
const messagesToSummarize = []
|
||||
const remainingMessages = [...pastMessages]
|
||||
|
||||
// Remove messages from the beginning until we're under the token limit
|
||||
while (currBufferLength > maxTokenLimit && remainingMessages.length > 0) {
|
||||
const poppedMessage = remainingMessages.shift()
|
||||
if (poppedMessage) {
|
||||
messagesToSummarize.push(poppedMessage)
|
||||
// Recalculate token count for remaining messages
|
||||
const remainingMessagesString = remainingMessages.map((msg: any) => `${msg.role}: ${msg.content}`).join('\n')
|
||||
currBufferLength = await llmNodeInstance.getNumTokens(remainingMessagesString)
|
||||
}
|
||||
}
|
||||
|
||||
// Summarize the messages that were removed
|
||||
const messagesToSummarizeString = messagesToSummarize.map((msg: any) => `${msg.role}: ${msg.content}`).join('\n')
|
||||
|
||||
const summary = await llmNodeInstance.invoke(
|
||||
[
|
||||
{
|
||||
role: 'user',
|
||||
content: DEFAULT_SUMMARIZER_TEMPLATE.replace('{conversation}', messagesToSummarizeString)
|
||||
}
|
||||
],
|
||||
{ signal: abortController?.signal }
|
||||
)
|
||||
|
||||
// Add summary as a system message at the beginning, then add remaining messages
|
||||
messages.push({ role: 'system', content: `Previous conversation summary: ${summary.content}` })
|
||||
messages.push(...remainingMessages)
|
||||
} else {
|
||||
// If under token limit, use all messages
|
||||
messages.push(...pastMessages)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Handles streaming response from the LLM
|
||||
*/
|
||||
private async handleStreamingResponse(
|
||||
sseStreamer: IServerSideEventStreamer | undefined,
|
||||
llmNodeInstance: BaseChatModel,
|
||||
messages: BaseMessageLike[],
|
||||
chatId: string,
|
||||
abortController: AbortController
|
||||
): Promise<AIMessageChunk> {
|
||||
let response = new AIMessageChunk('')
|
||||
|
||||
try {
|
||||
for await (const chunk of await llmNodeInstance.stream(messages, { signal: abortController?.signal })) {
|
||||
if (sseStreamer) {
|
||||
let content = ''
|
||||
|
||||
if (typeof chunk === 'string') {
|
||||
content = chunk
|
||||
} else if (Array.isArray(chunk.content) && chunk.content.length > 0) {
|
||||
const contents = chunk.content as MessageContentText[]
|
||||
content = contents.map((item) => item.text).join('')
|
||||
} else if (chunk.content) {
|
||||
content = chunk.content.toString()
|
||||
}
|
||||
sseStreamer.streamTokenEvent(chatId, content)
|
||||
}
|
||||
|
||||
const messageChunk = typeof chunk === 'string' ? new AIMessageChunk(chunk) : chunk
|
||||
response = response.concat(messageChunk)
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Error during streaming:', error)
|
||||
throw error
|
||||
}
|
||||
if (Array.isArray(response.content) && response.content.length > 0) {
|
||||
const responseContents = response.content as MessageContentText[]
|
||||
response.content = responseContents.map((item) => item.text).join('')
|
||||
}
|
||||
return response
|
||||
}
|
||||
|
||||
/**
|
||||
* Prepares the output object with response and metadata
|
||||
*/
|
||||
private prepareOutputObject(
|
||||
response: AIMessageChunk,
|
||||
finalResponse: string,
|
||||
startTime: number,
|
||||
endTime: number,
|
||||
timeDelta: number,
|
||||
isStructuredOutput: boolean,
|
||||
artifacts: any[] = [],
|
||||
fileAnnotations: any[] = []
|
||||
): any {
|
||||
const output: any = {
|
||||
content: finalResponse,
|
||||
timeMetadata: {
|
||||
start: startTime,
|
||||
end: endTime,
|
||||
delta: timeDelta
|
||||
}
|
||||
}
|
||||
|
||||
if (response.tool_calls) {
|
||||
output.calledTools = response.tool_calls
|
||||
}
|
||||
|
||||
if (response.usage_metadata) {
|
||||
output.usageMetadata = response.usage_metadata
|
||||
}
|
||||
|
||||
if (response.response_metadata) {
|
||||
output.responseMetadata = response.response_metadata
|
||||
}
|
||||
|
||||
if (isStructuredOutput && typeof response === 'object') {
|
||||
const structuredOutput = response as Record<string, any>
|
||||
for (const key in structuredOutput) {
|
||||
if (structuredOutput[key] !== undefined && structuredOutput[key] !== null) {
|
||||
output[key] = structuredOutput[key]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (artifacts && artifacts.length > 0) {
|
||||
output.artifacts = flatten(artifacts)
|
||||
}
|
||||
|
||||
if (fileAnnotations && fileAnnotations.length > 0) {
|
||||
output.fileAnnotations = fileAnnotations
|
||||
}
|
||||
|
||||
return output
|
||||
}
|
||||
|
||||
/**
|
||||
* Sends additional streaming events for tool calls and metadata
|
||||
*/
|
||||
private sendStreamingEvents(options: ICommonObject, chatId: string, response: AIMessageChunk): void {
|
||||
const sseStreamer: IServerSideEventStreamer = options.sseStreamer as IServerSideEventStreamer
|
||||
|
||||
if (response.tool_calls) {
|
||||
const formattedToolCalls = response.tool_calls.map((toolCall: any) => ({
|
||||
tool: toolCall.name || 'tool',
|
||||
toolInput: toolCall.args,
|
||||
toolOutput: ''
|
||||
}))
|
||||
sseStreamer.streamCalledToolsEvent(chatId, flatten(formattedToolCalls))
|
||||
}
|
||||
|
||||
if (response.usage_metadata) {
|
||||
sseStreamer.streamUsageMetadataEvent(chatId, response.usage_metadata)
|
||||
}
|
||||
|
||||
sseStreamer.streamEndEvent(chatId)
|
||||
}
|
||||
}
|
||||
|
||||
// Flowise loads node components via CommonJS and expects the class under 'nodeClass'
module.exports = { nodeClass: LLM_Agentflow }
|
||||
|
|
@ -0,0 +1,154 @@
|
|||
import { ICommonObject, INode, INodeData, INodeOptionsValue, INodeParams } from '../../../src/Interface'
|
||||
import { updateFlowState } from '../utils'
|
||||
|
||||
class Loop_Agentflow implements INode {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
color: string
|
||||
hideOutput: boolean
|
||||
hint: string
|
||||
baseClasses: string[]
|
||||
documentation?: string
|
||||
credential: INodeParams
|
||||
inputs: INodeParams[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'Loop'
|
||||
this.name = 'loopAgentflow'
|
||||
this.version = 1.2
|
||||
this.type = 'Loop'
|
||||
this.category = 'Agent Flows'
|
||||
this.description = 'Loop back to a previous node'
|
||||
this.baseClasses = [this.type]
|
||||
this.color = '#FFA07A'
|
||||
this.hint = 'Make sure to have memory enabled in the LLM/Agent node to retain the chat history'
|
||||
this.hideOutput = true
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Loop Back To',
|
||||
name: 'loopBackToNode',
|
||||
type: 'asyncOptions',
|
||||
loadMethod: 'listPreviousNodes',
|
||||
freeSolo: true
|
||||
},
|
||||
{
|
||||
label: 'Max Loop Count',
|
||||
name: 'maxLoopCount',
|
||||
type: 'number',
|
||||
default: 5
|
||||
},
|
||||
{
|
||||
label: 'Fallback Message',
|
||||
name: 'fallbackMessage',
|
||||
type: 'string',
|
||||
description: 'Message to display if the loop count is exceeded',
|
||||
placeholder: 'Enter your fallback message here',
|
||||
rows: 4,
|
||||
acceptVariable: true,
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Update Flow State',
|
||||
name: 'loopUpdateState',
|
||||
description: 'Update runtime state during the execution of the workflow',
|
||||
type: 'array',
|
||||
optional: true,
|
||||
acceptVariable: true,
|
||||
array: [
|
||||
{
|
||||
label: 'Key',
|
||||
name: 'key',
|
||||
type: 'asyncOptions',
|
||||
loadMethod: 'listRuntimeStateKeys'
|
||||
},
|
||||
{
|
||||
label: 'Value',
|
||||
name: 'value',
|
||||
type: 'string',
|
||||
acceptVariable: true,
|
||||
acceptNodeOutputAsVariable: true
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
//@ts-ignore
|
||||
loadMethods = {
|
||||
async listPreviousNodes(_: INodeData, options: ICommonObject): Promise<INodeOptionsValue[]> {
|
||||
const previousNodes = options.previousNodes as ICommonObject[]
|
||||
|
||||
const returnOptions: INodeOptionsValue[] = []
|
||||
for (const node of previousNodes) {
|
||||
returnOptions.push({
|
||||
label: node.label,
|
||||
name: `${node.id}-${node.label}`,
|
||||
description: node.id
|
||||
})
|
||||
}
|
||||
return returnOptions
|
||||
},
|
||||
async listRuntimeStateKeys(_: INodeData, options: ICommonObject): Promise<INodeOptionsValue[]> {
|
||||
const previousNodes = options.previousNodes as ICommonObject[]
|
||||
const startAgentflowNode = previousNodes.find((node) => node.name === 'startAgentflow')
|
||||
const state = startAgentflowNode?.inputs?.startState as ICommonObject[]
|
||||
return state.map((item) => ({ label: item.key, name: item.key }))
|
||||
}
|
||||
}
|
||||
|
||||
async run(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
|
||||
const loopBackToNode = nodeData.inputs?.loopBackToNode as string
|
||||
const _maxLoopCount = nodeData.inputs?.maxLoopCount as string
|
||||
const fallbackMessage = nodeData.inputs?.fallbackMessage as string
|
||||
const _loopUpdateState = nodeData.inputs?.loopUpdateState
|
||||
|
||||
const state = options.agentflowRuntime?.state as ICommonObject
|
||||
|
||||
const loopBackToNodeId = loopBackToNode.split('-')[0]
|
||||
const loopBackToNodeLabel = loopBackToNode.split('-')[1]
|
||||
|
||||
const data = {
|
||||
nodeID: loopBackToNodeId,
|
||||
maxLoopCount: _maxLoopCount ? parseInt(_maxLoopCount) : 5
|
||||
}
|
||||
|
||||
const finalOutput = 'Loop back to ' + `${loopBackToNodeLabel} (${loopBackToNodeId})`
|
||||
|
||||
// Update flow state if needed
|
||||
let newState = { ...state }
|
||||
if (_loopUpdateState && Array.isArray(_loopUpdateState) && _loopUpdateState.length > 0) {
|
||||
newState = updateFlowState(state, _loopUpdateState)
|
||||
}
|
||||
|
||||
// Process template variables in state
|
||||
if (newState && Object.keys(newState).length > 0) {
|
||||
for (const key in newState) {
|
||||
if (newState[key].toString().includes('{{ output }}')) {
|
||||
newState[key] = finalOutput
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const returnOutput = {
|
||||
id: nodeData.id,
|
||||
name: this.name,
|
||||
input: data,
|
||||
output: {
|
||||
content: finalOutput,
|
||||
nodeID: loopBackToNodeId,
|
||||
maxLoopCount: _maxLoopCount ? parseInt(_maxLoopCount) : 5,
|
||||
fallbackMessage
|
||||
},
|
||||
state: newState
|
||||
}
|
||||
|
||||
return returnOutput
|
||||
}
|
||||
}
|
||||
|
||||
// Flowise loads node components via CommonJS and expects the class under 'nodeClass'
module.exports = { nodeClass: Loop_Agentflow }
|
||||
|
|
@ -0,0 +1,221 @@
|
|||
import {
|
||||
ICommonObject,
|
||||
IDatabaseEntity,
|
||||
INode,
|
||||
INodeData,
|
||||
INodeOptionsValue,
|
||||
INodeParams,
|
||||
IServerSideEventStreamer
|
||||
} from '../../../src/Interface'
|
||||
import { updateFlowState } from '../utils'
|
||||
import { processTemplateVariables } from '../../../src/utils'
|
||||
import { DataSource } from 'typeorm'
|
||||
import { BaseRetriever } from '@langchain/core/retrievers'
|
||||
import { Document } from '@langchain/core/documents'
|
||||
|
||||
// One entry of the 'retrieverKnowledgeDocumentStores' array input
interface IKnowledgeBase {
    // Option value produced by loadMethods.listStores: `${store.id}:${store.name}`
    documentStore: string
}
|
||||
|
||||
class Retriever_Agentflow implements INode {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
color: string
|
||||
hideOutput: boolean
|
||||
hint: string
|
||||
baseClasses: string[]
|
||||
documentation?: string
|
||||
credential: INodeParams
|
||||
inputs: INodeParams[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'Retriever'
|
||||
this.name = 'retrieverAgentflow'
|
||||
this.version = 1.1
|
||||
this.type = 'Retriever'
|
||||
this.category = 'Agent Flows'
|
||||
this.description = 'Retrieve information from vector database'
|
||||
this.baseClasses = [this.type]
|
||||
this.color = '#b8bedd'
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Knowledge (Document Stores)',
|
||||
name: 'retrieverKnowledgeDocumentStores',
|
||||
type: 'array',
|
||||
description: 'Document stores to retrieve information from. Document stores must be upserted in advance.',
|
||||
array: [
|
||||
{
|
||||
label: 'Document Store',
|
||||
name: 'documentStore',
|
||||
type: 'asyncOptions',
|
||||
loadMethod: 'listStores'
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
label: 'Retriever Query',
|
||||
name: 'retrieverQuery',
|
||||
type: 'string',
|
||||
placeholder: 'Enter your query here',
|
||||
rows: 4,
|
||||
acceptVariable: true
|
||||
},
|
||||
{
|
||||
label: 'Output Format',
|
||||
name: 'outputFormat',
|
||||
type: 'options',
|
||||
options: [
|
||||
{ label: 'Text', name: 'text' },
|
||||
{ label: 'Text with Metadata', name: 'textWithMetadata' }
|
||||
],
|
||||
default: 'text'
|
||||
},
|
||||
{
|
||||
label: 'Update Flow State',
|
||||
name: 'retrieverUpdateState',
|
||||
description: 'Update runtime state during the execution of the workflow',
|
||||
type: 'array',
|
||||
optional: true,
|
||||
acceptVariable: true,
|
||||
array: [
|
||||
{
|
||||
label: 'Key',
|
||||
name: 'key',
|
||||
type: 'asyncOptions',
|
||||
loadMethod: 'listRuntimeStateKeys'
|
||||
},
|
||||
{
|
||||
label: 'Value',
|
||||
name: 'value',
|
||||
type: 'string',
|
||||
acceptVariable: true,
|
||||
acceptNodeOutputAsVariable: true
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
//@ts-ignore
|
||||
loadMethods = {
|
||||
async listRuntimeStateKeys(_: INodeData, options: ICommonObject): Promise<INodeOptionsValue[]> {
|
||||
const previousNodes = options.previousNodes as ICommonObject[]
|
||||
const startAgentflowNode = previousNodes.find((node) => node.name === 'startAgentflow')
|
||||
const state = startAgentflowNode?.inputs?.startState as ICommonObject[]
|
||||
return state.map((item) => ({ label: item.key, name: item.key }))
|
||||
},
|
||||
async listStores(_: INodeData, options: ICommonObject): Promise<INodeOptionsValue[]> {
|
||||
const returnData: INodeOptionsValue[] = []
|
||||
|
||||
const appDataSource = options.appDataSource as DataSource
|
||||
const databaseEntities = options.databaseEntities as IDatabaseEntity
|
||||
|
||||
if (appDataSource === undefined || !appDataSource) {
|
||||
return returnData
|
||||
}
|
||||
|
||||
const searchOptions = options.searchOptions || {}
|
||||
const stores = await appDataSource.getRepository(databaseEntities['DocumentStore']).findBy(searchOptions)
|
||||
for (const store of stores) {
|
||||
if (store.status === 'UPSERTED') {
|
||||
const obj = {
|
||||
name: `${store.id}:${store.name}`,
|
||||
label: store.name,
|
||||
description: store.description
|
||||
}
|
||||
returnData.push(obj)
|
||||
}
|
||||
}
|
||||
return returnData
|
||||
}
|
||||
}
|
||||
|
||||
async run(nodeData: INodeData, input: string, options: ICommonObject): Promise<any> {
|
||||
const retrieverQuery = nodeData.inputs?.retrieverQuery as string
|
||||
const outputFormat = nodeData.inputs?.outputFormat as string
|
||||
const _retrieverUpdateState = nodeData.inputs?.retrieverUpdateState
|
||||
|
||||
const state = options.agentflowRuntime?.state as ICommonObject
|
||||
const chatId = options.chatId as string
|
||||
const isLastNode = options.isLastNode as boolean
|
||||
const isStreamable = isLastNode && options.sseStreamer !== undefined
|
||||
|
||||
const abortController = options.abortController as AbortController
|
||||
|
||||
// Extract knowledge
|
||||
let docs: Document[] = []
|
||||
const knowledgeBases = nodeData.inputs?.retrieverKnowledgeDocumentStores as IKnowledgeBase[]
|
||||
if (knowledgeBases && knowledgeBases.length > 0) {
|
||||
for (const knowledgeBase of knowledgeBases) {
|
||||
const [storeId, _] = knowledgeBase.documentStore.split(':')
|
||||
|
||||
const docStoreVectorInstanceFilePath = options.componentNodes['documentStoreVS'].filePath as string
|
||||
const docStoreVectorModule = await import(docStoreVectorInstanceFilePath)
|
||||
const newDocStoreVectorInstance = new docStoreVectorModule.nodeClass()
|
||||
const docStoreVectorInstance = (await newDocStoreVectorInstance.init(
|
||||
{
|
||||
...nodeData,
|
||||
inputs: {
|
||||
...nodeData.inputs,
|
||||
selectedStore: storeId
|
||||
},
|
||||
outputs: {
|
||||
output: 'retriever'
|
||||
}
|
||||
},
|
||||
'',
|
||||
options
|
||||
)) as BaseRetriever
|
||||
|
||||
docs = await docStoreVectorInstance.invoke(retrieverQuery || input, { signal: abortController?.signal })
|
||||
}
|
||||
}
|
||||
|
||||
const docsText = docs.map((doc) => doc.pageContent).join('\n')
|
||||
|
||||
// Update flow state if needed
|
||||
let newState = { ...state }
|
||||
if (_retrieverUpdateState && Array.isArray(_retrieverUpdateState) && _retrieverUpdateState.length > 0) {
|
||||
newState = updateFlowState(state, _retrieverUpdateState)
|
||||
}
|
||||
|
||||
try {
|
||||
let finalOutput = ''
|
||||
if (outputFormat === 'text') {
|
||||
finalOutput = docsText
|
||||
} else if (outputFormat === 'textWithMetadata') {
|
||||
finalOutput = JSON.stringify(docs, null, 2)
|
||||
}
|
||||
|
||||
if (isStreamable) {
|
||||
const sseStreamer: IServerSideEventStreamer = options.sseStreamer
|
||||
sseStreamer.streamTokenEvent(chatId, finalOutput)
|
||||
}
|
||||
|
||||
newState = processTemplateVariables(newState, finalOutput)
|
||||
|
||||
const returnOutput = {
|
||||
id: nodeData.id,
|
||||
name: this.name,
|
||||
input: {
|
||||
question: retrieverQuery || input
|
||||
},
|
||||
output: {
|
||||
content: finalOutput
|
||||
},
|
||||
state: newState
|
||||
}
|
||||
|
||||
return returnOutput
|
||||
} catch (e) {
|
||||
throw new Error(e)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Flowise loads node components via CommonJS and expects the class under 'nodeClass'
module.exports = { nodeClass: Retriever_Agentflow }
|
||||
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue