Mirror of https://github.com/langgenius/dify.git (synced 2026-01-19 11:45:05 +08:00)

Compare commits: 993 commits, 1.7.0...fix-statel
19  .claude/settings.json.example  (new file)

@@ -0,0 +1,19 @@
+{
+  "permissions": {
+    "allow": [],
+    "deny": []
+  },
+  "env": {
+    "__comment": "Environment variables for MCP servers. Override in .claude/settings.local.json with actual values.",
+    "GITHUB_PERSONAL_ACCESS_TOKEN": "ghp_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
+  },
+  "enabledMcpjsonServers": [
+    "context7",
+    "sequential-thinking",
+    "github",
+    "fetch",
+    "playwright",
+    "ide"
+  ],
+  "enableAllProjectMcpServers": true
+}
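The `__comment` key points contributors at `.claude/settings.local.json` for real values. A minimal sketch of that override, assuming the usual behavior that local settings are merged over the checked-in example and kept out of version control (the token value is a placeholder, not from this diff):

```bash
# Hypothetical local override; settings.local.json shadows the example file,
# so the real token never lands in the repository.
cat > .claude/settings.local.json <<'EOF'
{
  "env": {
    "GITHUB_PERSONAL_ACCESS_TOKEN": "ghp_your_real_token"
  }
}
EOF
```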
@@ -1,4 +1,4 @@
-FROM mcr.microsoft.com/devcontainers/python:3.12
+FROM mcr.microsoft.com/devcontainers/python:3.12-bookworm

 RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
     && apt-get -y install libgmp-dev libmpfr-dev libmpc-dev
@@ -1,23 +1,26 @@
 # Development with devcontainer

 This project includes a devcontainer configuration that allows you to open the project in a container with a fully configured development environment.
 Both frontend and backend environments are initialized when the container is started.

 ## GitHub Codespaces

 [](https://codespaces.new/langgenius/dify)

 You can simply click the button above to open this project in GitHub Codespaces.

 For more info, check out the [GitHub documentation](https://docs.github.com/en/free-pro-team@latest/github/developing-online-with-codespaces/creating-a-codespace#creating-a-codespace).

 ## VS Code Dev Containers

 [](https://vscode.dev/redirect?url=vscode://ms-vscode-remote.remote-containers/cloneInVolume?url=https://github.com/langgenius/dify)

 If you have VS Code installed, you can click the button above to open this project in VS Code Dev Containers.

 You can learn more in the [Dev Containers documentation](https://code.visualstudio.com/docs/devcontainers/containers).

 ## Pros of Devcontainer

 Unified Development Environment: By using devcontainers, you can ensure that all developers are developing in the same environment, reducing the occurrence of "it works on my machine" type of issues.

 Quick Start: New developers can set up their development environment in a few simple steps, without spending a lot of time on environment configuration.

@@ -25,11 +28,13 @@ Quick Start: New developers can set up their development environment in a few si
 Isolation: Devcontainers isolate your project from your host operating system, reducing the chance of OS updates or other application installations impacting the development environment.

 ## Cons of Devcontainer

 Learning Curve: For developers unfamiliar with Docker and VS Code, using devcontainers may be somewhat complex.

 Performance Impact: While usually minimal, programs running inside a devcontainer may be slightly slower than those running directly on the host.

 ## Troubleshooting

 If you see an error message like this when you open this project in Codespaces:
 (screenshot of the error message)
@@ -1,15 +1,16 @@
 #!/bin/bash
+WORKSPACE_ROOT=$(pwd)

 npm add -g pnpm@10.13.1
 corepack enable
 cd web && pnpm install
 pipx install uv

-echo 'alias start-api="cd /workspaces/dify/api && uv run python -m flask run --host 0.0.0.0 --port=5001 --debug"' >> ~/.bashrc
-echo 'alias start-worker="cd /workspaces/dify/api && uv run python -m celery -A app.celery worker -P gevent -c 1 --loglevel INFO -Q dataset,generation,mail,ops_trace,app_deletion"' >> ~/.bashrc
-echo 'alias start-web="cd /workspaces/dify/web && pnpm dev"' >> ~/.bashrc
-echo 'alias start-web-prod="cd /workspaces/dify/web && pnpm build && pnpm start"' >> ~/.bashrc
-echo 'alias start-containers="cd /workspaces/dify/docker && docker-compose -f docker-compose.middleware.yaml -p dify --env-file middleware.env up -d"' >> ~/.bashrc
-echo 'alias stop-containers="cd /workspaces/dify/docker && docker-compose -f docker-compose.middleware.yaml -p dify --env-file middleware.env down"' >> ~/.bashrc
+echo "alias start-api=\"cd $WORKSPACE_ROOT/api && uv run python -m flask run --host 0.0.0.0 --port=5001 --debug\"" >> ~/.bashrc
+echo "alias start-worker=\"cd $WORKSPACE_ROOT/api && uv run python -m celery -A app.celery worker -P gevent -c 1 --loglevel INFO -Q dataset,generation,mail,ops_trace,app_deletion,plugin,workflow_storage\"" >> ~/.bashrc
+echo "alias start-web=\"cd $WORKSPACE_ROOT/web && pnpm dev\"" >> ~/.bashrc
+echo "alias start-web-prod=\"cd $WORKSPACE_ROOT/web && pnpm build && pnpm start\"" >> ~/.bashrc
+echo "alias start-containers=\"cd $WORKSPACE_ROOT/docker && docker-compose -f docker-compose.middleware.yaml -p dify --env-file middleware.env up -d\"" >> ~/.bashrc
+echo "alias stop-containers=\"cd $WORKSPACE_ROOT/docker && docker-compose -f docker-compose.middleware.yaml -p dify --env-file middleware.env down\"" >> ~/.bashrc

 source /home/vscode/.bashrc
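Once the post-create script has run, these aliases are available in any new shell inside the container. A typical session might look like this (a sketch based only on the aliases defined above):

```bash
start-containers   # bring up the docker-compose middleware stack (project "dify")
start-api          # Flask API on 0.0.0.0:5001 in debug mode
start-worker       # Celery worker for the queues listed in the alias
start-web          # Next.js dev server
```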
22  .github/CODE_OF_CONDUCT.md

@@ -17,27 +17,25 @@ diverse, inclusive, and healthy community.
 Examples of behavior that contributes to a positive environment for our
 community include:

-* Demonstrating empathy and kindness toward other people
-* Being respectful of differing opinions, viewpoints, and experiences
-* Giving and gracefully accepting constructive feedback
-* Accepting responsibility and apologizing to those affected by our mistakes,
+- Demonstrating empathy and kindness toward other people
+- Being respectful of differing opinions, viewpoints, and experiences
+- Giving and gracefully accepting constructive feedback
+- Accepting responsibility and apologizing to those affected by our mistakes,
   and learning from the experience
-* Focusing on what is best not just for us as individuals, but for the
+- Focusing on what is best not just for us as individuals, but for the
   overall community

 Examples of unacceptable behavior include:

-* The use of sexualized language or imagery, and sexual attention or
+- The use of sexualized language or imagery, and sexual attention or
   advances of any kind
-* Trolling, insulting or derogatory comments, and personal or political attacks
-* Public or private harassment
-* Publishing others' private information, such as a physical or email
+- Trolling, insulting or derogatory comments, and personal or political attacks
+- Public or private harassment
+- Publishing others' private information, such as a physical or email
   address, without their explicit permission
-* Other conduct which could reasonably be considered inappropriate in a
+- Other conduct which could reasonably be considered inappropriate in a
   professional setting

 ## Language Policy

 To facilitate clear and effective communication, all discussions, comments, documentation, and pull requests in this project should be conducted in English. This ensures that all contributors can participate and collaborate effectively.
3  .github/ISSUE_TEMPLATE/config.yml

@@ -1,5 +1,8 @@
 blank_issues_enabled: false
 contact_links:
   - name: "\U0001F510 Security Vulnerabilities"
     url: "https://github.com/langgenius/dify/security/advisories/new"
     about: Report security vulnerabilities through GitHub Security Advisories to ensure responsible disclosure. 💡 Please do not report security vulnerabilities in public issues.
+  - name: "\U0001F4A1 Model Providers & Plugins"
+    url: "https://github.com/langgenius/dify-official-plugins/issues/new/choose"
+    about: Report issues with official plugins or model providers, you will need to provide the plugin version and other relevant details.
44  .github/ISSUE_TEMPLATE/refactor.yml  (new file)

@@ -0,0 +1,44 @@
+name: "✨ Refactor"
+description: Refactor existing code for improved readability and maintainability.
+title: "[Chore/Refactor] "
+labels:
+  - refactor
+body:
+  - type: checkboxes
+    attributes:
+      label: Self Checks
+      description: "To make sure we get to you in time, please check the following :)"
+      options:
+        - label: I have read the [Contributing Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md) and [Language Policy](https://github.com/langgenius/dify/issues/1542).
+          required: true
+        - label: This is only for refactoring, if you would like to ask a question, please head to [Discussions](https://github.com/langgenius/dify/discussions/categories/general).
+          required: true
+        - label: I have searched for existing issues [search for existing issues](https://github.com/langgenius/dify/issues), including closed ones.
+          required: true
+        - label: I confirm that I am using English to submit this report, otherwise it will be closed.
+          required: true
+        - label: 【中文用户 & Non English User】请使用英语提交,否则会被关闭 :)
+          required: true
+        - label: "Please do not modify this template :) and fill in all the required fields."
+          required: true
+  - type: textarea
+    id: description
+    attributes:
+      label: Description
+      placeholder: "Describe the refactor you are proposing."
+    validations:
+      required: true
+  - type: textarea
+    id: motivation
+    attributes:
+      label: Motivation
+      placeholder: "Explain why this refactor is necessary."
+    validations:
+      required: false
+  - type: textarea
+    id: additional-context
+    attributes:
+      label: Additional Context
+      placeholder: "Add any other context or screenshots about the request here."
+    validations:
+      required: false
34  .github/actions/setup-uv/action.yml  (deleted)

@@ -1,34 +0,0 @@
-name: Setup UV and Python
-
-inputs:
-  python-version:
-    description: Python version to use and the UV installed with
-    required: true
-    default: '3.12'
-  uv-version:
-    description: UV version to set up
-    required: true
-    default: '~=0.7.11'
-  uv-lockfile:
-    description: Path to the UV lockfile to restore cache from
-    required: true
-    default: ''
-  enable-cache:
-    required: true
-    default: true
-
-runs:
-  using: composite
-  steps:
-    - name: Set up Python ${{ inputs.python-version }}
-      uses: actions/setup-python@v5
-      with:
-        python-version: ${{ inputs.python-version }}
-
-    - name: Install uv
-      uses: astral-sh/setup-uv@v5
-      with:
-        version: ${{ inputs.uv-version }}
-        python-version: ${{ inputs.python-version }}
-        enable-cache: ${{ inputs.enable-cache }}
-        cache-dependency-glob: ${{ inputs.uv-lockfile }}
12  .github/dependabot.yml  (new file)

@@ -0,0 +1,12 @@
+version: 2
+updates:
+  - package-ecosystem: "npm"
+    directory: "/web"
+    schedule:
+      interval: "weekly"
+    open-pull-requests-limit: 2
+  - package-ecosystem: "uv"
+    directory: "/api"
+    schedule:
+      interval: "weekly"
+    open-pull-requests-limit: 2
6  .github/pull_request_template.md

@@ -1,8 +1,8 @@
 > [!IMPORTANT]
 >
 > 1. Make sure you have read our [contribution guidelines](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md)
-> 2. Ensure there is an associated issue and you have been assigned to it
-> 3. Use the correct syntax to link this PR: `Fixes #<issue number>`.
+> 1. Ensure there is an associated issue and you have been assigned to it
+> 1. Use the correct syntax to link this PR: `Fixes #<issue number>`.

 ## Summary

@@ -12,7 +12,7 @@

 | Before | After |
 |--------|-------|
 | ... | ... |
 | ... | ... |

 ## Checklist
30  .github/workflows/api-tests.yml

@@ -1,13 +1,7 @@
 name: Run Pytest

 on:
-  pull_request:
-    branches:
-      - main
-    paths:
-      - api/**
-      - docker/**
-      - .github/workflows/api-tests.yml
+  workflow_call:

 concurrency:
   group: api-tests-${{ github.head_ref || github.run_id }}

@@ -33,10 +27,11 @@ jobs:
           persist-credentials: false

       - name: Setup UV and Python
-        uses: ./.github/actions/setup-uv
+        uses: astral-sh/setup-uv@v6
         with:
           enable-cache: true
           python-version: ${{ matrix.python-version }}
-          uv-lockfile: api/uv.lock
+          cache-dependency-glob: api/uv.lock

       - name: Check UV lockfile
         run: uv lock --project api --check

@@ -48,6 +43,11 @@ jobs:
         run: |
           uv run --project api bash dev/pytest/pytest_unit_tests.sh

+      - name: Run pyrefly check
+        run: |
+          cd api
+          uv add --dev pyrefly
+          uv run pyrefly check || true
       - name: Coverage Summary
         run: |
           set -x

@@ -62,15 +62,6 @@ jobs:
       - name: Run dify config tests
         run: uv run --project api dev/pytest/pytest_config_tests.py

-      - name: MyPy Cache
-        uses: actions/cache@v4
-        with:
-          path: api/.mypy_cache
-          key: mypy-${{ matrix.python-version }}-${{ runner.os }}-${{ hashFiles('api/uv.lock') }}
-
-      - name: Run MyPy Checks
-        run: dev/mypy-check
-
       - name: Set up dotenvs
         run: |
           cp docker/.env.example docker/.env

@@ -99,3 +90,6 @@ jobs:
       - name: Run Tool
         run: uv run --project api bash dev/pytest/pytest_tools.sh

+      - name: Run TestContainers
+        run: uv run --project api bash dev/pytest/pytest_testcontainers.sh
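The new pyrefly step is advisory: the trailing `|| true` keeps a failing check from breaking the job. To reproduce it locally, something like the following should work (assuming uv is installed; it simply mirrors the workflow step above):

```bash
cd api
uv add --dev pyrefly          # same ad-hoc dev install the workflow performs
uv run pyrefly check || true  # advisory: report findings without failing the shell
```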
81  .github/workflows/autofix.yml  (new file)

@@ -0,0 +1,81 @@
+name: autofix.ci
+on:
+  pull_request:
+    branches: ["main"]
+permissions:
+  contents: read
+
+jobs:
+  autofix:
+    if: github.repository == 'langgenius/dify'
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+
+      # Use uv to ensure we have the same ruff version in CI and locally.
+      - uses: astral-sh/setup-uv@v6
+        with:
+          python-version: "3.11"
+      - run: |
+          cd api
+          uv sync --dev
+          # fmt first to avoid line too long
+          uv run ruff format ..
+          # Fix lint errors
+          uv run ruff check --fix .
+          # Format code
+          uv run ruff format ..
+
+      - name: ast-grep
+        run: |
+          uvx --from ast-grep-cli sg --pattern 'db.session.query($WHATEVER).filter($HERE)' --rewrite 'db.session.query($WHATEVER).where($HERE)' -l py --update-all
+          uvx --from ast-grep-cli sg --pattern 'session.query($WHATEVER).filter($HERE)' --rewrite 'session.query($WHATEVER).where($HERE)' -l py --update-all
+          # Convert Optional[T] to T | None (ignoring quoted types)
+          cat > /tmp/optional-rule.yml << 'EOF'
+          id: convert-optional-to-union
+          language: python
+          rule:
+            kind: generic_type
+            all:
+              - has:
+                  kind: identifier
+                  pattern: Optional
+              - has:
+                  kind: type_parameter
+                  has:
+                    kind: type
+                    pattern: $T
+          fix: $T | None
+          EOF
+          uvx --from ast-grep-cli sg scan --inline-rules "$(cat /tmp/optional-rule.yml)" --update-all
+          # Fix forward references that were incorrectly converted (Python doesn't support "Type" | None syntax)
+          find . -name "*.py" -type f -exec sed -i.bak -E 's/"([^"]+)" \| None/Optional["\1"]/g; s/'"'"'([^'"'"']+)'"'"' \| None/Optional['"'"'\1'"'"']/g' {} \;
+          find . -name "*.py.bak" -type f -delete
+
+      - name: mdformat
+        run: |
+          uvx mdformat .
+
+      - name: Install pnpm
+        uses: pnpm/action-setup@v4
+        with:
+          package_json_file: web/package.json
+          run_install: false
+
+      - name: Setup NodeJS
+        uses: actions/setup-node@v4
+        with:
+          node-version: 22
+          cache: pnpm
+          cache-dependency-path: ./web/package.json
+
+      - name: Web dependencies
+        working-directory: ./web
+        run: pnpm install --frozen-lockfile
+
+      - name: oxlint
+        working-directory: ./web
+        run: |
+          pnpx oxlint --fix
+
+      - uses: autofix-ci/action@635ffb0c9798bd160680f18fd73371e355b85f27
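The inline ast-grep rule rewrites unquoted `Optional[T]` annotations to `T | None`, and the sed pass restores quoted forward references that the rewrite would otherwise break. A scratch-file illustration of the intended effect (the file and the `find_user`/`User` names are hypothetical):

```bash
cat > /tmp/example.py <<'EOF'
from typing import Optional

def find_user(user_id: Optional[int]) -> Optional["User"]: ...
EOF
# After the rule plus the sed fix-up, the unquoted annotation becomes a union
# while the quoted forward reference stays wrapped in Optional:
#   def find_user(user_id: int | None) -> Optional["User"]: ...
```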
2  .github/workflows/build-push.yml

@@ -7,6 +7,8 @@ on:
       - "deploy/dev"
       - "deploy/enterprise"
       - "build/**"
       - "release/e-*"
       - "hotfix/**"
     tags:
       - "*"
20  .github/workflows/db-migration-test.yml

@@ -1,13 +1,7 @@
 name: DB Migration Test

 on:
-  pull_request:
-    branches:
-      - main
-      - plugins/beta
-    paths:
-      - api/migrations/**
-      - .github/workflows/db-migration-test.yml
+  workflow_call:

 concurrency:
   group: db-migration-test-${{ github.ref }}

@@ -25,12 +19,20 @@ jobs:
           persist-credentials: false

       - name: Setup UV and Python
-        uses: ./.github/actions/setup-uv
+        uses: astral-sh/setup-uv@v6
         with:
-          uv-lockfile: api/uv.lock
           enable-cache: true
           python-version: "3.12"
+          cache-dependency-glob: api/uv.lock

       - name: Install dependencies
         run: uv sync --project api
+      - name: Ensure Offline migration are supported
+        run: |
+          # upgrade
+          uv run --directory api flask db upgrade 'base:head' --sql
+          # downgrade
+          uv run --directory api flask db downgrade 'head:base' --sql

       - name: Prepare middleware env
         run: |
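The `--sql` flag puts Flask-Migrate/Alembic into offline mode, printing the migration SQL instead of executing it against a database, which is what lets the step above run without one. A local sketch of the same idea (the output path is hypothetical):

```bash
cd api
# Render every migration from the empty base to the current head as plain SQL.
uv run flask db upgrade 'base:head' --sql > /tmp/dify-upgrade.sql
less /tmp/dify-upgrade.sql   # review before applying anywhere
```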
5  .github/workflows/deploy-dev.yml

@@ -12,12 +12,13 @@ jobs:
   deploy:
     runs-on: ubuntu-latest
     if: |
-      github.event.workflow_run.conclusion == 'success'
+      github.event.workflow_run.conclusion == 'success' &&
+      github.event.workflow_run.head_branch == 'deploy/dev'
     steps:
       - name: Deploy to server
         uses: appleboy/ssh-action@v0.1.8
         with:
-          host: ${{ secrets.SSH_HOST }}
+          host: ${{ secrets.RAG_SSH_HOST }}
           username: ${{ secrets.SSH_USER }}
           key: ${{ secrets.SSH_PRIVATE_KEY }}
           script: |
28  .github/workflows/deploy-enterprise.yml

@@ -19,11 +19,23 @@
       github.event.workflow_run.head_branch == 'deploy/enterprise'

     steps:
-      - name: Deploy to server
-        uses: appleboy/ssh-action@v0.1.8
-        with:
-          host: ${{ secrets.ENTERPRISE_SSH_HOST }}
-          username: ${{ secrets.ENTERPRISE_SSH_USER }}
-          password: ${{ secrets.ENTERPRISE_SSH_PASSWORD }}
-          script: |
-            ${{ vars.ENTERPRISE_SSH_SCRIPT || secrets.ENTERPRISE_SSH_SCRIPT }}
+      - name: trigger deployments
+        env:
+          DEV_ENV_ADDRS: ${{ vars.DEV_ENV_ADDRS }}
+          DEPLOY_SECRET: ${{ secrets.DEPLOY_SECRET }}
+        run: |
+          IFS=',' read -ra ENDPOINTS <<< "${DEV_ENV_ADDRS:-}"
+          BODY='{"project":"dify-api","tag":"deploy-enterprise"}'
+
+          for ENDPOINT in "${ENDPOINTS[@]}"; do
+            ENDPOINT="$(echo "$ENDPOINT" | xargs)"
+            [ -z "$ENDPOINT" ] && continue
+
+            API_SIGNATURE=$(printf '%s' "$BODY" | openssl dgst -sha256 -hmac "$DEPLOY_SECRET" | awk '{print "sha256="$2}')
+
+            curl -sSf -X POST \
+              -H "Content-Type: application/json" \
+              -H "X-Hub-Signature-256: $API_SIGNATURE" \
+              -d "$BODY" \
+              "$ENDPOINT"
+          done
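Each endpoint can authenticate the webhook by recomputing the HMAC over the raw request body and comparing it with the `X-Hub-Signature-256` header. A receiver-side sketch (the variable names are hypothetical; only the openssl/awk pipeline is taken from the workflow above):

```bash
# REQUEST_BODY and RECEIVED_SIGNATURE come from the incoming HTTP request.
EXPECTED=$(printf '%s' "$REQUEST_BODY" | openssl dgst -sha256 -hmac "$DEPLOY_SECRET" | awk '{print "sha256="$2}')
if [ "$EXPECTED" = "$RECEIVED_SIGNATURE" ]; then
  echo "signature OK, starting deployment"
else
  echo "signature mismatch, rejecting" >&2
  exit 1
fi
```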
78  .github/workflows/main-ci.yml  (new file)

@@ -0,0 +1,78 @@
+name: Main CI Pipeline
+
+on:
+  pull_request:
+    branches: ["main"]
+  push:
+    branches: ["main"]
+
+permissions:
+  contents: write
+  pull-requests: write
+  checks: write
+  statuses: write
+
+concurrency:
+  group: main-ci-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
+jobs:
+  # Check which paths were changed to determine which tests to run
+  check-changes:
+    name: Check Changed Files
+    runs-on: ubuntu-latest
+    outputs:
+      api-changed: ${{ steps.changes.outputs.api }}
+      web-changed: ${{ steps.changes.outputs.web }}
+      vdb-changed: ${{ steps.changes.outputs.vdb }}
+      migration-changed: ${{ steps.changes.outputs.migration }}
+    steps:
+      - uses: actions/checkout@v4
+      - uses: dorny/paths-filter@v3
+        id: changes
+        with:
+          filters: |
+            api:
+              - 'api/**'
+              - 'docker/**'
+              - '.github/workflows/api-tests.yml'
+            web:
+              - 'web/**'
+            vdb:
+              - 'api/core/rag/datasource/**'
+              - 'docker/**'
+              - '.github/workflows/vdb-tests.yml'
+              - 'api/uv.lock'
+              - 'api/pyproject.toml'
+            migration:
+              - 'api/migrations/**'
+              - '.github/workflows/db-migration-test.yml'
+
+  # Run tests in parallel
+  api-tests:
+    name: API Tests
+    needs: check-changes
+    if: needs.check-changes.outputs.api-changed == 'true'
+    uses: ./.github/workflows/api-tests.yml
+
+  web-tests:
+    name: Web Tests
+    needs: check-changes
+    if: needs.check-changes.outputs.web-changed == 'true'
+    uses: ./.github/workflows/web-tests.yml
+
+  style-check:
+    name: Style Check
+    uses: ./.github/workflows/style.yml
+
+  vdb-tests:
+    name: VDB Tests
+    needs: check-changes
+    if: needs.check-changes.outputs.vdb-changed == 'true'
+    uses: ./.github/workflows/vdb-tests.yml
+
+  db-migration-test:
+    name: DB Migration Test
+    needs: check-changes
+    if: needs.check-changes.outputs.migration-changed == 'true'
+    uses: ./.github/workflows/db-migration-test.yml
36  .github/workflows/style.yml

@@ -1,9 +1,7 @@
 name: Style check

 on:
-  pull_request:
-    branches:
-      - main
+  workflow_call:

 concurrency:
   group: style-${{ github.head_ref || github.run_id }}

@@ -14,7 +12,6 @@ permissions:
   statuses: write
   contents: read

-
 jobs:
   python-style:
     name: Python Style

@@ -36,30 +33,32 @@ jobs:
       - name: Setup UV and Python
         if: steps.changed-files.outputs.any_changed == 'true'
-        uses: ./.github/actions/setup-uv
+        uses: astral-sh/setup-uv@v6
         with:
-          uv-lockfile: api/uv.lock
           enable-cache: false
           python-version: "3.12"
+          cache-dependency-glob: api/uv.lock

       - name: Install dependencies
         if: steps.changed-files.outputs.any_changed == 'true'
         run: uv sync --project api --dev

-      - name: Ruff check
+      - name: Run Import Linter
         if: steps.changed-files.outputs.any_changed == 'true'
-        run: |
-          uv run --directory api ruff --version
-          uv run --directory api ruff check --diff ./
-          uv run --directory api ruff format --check --diff ./
+        run: uv run --directory api --dev lint-imports

       - name: Run Basedpyright Checks
         if: steps.changed-files.outputs.any_changed == 'true'
         run: dev/basedpyright-check

       - name: Run Mypy Type Checks
         if: steps.changed-files.outputs.any_changed == 'true'
         run: uv --directory api run mypy --exclude-gitignore --exclude 'tests/' --exclude 'migrations/' --check-untyped-defs --disable-error-code=import-untyped .

       - name: Dotenv check
         if: steps.changed-files.outputs.any_changed == 'true'
         run: uv run --project api dotenv-linter ./api/.env.example ./web/.env.example

       - name: Lint hints
         if: failure()
         run: echo "Please run 'dev/reformat' to fix the fixable linting errors."

   web-style:
     name: Web Style
     runs-on: ubuntu-latest

@@ -82,7 +81,7 @@ jobs:
       - name: Install pnpm
         uses: pnpm/action-setup@v4
         with:
           version: 10
           package_json_file: web/package.json
           run_install: false

       - name: Setup NodeJS

@@ -95,11 +94,14 @@ jobs:
       - name: Web dependencies
         if: steps.changed-files.outputs.any_changed == 'true'
         working-directory: ./web
         run: pnpm install --frozen-lockfile

       - name: Web style check
         if: steps.changed-files.outputs.any_changed == 'true'
-        run: pnpm run lint
         working-directory: ./web
+        run: |
+          pnpm run lint

   docker-compose-template:
     name: Docker Compose Template
@@ -1,13 +1,18 @@
 name: Check i18n Files and Create PR

 on:
-  pull_request:
-    types: [closed]
+  push:
+    branches: [main]
+    paths:
+      - 'web/i18n/en-US/*.ts'

 permissions:
   contents: write
   pull-requests: write

 jobs:
   check-and-update:
-    if: github.event.pull_request.merged == true
+    if: github.repository == 'langgenius/dify'
     runs-on: ubuntu-latest
     defaults:
       run:

@@ -15,8 +20,8 @@ jobs:
     steps:
       - uses: actions/checkout@v4
         with:
-          fetch-depth: 2 # last 2 commits
-          persist-credentials: false
+          fetch-depth: 2
+          token: ${{ secrets.GITHUB_TOKEN }}

       - name: Check for file changes in i18n/en-US
         id: check_files

@@ -27,6 +32,13 @@ jobs:
           echo "Changed files: $changed_files"
           if [ -n "$changed_files" ]; then
             echo "FILES_CHANGED=true" >> $GITHUB_ENV
+            file_args=""
+            for file in $changed_files; do
+              filename=$(basename "$file" .ts)
+              file_args="$file_args --file=$filename"
+            done
+            echo "FILE_ARGS=$file_args" >> $GITHUB_ENV
+            echo "File arguments: $file_args"
           else
             echo "FILES_CHANGED=false" >> $GITHUB_ENV
           fi

@@ -34,7 +46,7 @@ jobs:
       - name: Install pnpm
         uses: pnpm/action-setup@v4
         with:
           version: 10
           package_json_file: web/package.json
           run_install: false

       - name: Set up Node.js

@@ -47,17 +59,30 @@ jobs:
       - name: Install dependencies
         if: env.FILES_CHANGED == 'true'
         working-directory: ./web
         run: pnpm install --frozen-lockfile

-      - name: Run npm script
+      - name: Generate i18n translations
         if: env.FILES_CHANGED == 'true'
-        run: pnpm run auto-gen-i18n
         working-directory: ./web
+        run: pnpm run auto-gen-i18n ${{ env.FILE_ARGS }}
+
+      - name: Generate i18n type definitions
+        if: env.FILES_CHANGED == 'true'
+        working-directory: ./web
+        run: pnpm run gen:i18n-types

       - name: Create Pull Request
         if: env.FILES_CHANGED == 'true'
         uses: peter-evans/create-pull-request@v6
         with:
-          commit-message: Update i18n files based on en-US changes
-          title: 'chore: translate i18n files'
-          body: This PR was automatically created to update i18n files based on changes in en-US locale.
           token: ${{ secrets.GITHUB_TOKEN }}
+          commit-message: Update i18n files and type definitions based on en-US changes
+          title: 'chore: translate i18n files and update type definitions'
+          body: |
+            This PR was automatically created to update i18n files and TypeScript type definitions based on changes in en-US locale.
+
+            **Changes included:**
+            - Updated translation files for all locales
+            - Regenerated TypeScript type definitions for type safety
           branch: chore/automated-i18n-updates
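The loop above turns each changed en-US locale file into a `--file=` flag for `auto-gen-i18n`. For example (the file paths are hypothetical):

```bash
changed_files="web/i18n/en-US/app.ts web/i18n/en-US/common.ts"
file_args=""
for file in $changed_files; do
  # basename strips the directory and the .ts suffix: app.ts -> app
  file_args="$file_args --file=$(basename "$file" .ts)"
done
echo "$file_args"   # -> " --file=app --file=common"
```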
15  .github/workflows/vdb-tests.yml

@@ -1,15 +1,7 @@
 name: Run VDB Tests

 on:
-  pull_request:
-    branches:
-      - main
-    paths:
-      - api/core/rag/datasource/**
-      - docker/**
-      - .github/workflows/vdb-tests.yml
-      - api/uv.lock
-      - api/pyproject.toml
+  workflow_call:

 concurrency:
   group: vdb-tests-${{ github.head_ref || github.run_id }}

@@ -39,10 +31,11 @@ jobs:
         remove_tool_cache: true

       - name: Setup UV and Python
-        uses: ./.github/actions/setup-uv
+        uses: astral-sh/setup-uv@v6
         with:
           enable-cache: true
           python-version: ${{ matrix.python-version }}
-          uv-lockfile: api/uv.lock
+          cache-dependency-glob: api/uv.lock

       - name: Check UV lockfile
         run: uv lock --project api --check
15  .github/workflows/web-tests.yml

@@ -1,11 +1,7 @@
 name: Web Tests

 on:
-  pull_request:
-    branches:
-      - main
-    paths:
-      - web/**
+  workflow_call:

 concurrency:
   group: web-tests-${{ github.head_ref || github.run_id }}

@@ -35,7 +31,7 @@ jobs:
       if: steps.changed-files.outputs.any_changed == 'true'
       uses: pnpm/action-setup@v4
       with:
         version: 10
         package_json_file: web/package.json
         run_install: false

     - name: Setup Node.js

@@ -48,8 +44,15 @@ jobs:
     - name: Install dependencies
       if: steps.changed-files.outputs.any_changed == 'true'
       working-directory: ./web
       run: pnpm install --frozen-lockfile

+    - name: Check i18n types synchronization
+      if: steps.changed-files.outputs.any_changed == 'true'
+      working-directory: ./web
+      run: pnpm run check:i18n-types

     - name: Run tests
       if: steps.changed-files.outputs.any_changed == 'true'
       working-directory: ./web
       run: pnpm test
24  .gitignore

@@ -123,10 +123,12 @@ venv.bak/
 # mkdocs documentation
 /site

-# mypy
+# type checking
 .mypy_cache/
 .dmypy.json
 dmypy.json
+pyrightconfig.json
+!api/pyrightconfig.json

 # Pyre type checker
 .pyre/

@@ -195,8 +197,10 @@ sdks/python-client/dify_client.egg-info
 .vscode/*
 !.vscode/launch.json.template
 !.vscode/README.md
-pyrightconfig.json
+api/.vscode
+web/.vscode
 # vscode Code History Extension
 .history

 .idea/

@@ -212,6 +216,22 @@ mise.toml
 # Next.js build output
 .next/

+# PWA generated files
+web/public/sw.js
+web/public/sw.js.map
+web/public/workbox-*.js
+web/public/workbox-*.js.map
+web/public/fallback-*.js

 # AI Assistant
 .roo/
 api/.env.backup
 /clickzetta

+# Benchmark
+scripts/stress-test/setup/config/
+scripts/stress-test/reports/

+# mcp
+.playwright-mcp/
+.serena/
34  .mcp.json  (new file)

@@ -0,0 +1,34 @@
+{
+  "mcpServers": {
+    "context7": {
+      "type": "http",
+      "url": "https://mcp.context7.com/mcp"
+    },
+    "sequential-thinking": {
+      "type": "stdio",
+      "command": "npx",
+      "args": ["-y", "@modelcontextprotocol/server-sequential-thinking"],
+      "env": {}
+    },
+    "github": {
+      "type": "stdio",
+      "command": "npx",
+      "args": ["-y", "@modelcontextprotocol/server-github"],
+      "env": {
+        "GITHUB_PERSONAL_ACCESS_TOKEN": "${GITHUB_PERSONAL_ACCESS_TOKEN}"
+      }
+    },
+    "fetch": {
+      "type": "stdio",
+      "command": "uvx",
+      "args": ["mcp-server-fetch"],
+      "env": {}
+    },
+    "playwright": {
+      "type": "stdio",
+      "command": "npx",
+      "args": ["-y", "@playwright/mcp@latest"],
+      "env": {}
+    }
+  }
+}
8  .vscode/README.md

@@ -4,10 +4,10 @@ This `launch.json.template` file provides various debug configurations for the D
 ## How to Use

-1. **Create `launch.json`**: If you don't have one, create a file named `launch.json` inside the `.vscode` directory.
-2. **Copy Content**: Copy the entire content from `launch.json.template` into your newly created `launch.json` file.
-3. **Select Debug Configuration**: Go to the Run and Debug view in VS Code / Cursor (Ctrl+Shift+D or Cmd+Shift+D).
-4. **Start Debugging**: Select the desired configuration from the dropdown menu and click the green play button.
+1. **Create `launch.json`**: If you don't have one, create a file named `launch.json` inside the `.vscode` directory.
+1. **Copy Content**: Copy the entire content from `launch.json.template` into your newly created `launch.json` file.
+1. **Select Debug Configuration**: Go to the Run and Debug view in VS Code / Cursor (Ctrl+Shift+D or Cmd+Shift+D).
+1. **Start Debugging**: Select the desired configuration from the dropdown menu and click the green play button.

 ## Tips
54  AGENTS.md  (new file)

@@ -0,0 +1,54 @@
+# AGENTS.md
+
+## Project Overview
+
+Dify is an open-source platform for developing LLM applications with an intuitive interface combining agentic AI workflows, RAG pipelines, agent capabilities, and model management.
+
+The codebase is split into:
+
+- **Backend API** (`/api`): Python Flask application organized with Domain-Driven Design
+- **Frontend Web** (`/web`): Next.js 15 application using TypeScript and React 19
+- **Docker deployment** (`/docker`): Containerized deployment configurations
+
+## Backend Workflow
+
+- Run backend CLI commands through `uv run --project api <command>`.
+
+- Backend QA gate requires passing `make lint`, `make type-check`, and `uv run --project api --dev dev/pytest/pytest_unit_tests.sh` before review.
+
+- Use Makefile targets for linting and formatting; `make lint` and `make type-check` cover the required checks.
+
+- Integration tests are CI-only and are not expected to run in the local environment.
+
+## Frontend Workflow
+
+```bash
+cd web
+pnpm lint
+pnpm lint:fix
+pnpm test
+```
+
+## Testing & Quality Practices
+
+- Follow TDD: red → green → refactor.
+- Use `pytest` for backend tests with Arrange-Act-Assert structure.
+- Enforce strong typing; avoid `Any` and prefer explicit type annotations.
+- Write self-documenting code; only add comments that explain intent.
+
+## Language Style
+
+- **Python**: Keep type hints on functions and attributes, and implement relevant special methods (e.g., `__repr__`, `__str__`).
+- **TypeScript**: Use the strict config, lean on ESLint + Prettier workflows, and avoid `any` types.
+
+## General Practices
+
+- Prefer editing existing files; add new documentation only when requested.
+- Inject dependencies through constructors and preserve clean architecture boundaries.
+- Handle errors with domain-specific exceptions at the correct layer.
+
+## Project Conventions
+
+- Backend architecture adheres to DDD and Clean Architecture principles.
+- Async work runs through Celery with Redis as the broker.
+- Frontend user-facing strings must use `web/i18n/en-US/`; avoid hardcoded text.
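As a concrete instance of the `uv run --project api <command>` pattern and the QA gate named above (a sketch; the Flask invocation mirrors the devcontainer aliases earlier in this diff):

```bash
uv run --project api flask run --host 0.0.0.0 --port 5001 --debug   # dev server
make lint && make type-check                                        # lint + type gate
uv run --project api --dev dev/pytest/pytest_unit_tests.sh          # unit tests
```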
@ -34,11 +34,11 @@ Don't forget to link an existing issue or open a new issue in the PR's descripti
|
||||
|
||||
How we prioritize:
|
||||
|
||||
| Issue Type | Priority |
|
||||
| ------------------------------------------------------------ | --------------- |
|
||||
| Bugs in core functions (cloud service, cannot login, applications not working, security loopholes) | Critical |
|
||||
| Non-critical bugs, performance boosts | Medium Priority |
|
||||
| Minor fixes (typos, confusing but working UI) | Low Priority |
|
||||
| Issue Type | Priority |
|
||||
| ------------------------------------------------------------ | --------------- |
|
||||
| Bugs in core functions (cloud service, cannot login, applications not working, security loopholes) | Critical |
|
||||
| Non-critical bugs, performance boosts | Medium Priority |
|
||||
| Minor fixes (typos, confusing but working UI) | Low Priority |
|
||||
|
||||
### Feature requests
|
||||
|
||||
@ -52,23 +52,25 @@ How we prioritize:

How we prioritize:

| Feature Type | Priority |
| ---------------------------------------------------------------------------------------------------------------------------------- | --------------- |
| High-priority features, as labeled by a team member | High Priority |
| Popular feature requests from our [community feedback board](https://github.com/langgenius/dify/discussions/categories/feedbacks) | Medium Priority |
| Non-core features and minor enhancements | Low Priority |
| Valuable but not immediate | Future-Feature |
## Submitting your PR

### Pull Request Process

1. Fork the repository
2. Before you draft a PR, please create an issue to discuss the changes you want to make
3. Create a new branch for your changes
4. Please add tests for your changes accordingly
5. Ensure your code passes the existing tests
6. Please link the issue in the PR description, `fixes #<issue_number>`
7. Get merged!
### Setup the project

#### Frontend

@ -82,12 +84,14 @@ For setting up the backend service, kindly refer to our detailed [instructions](

#### Other things to note

We recommend reviewing this document carefully before proceeding with the setup, as it contains essential information about:

- Prerequisites and dependencies
- Installation steps
- Configuration details
- Common troubleshooting tips

Feel free to reach out if you encounter any issues during the setup process.

## Getting Help

If you ever get stuck or have a burning question while contributing, simply shoot your queries our way via the related GitHub issue, or hop onto our [Discord](https://discord.gg/8Tpq4AcN9c) for a quick chat.
Makefile

@ -4,6 +4,72 @@ WEB_IMAGE=$(DOCKER_REGISTRY)/dify-web

API_IMAGE=$(DOCKER_REGISTRY)/dify-api
VERSION=latest
# Default target - show help
.DEFAULT_GOAL := help

# Backend Development Environment Setup
.PHONY: dev-setup prepare-docker prepare-web prepare-api

# Dev setup target
dev-setup: prepare-docker prepare-web prepare-api
	@echo "✅ Backend development environment setup complete!"

# Step 1: Prepare Docker middleware
prepare-docker:
	@echo "🐳 Setting up Docker middleware..."
	@cp -n docker/middleware.env.example docker/middleware.env 2>/dev/null || echo "Docker middleware.env already exists"
	@cd docker && docker compose -f docker-compose.middleware.yaml --env-file middleware.env -p dify-middlewares-dev up -d
	@echo "✅ Docker middleware started"

# Step 2: Prepare web environment
prepare-web:
	@echo "🌐 Setting up web environment..."
	@cp -n web/.env.example web/.env 2>/dev/null || echo "Web .env already exists"
	@cd web && pnpm install
	@echo "✅ Web environment prepared (not started)"

# Step 3: Prepare API environment
prepare-api:
	@echo "🔧 Setting up API environment..."
	@cp -n api/.env.example api/.env 2>/dev/null || echo "API .env already exists"
	@cd api && uv sync --dev
	@cd api && uv run flask db upgrade
	@echo "✅ API environment prepared (not started)"

# Clean dev environment
dev-clean:
	@echo "⚠️  Stopping Docker containers..."
	@cd docker && docker compose -f docker-compose.middleware.yaml --env-file middleware.env -p dify-middlewares-dev down
	@echo "🗑️  Removing volumes..."
	@rm -rf docker/volumes/db
	@rm -rf docker/volumes/redis
	@rm -rf docker/volumes/plugin_daemon
	@rm -rf docker/volumes/weaviate
	@rm -rf api/storage
	@echo "✅ Cleanup complete"

# Backend Code Quality Commands
format:
	@echo "🎨 Running ruff format..."
	@uv run --project api --dev ruff format ./api
	@echo "✅ Code formatting complete"

check:
	@echo "🔍 Running ruff check..."
	@uv run --project api --dev ruff check ./api
	@echo "✅ Code check complete"

lint:
	@echo "🔧 Running ruff format, check with fixes, and import linter..."
	@uv run --project api --dev sh -c 'ruff format ./api && ruff check --fix ./api'
	@uv run --directory api --dev lint-imports
	@echo "✅ Linting complete"

type-check:
	@echo "📝 Running type check with basedpyright..."
	@uv run --directory api --dev basedpyright
	@echo "✅ Type check complete"

# Build Docker images
build-web:
	@echo "Building web Docker image: $(WEB_IMAGE):$(VERSION)..."
@ -39,5 +105,27 @@ build-push-web: build-web push-web

build-push-all: build-all push-all
	@echo "All Docker images have been built and pushed."

# Help target
help:
	@echo "Development Setup Targets:"
	@echo "  make dev-setup      - Run all setup steps for backend dev environment"
	@echo "  make prepare-docker - Set up Docker middleware"
	@echo "  make prepare-web    - Set up web environment"
	@echo "  make prepare-api    - Set up API environment"
	@echo "  make dev-clean      - Stop Docker middleware containers and remove dev volumes"
	@echo ""
	@echo "Backend Code Quality:"
	@echo "  make format     - Format code with ruff"
	@echo "  make check      - Check code with ruff"
	@echo "  make lint       - Format and fix code with ruff"
	@echo "  make type-check - Run type checking with basedpyright"
	@echo ""
	@echo "Docker Build Targets:"
	@echo "  make build-web      - Build web Docker image"
	@echo "  make build-api      - Build API Docker image"
	@echo "  make build-all      - Build all Docker images"
	@echo "  make push-all       - Push all Docker images"
	@echo "  make build-push-all - Build and push all Docker images"

# Phony targets
.PHONY: build-web build-api push-web push-api build-all push-all build-push-all dev-setup prepare-docker prepare-web prepare-api dev-clean help format check lint type-check
README.md

@ -40,18 +40,18 @@
<p align="center">
  <a href="./README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a>
  <a href="./docs/zh-TW/README.md"><img alt="繁體中文文件" src="https://img.shields.io/badge/繁體中文-d9d9d9"></a>
  <a href="./docs/zh-CN/README.md"><img alt="简体中文文件" src="https://img.shields.io/badge/简体中文-d9d9d9"></a>
  <a href="./docs/ja-JP/README.md"><img alt="日本語のREADME" src="https://img.shields.io/badge/日本語-d9d9d9"></a>
  <a href="./docs/es-ES/README.md"><img alt="README en Español" src="https://img.shields.io/badge/Español-d9d9d9"></a>
  <a href="./docs/fr-FR/README.md"><img alt="README en Français" src="https://img.shields.io/badge/Français-d9d9d9"></a>
  <a href="./docs/tlh/README.md"><img alt="README tlhIngan Hol" src="https://img.shields.io/badge/Klingon-d9d9d9"></a>
  <a href="./docs/ko-KR/README.md"><img alt="README in Korean" src="https://img.shields.io/badge/한국어-d9d9d9"></a>
  <a href="./docs/ar-SA/README.md"><img alt="README بالعربية" src="https://img.shields.io/badge/العربية-d9d9d9"></a>
  <a href="./docs/tr-TR/README.md"><img alt="Türkçe README" src="https://img.shields.io/badge/Türkçe-d9d9d9"></a>
  <a href="./docs/vi-VN/README.md"><img alt="README Tiếng Việt" src="https://img.shields.io/badge/Ti%E1%BA%BFng%20Vi%E1%BB%87t-d9d9d9"></a>
  <a href="./docs/de-DE/README.md"><img alt="README in Deutsch" src="https://img.shields.io/badge/German-d9d9d9"></a>
  <a href="./docs/bn-BD/README.md"><img alt="README in বাংলা" src="https://img.shields.io/badge/বাংলা-d9d9d9"></a>
</p>
Dify is an open-source platform for developing LLM applications. Its intuitive interface combines agentic AI workflows, RAG pipelines, agent capabilities, model management, observability features, and more, allowing you to quickly move from prototype to production.

@ -107,74 +107,6 @@ Monitor and analyze application logs and performance over time. You could continuously improve prompts, datasets, and models based on production data and annotations.

**7. Backend-as-a-Service**:
All of Dify's offerings come with corresponding APIs, so you could effortlessly integrate Dify into your own business logic.
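As a hedged illustration of that API surface: a chat application exposes an HTTP endpoint such as `/v1/chat-messages`. The sketch below follows the published request shape; the API key and user ID are placeholders, not real credentials:

```python
import requests

# Placeholder credentials; a real app key comes from the Dify console.
API_KEY = "app-xxxxxxxx"

resp = requests.post(
    "https://api.dify.ai/v1/chat-messages",
    headers={"Authorization": f"Bearer {API_KEY}"},
    json={
        "inputs": {},
        "query": "What can you do?",
        "response_mode": "blocking",
        "user": "user-123",
    },
    timeout=60,
)
resp.raise_for_status()
print(resp.json()["answer"])
```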
## Feature Comparison

<table style="width: 100%;">
  <tr>
    <th align="center">Feature</th>
    <th align="center">Dify.AI</th>
    <th align="center">LangChain</th>
    <th align="center">Flowise</th>
    <th align="center">OpenAI Assistants API</th>
  </tr>
  <tr>
    <td align="center">Programming Approach</td>
    <td align="center">API + App-oriented</td>
    <td align="center">Python Code</td>
    <td align="center">App-oriented</td>
    <td align="center">API-oriented</td>
  </tr>
  <tr>
    <td align="center">Supported LLMs</td>
    <td align="center">Rich Variety</td>
    <td align="center">Rich Variety</td>
    <td align="center">Rich Variety</td>
    <td align="center">OpenAI-only</td>
  </tr>
  <tr>
    <td align="center">RAG Engine</td>
    <td align="center">✅</td>
    <td align="center">✅</td>
    <td align="center">✅</td>
    <td align="center">✅</td>
  </tr>
  <tr>
    <td align="center">Agent</td>
    <td align="center">✅</td>
    <td align="center">✅</td>
    <td align="center">❌</td>
    <td align="center">✅</td>
  </tr>
  <tr>
    <td align="center">Workflow</td>
    <td align="center">✅</td>
    <td align="center">❌</td>
    <td align="center">✅</td>
    <td align="center">❌</td>
  </tr>
  <tr>
    <td align="center">Observability</td>
    <td align="center">✅</td>
    <td align="center">✅</td>
    <td align="center">❌</td>
    <td align="center">❌</td>
  </tr>
  <tr>
    <td align="center">Enterprise Feature (SSO/Access control)</td>
    <td align="center">✅</td>
    <td align="center">❌</td>
    <td align="center">❌</td>
    <td align="center">❌</td>
  </tr>
  <tr>
    <td align="center">Local Deployment</td>
    <td align="center">✅</td>
    <td align="center">✅</td>
    <td align="center">✅</td>
    <td align="center">❌</td>
  </tr>
</table>
## Using Dify

- **Cloud </br>**

@ -185,7 +117,8 @@ All of Dify's offerings come with corresponding APIs, so you could effortlessly integrate Dify into your own business logic.

  Use our [documentation](https://docs.dify.ai) for further references and more in-depth instructions.

- **Dify for enterprise / organizations</br>**
  We provide additional enterprise-centric features. [Log your questions for us through this chatbot](https://udify.app/chat/22L1zSxg6yW1cWQg) or [send us an email](mailto:business@dify.ai?subject=%5BGitHub%5DBusiness%20License%20Inquiry) to discuss enterprise needs. </br>

> For startups and small businesses using AWS, check out [Dify Premium on AWS Marketplace](https://aws.amazon.com/marketplace/pp/prodview-t22mebxzwjhu6) and deploy it to your own AWS VPC with one click. It's an affordable AMI offering with the option to create apps with custom logo and branding.

## Staying ahead

@ -225,23 +158,27 @@ Deploy Dify to AWS with [CDK](https://aws.amazon.com/cdk/)

##### AWS

- [AWS CDK by @KevinZhao (EKS based)](https://github.com/aws-samples/solution-for-deploying-dify-on-aws)
- [AWS CDK by @tmokmss (ECS based)](https://github.com/aws-samples/dify-self-hosted-on-aws)

#### Using Alibaba Cloud Computing Nest

Quickly deploy Dify to Alibaba Cloud with [Alibaba Cloud Computing Nest](https://computenest.console.aliyun.com/service/instance/create/default?type=user&ServiceName=Dify%E7%A4%BE%E5%8C%BA%E7%89%88)

#### Using Alibaba Cloud Data Management

One-click deploy Dify to Alibaba Cloud with [Alibaba Cloud Data Management](https://www.alibabacloud.com/help/en/dms/dify-in-invitational-preview/)

#### Deploy to AKS with Azure DevOps Pipeline

One-click deploy Dify to AKS with [Azure DevOps Pipeline Helm Chart by @LeoZhang](https://github.com/Ruiruiz30/Dify-helm-chart-AKS)
## Contributing

For those who'd like to contribute code, see our [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md).
At the same time, please consider supporting Dify by sharing it on social media and at events and conferences.

> We are looking for contributors to help translate Dify into languages other than Mandarin or English. If you are interested in helping, please see the [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) for more information, and leave us a comment in the `global-users` channel of our [Discord Community Server](https://discord.gg/8Tpq4AcN9c).

## Community & contact
README_BN.md

@ -1,269 +0,0 @@

|
||||
|
||||
<p align="center">
|
||||
📌 <a href="https://dify.ai/blog/introducing-dify-workflow-file-upload-a-demo-on-ai-podcast">ডিফাই ওয়ার্কফ্লো ফাইল আপলোড পরিচিতি: গুগল নোটবুক-এলএম পডকাস্ট পুনর্নির্মাণ</a>
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
<a href="https://cloud.dify.ai">ডিফাই ক্লাউড</a> ·
|
||||
<a href="https://docs.dify.ai/getting-started/install-self-hosted">সেল্ফ-হোস্টিং</a> ·
|
||||
<a href="https://docs.dify.ai">ডকুমেন্টেশন</a> ·
|
||||
<a href="https://dify.ai/pricing">Dify পণ্যের রূপভেদ</a>
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
<a href="https://dify.ai" target="_blank">
|
||||
<img alt="Static Badge" src="https://img.shields.io/badge/Product-F04438"></a>
|
||||
<a href="https://dify.ai/pricing" target="_blank">
|
||||
<img alt="Static Badge" src="https://img.shields.io/badge/free-pricing?logo=free&color=%20%23155EEF&label=pricing&labelColor=%20%23528bff"></a>
|
||||
<a href="https://discord.gg/FngNHpbcY7" target="_blank">
|
||||
<img src="https://img.shields.io/discord/1082486657678311454?logo=discord&labelColor=%20%235462eb&logoColor=%20%23f5f5f5&color=%20%235462eb"
|
||||
alt="chat on Discord"></a>
|
||||
<a href="https://reddit.com/r/difyai" target="_blank">
|
||||
<img src="https://img.shields.io/reddit/subreddit-subscribers/difyai?style=plastic&logo=reddit&label=r%2Fdifyai&labelColor=white"
|
||||
alt="join Reddit"></a>
|
||||
<a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
|
||||
<img src="https://img.shields.io/twitter/follow/dify_ai?logo=X&color=%20%23f5f5f5"
|
||||
alt="follow on X(Twitter)"></a>
|
||||
<a href="https://www.linkedin.com/company/langgenius/" target="_blank">
|
||||
<img src="https://custom-icon-badges.demolab.com/badge/LinkedIn-0A66C2?logo=linkedin-white&logoColor=fff"
|
||||
alt="follow on LinkedIn"></a>
|
||||
<a href="https://hub.docker.com/u/langgenius" target="_blank">
|
||||
<img alt="Docker Pulls" src="https://img.shields.io/docker/pulls/langgenius/dify-web?labelColor=%20%23FDB062&color=%20%23f79009"></a>
|
||||
<a href="https://github.com/langgenius/dify/graphs/commit-activity" target="_blank">
|
||||
<img alt="Commits last month" src="https://img.shields.io/github/commit-activity/m/langgenius/dify?labelColor=%20%2332b583&color=%20%2312b76a"></a>
|
||||
<a href="https://github.com/langgenius/dify/" target="_blank">
|
||||
<img alt="Issues closed" src="https://img.shields.io/github/issues-search?query=repo%3Alanggenius%2Fdify%20is%3Aclosed&label=issues%20closed&labelColor=%20%237d89b0&color=%20%235d6b98"></a>
|
||||
<a href="https://github.com/langgenius/dify/discussions/" target="_blank">
|
||||
<img alt="Discussion posts" src="https://img.shields.io/github/discussions/langgenius/dify?labelColor=%20%239b8afb&color=%20%237a5af8"></a>
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
<a href="./README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a>
|
||||
<a href="./README_CN.md"><img alt="简体中文版自述文件" src="https://img.shields.io/badge/简体中文-d9d9d9"></a>
|
||||
<a href="./README_JA.md"><img alt="日本語のREADME" src="https://img.shields.io/badge/日本語-d9d9d9"></a>
|
||||
<a href="./README_ES.md"><img alt="README en Español" src="https://img.shields.io/badge/Español-d9d9d9"></a>
|
||||
<a href="./README_FR.md"><img alt="README en Français" src="https://img.shields.io/badge/Français-d9d9d9"></a>
|
||||
<a href="./README_KL.md"><img alt="README tlhIngan Hol" src="https://img.shields.io/badge/Klingon-d9d9d9"></a>
|
||||
<a href="./README_KR.md"><img alt="README in Korean" src="https://img.shields.io/badge/한국어-d9d9d9"></a>
|
||||
<a href="./README_AR.md"><img alt="README بالعربية" src="https://img.shields.io/badge/العربية-d9d9d9"></a>
|
||||
<a href="./README_TR.md"><img alt="Türkçe README" src="https://img.shields.io/badge/Türkçe-d9d9d9"></a>
|
||||
<a href="./README_VI.md"><img alt="README Tiếng Việt" src="https://img.shields.io/badge/Ti%E1%BA%BFng%20Vi%E1%BB%87t-d9d9d9"></a>
|
||||
<a href="./README_DE.md"><img alt="README in Deutsch" src="https://img.shields.io/badge/German-d9d9d9"></a>
|
||||
<a href="./README_BN.md"><img alt="README in বাংলা" src="https://img.shields.io/badge/বাংলা-d9d9d9"></a>
|
||||
</p>
|
||||
|
||||
Dify is an open-source LLM app development platform. It combines an intuitive interface, agentic AI workflows, RAG pipelines, agent capabilities, model management, monitoring features, and more, helping you move quickly from prototype to production.

## Quick start

> Before installing Dify, make sure your machine meets the following minimum configuration requirements:
>
> - CPU >= 2 cores
> - RAM >= 4 GB

</br>

The easiest way to start the Dify server is via [docker compose](docker/docker-compose.yaml). Before running Dify with the following commands, make sure [Docker](https://docs.docker.com/get-docker/) and [Docker Compose](https://docs.docker.com/compose/install/) are installed on your machine:

```bash
cd dify
cd docker
cp .env.example .env
docker compose up -d
```

After running it, you can access the Dify dashboard in your browser at [http://localhost/install](http://localhost/install) and begin the initialization process.

#### Seeking help

Please see our [FAQ](https://docs.dify.ai/getting-started/install-self-hosted/faqs) if you run into problems setting up Dify. If issues persist, reach out to [the community and us](#community--contact).

> If you would like to contribute to Dify or do additional development, see our [guide to deploying from source code](https://docs.dify.ai/getting-started/install-self-hosted/local-source-code).
## Key features

**1. Workflow**:
Build and test AI workflows on a visual canvas, using all of the following features and more.

**2. Model support**:
Seamless integration with proprietary / open-source LLMs, including GPT, Mistral, Llama3, and any OpenAI API-compatible model, across dozens of inference providers and self-hosted solutions. A full list of supported model providers can be found [here](https://docs.dify.ai/getting-started/readme/model-providers).

![providers-v5](https://github.com/langgenius/dify/assets/13230914/5a17bdbe-097a-4100-8363-40255b70f6e3)

**3. Prompt IDE**:
An intuitive interface for crafting prompts, comparing model performance, and adding features such as text-to-speech to chat-based apps.

**4. RAG pipeline**:
Extensive RAG capabilities covering everything from document ingestion to retrieval, with out-of-the-box support for text extraction from PDFs, PPTs, and other common document formats.

**5. Agent capabilities**:
You can define agents based on LLM function calling or ReAct, and add pre-built or custom tools for the agent. Dify provides 50+ built-in tools for AI agents, such as Google Search, DALL·E, Stable Diffusion, and WolframAlpha.

**6. LLMOps**:
Monitor and analyze application logs and performance over time. You can continuously improve prompts, datasets, and models based on production data and annotations.

**7. Backend-as-a-Service**:
All of Dify's offerings come with corresponding APIs, so you can effortlessly integrate Dify into your own business logic.
## Using Dify

- **Cloud </br>**
  You can use our [Dify Cloud](https://dify.ai) service with zero setup. It provides all the features and capabilities of self-hosting, plus 200 free GPT-4 calls in the sandbox plan.

- **Self-hosting Dify Community Edition</br>**
  Quickly run Dify in your environment with this [starter guide](#quick-start).
  See the [documentation](https://docs.dify.ai) for more in-depth references.

- **Dify for enterprises / organizations</br>**
  We provide enterprise/organization-centric services. [Log your questions for us through this chatbot](https://udify.app/chat/22L1zSxg6yW1cWQg) or [send us an email](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry) to discuss your needs. </br>

> For startups and small businesses using AWS, check out [Dify Premium on AWS Marketplace](https://aws.amazon.com/marketplace/pp/prodview-t22mebxzwjhu6) and deploy it to your own AWS VPC with one click. It's an affordable AMI offering with the option to create apps with custom logo and branding.

## Staying ahead

Star Dify on GitHub and be instantly notified of new releases.

![star-us](https://github.com/langgenius/dify/assets/13230914/b3fe6181-2ac8-4aac-8c48-a87ff899d829)
## Advanced Setup

If you need to customize the configuration, please refer to our [.env.example](docker/.env.example) file and update the corresponding values in your `.env` file. Depending on your specific environment and requirements, you may also need to adjust the `docker-compose.yaml` file itself, for example changing image versions, port mappings, or volume mounts. After making any changes, please re-run `docker-compose up -d`. The full list of available variables can be found [here](https://docs.dify.ai/getting-started/install-self-hosted/environments).

If you want to configure a highly available setup, the community provides [Helm Charts](https://helm.sh/) and YAML files that describe how to deploy Dify on Kubernetes.

- [Helm Chart by @LeoQuote](https://github.com/douban/charts/tree/master/charts/dify)
- [Helm Chart by @BorisPolonsky](https://github.com/BorisPolonsky/dify-helm)
- [Helm Chart by @magicsong](https://github.com/magicsong/ai-charts)
- [YAML file by @Winson-030](https://github.com/Winson-030/dify-kubernetes)
- [YAML file by @wyy-holding](https://github.com/wyy-holding/dify-k8s)
- [🚀 NEW! YAML files (supports Dify v1.6.0) by @Zhoneym](https://github.com/Zhoneym/DifyAI-Kubernetes)
#### Deploy with Terraform

Deploy Dify to a cloud platform with one click using [terraform](https://www.terraform.io/).

##### Azure Global

- [Azure Terraform by @nikawang](https://github.com/nikawang/dify-azure-terraform)

##### Google Cloud

- [Google Cloud Terraform by @sotazum](https://github.com/DeNA/dify-google-cloud-terraform)

#### Deploy with AWS CDK

Deploy Dify to AWS with [CDK](https://aws.amazon.com/cdk/)

##### AWS

- [AWS CDK by @KevinZhao](https://github.com/aws-samples/solution-for-deploying-dify-on-aws)

#### Deploy with Alibaba Cloud

[Alibaba Cloud Computing Nest](https://computenest.console.aliyun.com/service/instance/create/default?type=user&ServiceName=Dify%E7%A4%BE%E5%8C%BA%E7%89%88)

#### Deploy with Alibaba Cloud Data Management

[Alibaba Cloud Data Management](https://www.alibabacloud.com/help/en/dms/dify-in-invitational-preview/)
## Contributing

For those who'd like to contribute code, see our [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md).
At the same time, please support Dify by sharing it on social media and at events and conferences.

> We are looking for contributors to help translate Dify into languages other than Mandarin or English. If you are interested in helping, please see the [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n/README.md) for more information, and leave us a comment in the `global-users` channel of our [Discord Community Server](https://discord.gg/8Tpq4AcN9c).

## Community and contact

- [GitHub Discussion](https://github.com/langgenius/dify/discussions): for sharing feedback and asking questions.
- [GitHub Issues](https://github.com/langgenius/dify/issues): for bugs you encounter using Dify.AI and feature proposals. See our [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md).
- [Discord](https://discord.gg/FngNHpbcY7): for sharing your applications and hanging out with the community.
- [X(Twitter)](https://twitter.com/dify_ai): for sharing your applications and hanging out with the community.

**Contributors**

<a href="https://github.com/langgenius/dify/graphs/contributors">
  <img src="https://contrib.rocks/image?repo=langgenius/dify" />
</a>

## Star history

[![Star History Chart](https://api.star-history.com/svg?repos=langgenius/dify&type=Date)](https://star-history.com/#langgenius/dify&Date)

## Security disclosure

To protect your privacy, please avoid posting security issues on GitHub. Instead, send your questions to <security@dify.ai> and we will provide you with a more detailed answer.

## License

This repository is available under the [Dify Open Source License](LICENSE), which is essentially Apache 2.0 with a few additional restrictions.
README_DE.md

@ -1,266 +0,0 @@

|
||||
|
||||
<p align="center">
|
||||
📌 <a href="https://dify.ai/blog/introducing-dify-workflow-file-upload-a-demo-on-ai-podcast">Einführung in Dify Workflow File Upload: Google NotebookLM Podcast nachbilden</a>
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
<a href="https://cloud.dify.ai">Dify Cloud</a> ·
|
||||
<a href="https://docs.dify.ai/getting-started/install-self-hosted">Selbstgehostetes</a> ·
|
||||
<a href="https://docs.dify.ai">Dokumentation</a> ·
|
||||
<a href="https://dify.ai/pricing">Überblick über die Dify-Produkte</a>
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
<a href="https://dify.ai" target="_blank">
|
||||
<img alt="Static Badge" src="https://img.shields.io/badge/Product-F04438"></a>
|
||||
<a href="https://dify.ai/pricing" target="_blank">
|
||||
<img alt="Static Badge" src="https://img.shields.io/badge/free-pricing?logo=free&color=%20%23155EEF&label=pricing&labelColor=%20%23528bff"></a>
|
||||
<a href="https://discord.gg/FngNHpbcY7" target="_blank">
|
||||
<img src="https://img.shields.io/discord/1082486657678311454?logo=discord&labelColor=%20%235462eb&logoColor=%20%23f5f5f5&color=%20%235462eb"
|
||||
alt="chat on Discord"></a>
|
||||
<a href="https://reddit.com/r/difyai" target="_blank">
|
||||
<img src="https://img.shields.io/reddit/subreddit-subscribers/difyai?style=plastic&logo=reddit&label=r%2Fdifyai&labelColor=white"
|
||||
alt="join Reddit"></a>
|
||||
<a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
|
||||
<img src="https://img.shields.io/twitter/follow/dify_ai?logo=X&color=%20%23f5f5f5"
|
||||
alt="follow on X(Twitter)"></a>
|
||||
<a href="https://www.linkedin.com/company/langgenius/" target="_blank">
|
||||
<img src="https://custom-icon-badges.demolab.com/badge/LinkedIn-0A66C2?logo=linkedin-white&logoColor=fff"
|
||||
alt="follow on LinkedIn"></a>
|
||||
<a href="https://hub.docker.com/u/langgenius" target="_blank">
|
||||
<img alt="Docker Pulls" src="https://img.shields.io/docker/pulls/langgenius/dify-web?labelColor=%20%23FDB062&color=%20%23f79009"></a>
|
||||
<a href="https://github.com/langgenius/dify/graphs/commit-activity" target="_blank">
|
||||
<img alt="Commits last month" src="https://img.shields.io/github/commit-activity/m/langgenius/dify?labelColor=%20%2332b583&color=%20%2312b76a"></a>
|
||||
<a href="https://github.com/langgenius/dify/" target="_blank">
|
||||
<img alt="Issues closed" src="https://img.shields.io/github/issues-search?query=repo%3Alanggenius%2Fdify%20is%3Aclosed&label=issues%20closed&labelColor=%20%237d89b0&color=%20%235d6b98"></a>
|
||||
<a href="https://github.com/langgenius/dify/discussions/" target="_blank">
|
||||
<img alt="Discussion posts" src="https://img.shields.io/github/discussions/langgenius/dify?labelColor=%20%239b8afb&color=%20%237a5af8"></a>
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
<a href="./README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a>
|
||||
<a href="./README_CN.md"><img alt="简体中文版自述文件" src="https://img.shields.io/badge/简体中文-d9d9d9"></a>
|
||||
<a href="./README_JA.md"><img alt="日本語のREADME" src="https://img.shields.io/badge/日本語-d9d9d9"></a>
|
||||
<a href="./README_ES.md"><img alt="README en Español" src="https://img.shields.io/badge/Español-d9d9d9"></a>
|
||||
<a href="./README_FR.md"><img alt="README en Français" src="https://img.shields.io/badge/Français-d9d9d9"></a>
|
||||
<a href="./README_KL.md"><img alt="README tlhIngan Hol" src="https://img.shields.io/badge/Klingon-d9d9d9"></a>
|
||||
<a href="./README_KR.md"><img alt="README in Korean" src="https://img.shields.io/badge/한국어-d9d9d9"></a>
|
||||
<a href="./README_AR.md"><img alt="README بالعربية" src="https://img.shields.io/badge/العربية-d9d9d9"></a>
|
||||
<a href="./README_TR.md"><img alt="Türkçe README" src="https://img.shields.io/badge/Türkçe-d9d9d9"></a>
|
||||
<a href="./README_VI.md"><img alt="README Tiếng Việt" src="https://img.shields.io/badge/Ti%E1%BA%BFng%20Vi%E1%BB%87t-d9d9d9"></a>
|
||||
<a href="./README_DE.md"><img alt="README in Deutsch" src="https://img.shields.io/badge/German-d9d9d9"></a>
|
||||
<a href="./README_BN.md"><img alt="README in বাংলা" src="https://img.shields.io/badge/বাংলা-d9d9d9"></a>
|
||||
</p>
|
||||
|
||||
Dify ist eine Open-Source-Plattform zur Entwicklung von LLM-Anwendungen. Ihre intuitive Benutzeroberfläche vereint agentenbasierte KI-Workflows, RAG-Pipelines, Agentenfunktionen, Modellverwaltung, Überwachungsfunktionen und mehr, sodass Sie schnell von einem Prototyp in die Produktion übergehen können.
|
||||
|
||||
## Schnellstart
|
||||
> Bevor Sie Dify installieren, stellen Sie sicher, dass Ihr System die folgenden Mindestanforderungen erfüllt:
|
||||
>
|
||||
>- CPU >= 2 Core
|
||||
>- RAM >= 4 GiB
|
||||
|
||||
</br>
|
||||
|
||||
Der einfachste Weg, den Dify-Server zu starten, ist über [docker compose](docker/docker-compose.yaml). Stellen Sie vor dem Ausführen von Dify mit den folgenden Befehlen sicher, dass [Docker](https://docs.docker.com/get-docker/) und [Docker Compose](https://docs.docker.com/compose/install/) auf Ihrem System installiert sind:
|
||||
|
||||
```bash
|
||||
cd dify
|
||||
cd docker
|
||||
cp .env.example .env
|
||||
docker compose up -d
|
||||
```
|
||||
|
||||
Nachdem Sie den Server gestartet haben, können Sie über Ihren Browser auf das Dify Dashboard unter [http://localhost/install](http://localhost/install) zugreifen und den Initialisierungsprozess starten.
|
||||
|
||||
#### Hilfe suchen
|
||||
Bitte beachten Sie unsere [FAQ](https://docs.dify.ai/getting-started/install-self-hosted/faqs), wenn Sie Probleme bei der Einrichtung von Dify haben. Wenden Sie sich an [die Community und uns](#community--contact), falls weiterhin Schwierigkeiten auftreten.
|
||||
|
||||
> Wenn Sie zu Dify beitragen oder zusätzliche Entwicklungen durchführen möchten, lesen Sie bitte unseren [Leitfaden zur Bereitstellung aus dem Quellcode](https://docs.dify.ai/getting-started/install-self-hosted/local-source-code).
|
||||
|
||||
## Wesentliche Merkmale
|
||||
**1. Workflow**:
|
||||
Erstellen und testen Sie leistungsstarke KI-Workflows auf einer visuellen Oberfläche, wobei Sie alle der folgenden Funktionen und darüber hinaus nutzen können.
|
||||
|
||||
**2. Umfassende Modellunterstützung**:
|
||||
Nahtlose Integration mit Hunderten von proprietären und Open-Source-LLMs von Dutzenden Inferenzanbietern und selbstgehosteten Lösungen, die GPT, Mistral, Llama3 und alle mit der OpenAI API kompatiblen Modelle abdecken. Eine vollständige Liste der unterstützten Modellanbieter finden Sie [hier](https://docs.dify.ai/getting-started/readme/model-providers).
|
||||
|
||||
|
||||

|
||||
|
||||
|
||||
**3. Prompt IDE**:
|
||||
Intuitive Benutzeroberfläche zum Erstellen von Prompts, zum Vergleichen der Modellleistung und zum Hinzufügen zusätzlicher Funktionen wie Text-to-Speech in einer chatbasierten Anwendung.
|
||||
|
||||
**4. RAG Pipeline**:
|
||||
Umfassende RAG-Funktionalitäten, die alles von der Dokumenteneinlesung bis zur -abfrage abdecken, mit sofort einsatzbereiter Unterstützung für die Textextraktion aus PDFs, PPTs und anderen gängigen Dokumentformaten.
|
||||
|
||||
**5. Fähigkeiten des Agenten**:
|
||||
Sie können Agenten basierend auf LLM Function Calling oder ReAct definieren und vorgefertigte oder benutzerdefinierte Tools für den Agenten hinzufügen. Dify stellt über 50 integrierte Tools für KI-Agenten bereit, wie zum Beispiel Google Search, DALL·E, Stable Diffusion und WolframAlpha.
|
||||
|
||||
**6. LLMOps**:
|
||||
Überwachen und analysieren Sie Anwendungsprotokolle und die Leistung im Laufe der Zeit. Sie können kontinuierlich Prompts, Datensätze und Modelle basierend auf Produktionsdaten und Annotationen verbessern.
|
||||
|
||||
**7. Backend-as-a-Service**:
|
||||
Alle Dify-Angebote kommen mit entsprechenden APIs, sodass Sie Dify mühelos in Ihre eigene Geschäftslogik integrieren können.
|
||||
|
||||
## Vergleich der Merkmale
|
||||
<table style="width: 100%;">
|
||||
<tr>
|
||||
<th align="center">Feature</th>
|
||||
<th align="center">Dify.AI</th>
|
||||
<th align="center">LangChain</th>
|
||||
<th align="center">Flowise</th>
|
||||
<th align="center">OpenAI Assistants API</th>
|
||||
</tr>
|
||||
<tr>
|
||||
<td align="center">Programming Approach</td>
|
||||
<td align="center">API + App-oriented</td>
|
||||
<td align="center">Python Code</td>
|
||||
<td align="center">App-oriented</td>
|
||||
<td align="center">API-oriented</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td align="center">Supported LLMs</td>
|
||||
<td align="center">Rich Variety</td>
|
||||
<td align="center">Rich Variety</td>
|
||||
<td align="center">Rich Variety</td>
|
||||
<td align="center">OpenAI-only</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td align="center">RAG Engine</td>
|
||||
<td align="center">✅</td>
|
||||
<td align="center">✅</td>
|
||||
<td align="center">✅</td>
|
||||
<td align="center">✅</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td align="center">Agent</td>
|
||||
<td align="center">✅</td>
|
||||
<td align="center">✅</td>
|
||||
<td align="center">❌</td>
|
||||
<td align="center">✅</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td align="center">Workflow</td>
|
||||
<td align="center">✅</td>
|
||||
<td align="center">❌</td>
|
||||
<td align="center">✅</td>
|
||||
<td align="center">❌</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td align="center">Observability</td>
|
||||
<td align="center">✅</td>
|
||||
<td align="center">✅</td>
|
||||
<td align="center">❌</td>
|
||||
<td align="center">❌</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td align="center">Enterprise Feature (SSO/Access control)</td>
|
||||
<td align="center">✅</td>
|
||||
<td align="center">❌</td>
|
||||
<td align="center">❌</td>
|
||||
<td align="center">❌</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td align="center">Local Deployment</td>
|
||||
<td align="center">✅</td>
|
||||
<td align="center">✅</td>
|
||||
<td align="center">✅</td>
|
||||
<td align="center">❌</td>
|
||||
</tr>
|
||||
</table>
|
||||
|
||||
## Dify verwenden
|
||||
|
||||
- **Cloud </br>**
|
||||
Wir hosten einen [Dify Cloud](https://dify.ai)-Service, den jeder ohne Einrichtung ausprobieren kann. Er bietet alle Funktionen der selbstgehosteten Version und beinhaltet 200 kostenlose GPT-4-Aufrufe im Sandbox-Plan.
|
||||
|
||||
- **Selbstgehostete Dify Community Edition</br>**
|
||||
Starten Sie Dify schnell in Ihrer Umgebung mit diesem [Schnellstart-Leitfaden](#quick-start). Nutzen Sie unsere [Dokumentation](https://docs.dify.ai) für weiterführende Informationen und detaillierte Anweisungen.
|
||||
|
||||
- **Dify für Unternehmen / Organisationen</br>**
|
||||
Wir bieten zusätzliche, unternehmensspezifische Funktionen. [Über diesen Chatbot können Sie uns Ihre Fragen mitteilen](https://udify.app/chat/22L1zSxg6yW1cWQg) oder [senden Sie uns eine E-Mail](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry), um Ihre unternehmerischen Bedürfnisse zu besprechen. </br>
|
||||
> Für Startups und kleine Unternehmen, die AWS nutzen, schauen Sie sich [Dify Premium on AWS Marketplace](https://aws.amazon.com/marketplace/pp/prodview-t22mebxzwjhu6) an und stellen Sie es mit nur einem Klick in Ihrer eigenen AWS VPC bereit. Es handelt sich um ein erschwingliches AMI-Angebot mit der Option, Apps mit individuellem Logo und Branding zu erstellen.
|
||||
|
||||
|
||||
## Immer einen Schritt voraus
|
||||
|
||||
Star Dify auf GitHub und lassen Sie sich sofort über neue Releases benachrichtigen.
|
||||
|
||||

|
||||
|
||||
|
||||
## Erweiterte Einstellungen
|
||||
|
||||
Falls Sie die Konfiguration anpassen müssen, lesen Sie bitte die Kommentare in unserer [.env.example](docker/.env.example)-Datei und aktualisieren Sie die entsprechenden Werte in Ihrer `.env`-Datei. Zusätzlich müssen Sie eventuell Anpassungen an der `docker-compose.yaml`-Datei vornehmen, wie zum Beispiel das Ändern von Image-Versionen, Portzuordnungen oder Volumen-Mounts, je nach Ihrer spezifischen Einsatzumgebung und Ihren Anforderungen. Nachdem Sie Änderungen vorgenommen haben, starten Sie `docker-compose up -d` erneut. Eine vollständige Liste der verfügbaren Umgebungsvariablen finden Sie [hier](https://docs.dify.ai/getting-started/install-self-hosted/environments).
|
||||
|
||||
Falls Sie eine hochverfügbare Konfiguration einrichten möchten, gibt es von der Community bereitgestellte [Helm Charts](https://helm.sh/) und YAML-Dateien, die es ermöglichen, Dify auf Kubernetes bereitzustellen.
|
||||
|
||||
- [Helm Chart by @LeoQuote](https://github.com/douban/charts/tree/master/charts/dify)
|
||||
- [Helm Chart by @BorisPolonsky](https://github.com/BorisPolonsky/dify-helm)
|
||||
- [Helm Chart by @magicsong](https://github.com/magicsong/ai-charts)
|
||||
- [YAML file by @Winson-030](https://github.com/Winson-030/dify-kubernetes)
|
||||
- [YAML file by @wyy-holding](https://github.com/wyy-holding/dify-k8s)
|
||||
- [🚀 NEW! YAML files (Supports Dify v1.6.0) by @Zhoneym](https://github.com/Zhoneym/DifyAI-Kubernetes)
|
||||
|
||||
#### Terraform für die Bereitstellung verwenden
|
||||
|
||||
Stellen Sie Dify mit nur einem Klick mithilfe von [terraform](https://www.terraform.io/) auf einer Cloud-Plattform bereit.
|
||||
|
||||
##### Azure Global
|
||||
- [Azure Terraform by @nikawang](https://github.com/nikawang/dify-azure-terraform)
|
||||
|
||||
##### Google Cloud
|
||||
- [Google Cloud Terraform by @sotazum](https://github.com/DeNA/dify-google-cloud-terraform)
|
||||
|
||||
#### Verwendung von AWS CDK für die Bereitstellung
|
||||
|
||||
Bereitstellung von Dify auf AWS mit [CDK](https://aws.amazon.com/cdk/)
|
||||
|
||||
##### AWS
|
||||
- [AWS CDK by @KevinZhao](https://github.com/aws-samples/solution-for-deploying-dify-on-aws)
|
||||
|
||||
#### Alibaba Cloud
|
||||
|
||||
[Alibaba Cloud Computing Nest](https://computenest.console.aliyun.com/service/instance/create/default?type=user&ServiceName=Dify%E7%A4%BE%E5%8C%BA%E7%89%88)
|
||||
|
||||
#### Alibaba Cloud Data Management
|
||||
|
||||
Ein-Klick-Bereitstellung von Dify in der Alibaba Cloud mit [Alibaba Cloud Data Management](https://www.alibabacloud.com/help/en/dms/dify-in-invitational-preview/)
|
||||
|
||||
|
||||
## Contributing
|
||||
|
||||
Falls Sie Code beitragen möchten, lesen Sie bitte unseren [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md). Gleichzeitig bitten wir Sie, Dify zu unterstützen, indem Sie es in den sozialen Medien teilen und auf Veranstaltungen und Konferenzen präsentieren.
|
||||
|
||||
|
||||
> Wir suchen Mitwirkende, die dabei helfen, Dify in weitere Sprachen zu übersetzen – außer Mandarin oder Englisch. Wenn Sie Interesse an einer Mitarbeit haben, lesen Sie bitte die [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n/README.md) für weitere Informationen und hinterlassen Sie einen Kommentar im `global-users`-Kanal unseres [Discord Community Servers](https://discord.gg/8Tpq4AcN9c).
|
||||
|
||||
## Gemeinschaft & Kontakt
|
||||
|
||||
* [GitHub Discussion](https://github.com/langgenius/dify/discussions). Am besten geeignet für: den Austausch von Feedback und das Stellen von Fragen.
|
||||
* [GitHub Issues](https://github.com/langgenius/dify/issues). Am besten für: Fehler, auf die Sie bei der Verwendung von Dify.AI stoßen, und Funktionsvorschläge. Siehe unseren [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md).
|
||||
* [Discord](https://discord.gg/FngNHpbcY7). Am besten geeignet für: den Austausch von Bewerbungen und den Austausch mit der Community.
|
||||
* [X(Twitter)](https://twitter.com/dify_ai). Am besten geeignet für: den Austausch von Bewerbungen und den Austausch mit der Community.
|
||||
|
||||
**Mitwirkende**
|
||||
|
||||
<a href="https://github.com/langgenius/dify/graphs/contributors">
|
||||
<img src="https://contrib.rocks/image?repo=langgenius/dify" />
|
||||
</a>
|
||||
|
||||
## Star-Geschichte
|
||||
|
||||
[](https://star-history.com/#langgenius/dify&Date)
|
||||
|
||||
|
||||
## Offenlegung der Sicherheit
|
||||
|
||||
Um Ihre Privatsphäre zu schützen, vermeiden Sie es bitte, Sicherheitsprobleme auf GitHub zu posten. Schicken Sie Ihre Fragen stattdessen an security@dify.ai und wir werden Ihnen eine ausführlichere Antwort geben.
|
||||
|
||||
## Lizenz
|
||||
|
||||
Dieses Repository steht unter der [Dify Open Source License](LICENSE), die im Wesentlichen Apache 2.0 mit einigen zusätzlichen Einschränkungen ist.
|
||||
|
||||
README_FR.md

@ -1,271 +0,0 @@

|
||||
|
||||
<p align="center">
|
||||
<a href="https://cloud.dify.ai">Dify Cloud</a> ·
|
||||
<a href="https://docs.dify.ai/getting-started/install-self-hosted">Auto-hébergement</a> ·
|
||||
<a href="https://docs.dify.ai">Documentation</a> ·
|
||||
<a href="https://dify.ai/pricing">Présentation des différentes offres Dify</a>
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
<a href="https://dify.ai" target="_blank">
|
||||
<img alt="Badge statique" src="https://img.shields.io/badge/Produit-F04438"></a>
|
||||
<a href="https://dify.ai/pricing" target="_blank">
|
||||
<img alt="Badge statique" src="https://img.shields.io/badge/gratuit-Tarification?logo=free&color=%20%23155EEF&label=pricing&labelColor=%20%23528bff"></a>
|
||||
<a href="https://discord.gg/FngNHpbcY7" target="_blank">
|
||||
<img src="https://img.shields.io/discord/1082486657678311454?logo=discord&labelColor=%20%235462eb&logoColor=%20%23f5f5f5&color=%20%235462eb"
|
||||
alt="chat sur Discord"></a>
|
||||
<a href="https://reddit.com/r/difyai" target="_blank">
|
||||
<img src="https://img.shields.io/reddit/subreddit-subscribers/difyai?style=plastic&logo=reddit&label=r%2Fdifyai&labelColor=white"
|
||||
alt="join Reddit"></a>
|
||||
<a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
|
||||
<img src="https://img.shields.io/twitter/follow/dify_ai?logo=X&color=%20%23f5f5f5"
|
||||
alt="suivre sur X(Twitter)"></a>
|
||||
<a href="https://www.linkedin.com/company/langgenius/" target="_blank">
|
||||
<img src="https://custom-icon-badges.demolab.com/badge/LinkedIn-0A66C2?logo=linkedin-white&logoColor=fff"
|
||||
alt="suivre sur LinkedIn"></a>
|
||||
<a href="https://hub.docker.com/u/langgenius" target="_blank">
|
||||
<img alt="Tirages Docker" src="https://img.shields.io/docker/pulls/langgenius/dify-web?labelColor=%20%23FDB062&color=%20%23f79009"></a>
|
||||
<a href="https://github.com/langgenius/dify/graphs/commit-activity" target="_blank">
|
||||
<img alt="Commits le mois dernier" src="https://img.shields.io/github/commit-activity/m/langgenius/dify?labelColor=%20%2332b583&color=%20%2312b76a"></a>
|
||||
<a href="https://github.com/langgenius/dify/" target="_blank">
|
||||
<img alt="Problèmes fermés" src="https://img.shields.io/github/issues-search?query=repo%3Alanggenius%2Fdify%20is%3Aclosed&label=issues%20closed&labelColor=%20%237d89b0&color=%20%235d6b98"></a>
|
||||
<a href="https://github.com/langgenius/dify/discussions/" target="_blank">
|
||||
<img alt="Messages de discussion" src="https://img.shields.io/github/discussions/langgenius/dify?labelColor=%20%239b8afb&color=%20%237a5af8"></a>
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
<a href="./README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a>
|
||||
<a href="./README_CN.md"><img alt="简体中文版自述文件" src="https://img.shields.io/badge/简体中文-d9d9d9"></a>
|
||||
<a href="./README_JA.md"><img alt="日本語のREADME" src="https://img.shields.io/badge/日本語-d9d9d9"></a>
|
||||
<a href="./README_ES.md"><img alt="README en Español" src="https://img.shields.io/badge/Español-d9d9d9"></a>
|
||||
<a href="./README_FR.md"><img alt="README en Français" src="https://img.shields.io/badge/Français-d9d9d9"></a>
|
||||
<a href="./README_KL.md"><img alt="README tlhIngan Hol" src="https://img.shields.io/badge/Klingon-d9d9d9"></a>
|
||||
<a href="./README_KR.md"><img alt="README in Korean" src="https://img.shields.io/badge/한국어-d9d9d9"></a>
|
||||
<a href="./README_AR.md"><img alt="README بالعربية" src="https://img.shields.io/badge/العربية-d9d9d9"></a>
|
||||
<a href="./README_TR.md"><img alt="Türkçe README" src="https://img.shields.io/badge/Türkçe-d9d9d9"></a>
|
||||
<a href="./README_VI.md"><img alt="README Tiếng Việt" src="https://img.shields.io/badge/Ti%E1%BA%BFng%20Vi%E1%BB%87t-d9d9d9"></a>
|
||||
<a href="./README_BN.md"><img alt="README in বাংলা" src="https://img.shields.io/badge/বাংলা-d9d9d9"></a>
|
||||
</p>
|
||||
|
||||
#
|
||||
|
||||
<p align="center">
|
||||
<a href="https://trendshift.io/repositories/2152" target="_blank"><img src="https://trendshift.io/api/badge/repositories/2152" alt="langgenius%2Fdify | Trendshift" style="width: 250px; height: 55px;" width="250" height="55"/></a>
|
||||
</p>
|
||||
Dify est une plateforme de développement d'applications LLM open source. Son interface intuitive combine un flux de travail d'IA, un pipeline RAG, des capacités d'agent, une gestion de modèles, des fonctionnalités d'observabilité, et plus encore, vous permettant de passer rapidement du prototype à la production. Voici une liste des fonctionnalités principales:
|
||||
</br> </br>
|
||||
|
||||
**1. Flux de travail** :
|
||||
Construisez et testez des flux de travail d'IA puissants sur un canevas visuel, en utilisant toutes les fonctionnalités suivantes et plus encore.
|
||||
|
||||
**2. Prise en charge complète des modèles** :
|
||||
Intégration transparente avec des centaines de LLM propriétaires / open source provenant de dizaines de fournisseurs d'inférence et de solutions auto-hébergées, couvrant GPT, Mistral, Llama3, et tous les modèles compatibles avec l'API OpenAI. Une liste complète des fournisseurs de modèles pris en charge se trouve [ici](https://docs.dify.ai/getting-started/readme/model-providers).
|
||||
|
||||

|
||||
|
||||
|
||||
**3. IDE de prompt** :
|
||||
Interface intuitive pour créer des prompts, comparer les performances des modèles et ajouter des fonctionnalités supplémentaires telles que la synthèse vocale à une application basée sur des chats.
|
||||
|
||||
**4. Pipeline RAG** :
|
||||
Des capacités RAG étendues qui couvrent tout, de l'ingestion de documents à la récupération, avec un support prêt à l'emploi pour l'extraction de texte à partir de PDF, PPT et autres formats de document courants.
|
||||
|
||||
**5. Capacités d'agent** :
|
||||
Vous pouvez définir des agents basés sur l'appel de fonction LLM ou ReAct, et ajouter des outils pré-construits ou personnalisés pour l'agent. Dify fournit plus de 50 outils intégrés pour les agents d'IA, tels que la recherche Google, DALL·E, Stable Diffusion et WolframAlpha.
|
||||
|
||||
**6. LLMOps** :
|
||||
Surveillez et analysez les journaux d'application et les performances au fil du temps. Vous pouvez continuellement améliorer les prompts, les ensembles de données et les modèles en fonction des données de production et des annotations.
|
||||
|
||||
**7. Backend-as-a-Service** :
|
||||
Toutes les offres de Dify sont accompagnées d'API correspondantes, vous permettant d'intégrer facilement Dify dans votre propre logique métier.
|
||||
|
||||
|
||||
## Comparaison des fonctionnalités
|
||||
<table style="width: 100%;">
|
||||
<tr>
|
||||
<th align="center">Fonctionnalité</th>
|
||||
<th align="center">Dify.AI</th>
|
||||
<th align="center">LangChain</th>
|
||||
<th align="center">Flowise</th>
|
||||
<th align="center">OpenAI Assistants API</th>
|
||||
</tr>
|
||||
<tr>
|
||||
<td align="center">Approche de programmation</td>
|
||||
<td align="center">API + Application</td>
|
||||
<td align="center">Code Python</td>
|
||||
<td align="center">Application</td>
|
||||
<td align="center">API</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td align="center">LLMs pris en charge</td>
|
||||
<td align="center">Grande variété</td>
|
||||
<td align="center">Grande variété</td>
|
||||
<td align="center">Grande variété</td>
|
||||
<td align="center">Uniquement OpenAI</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td align="center">Moteur RAG</td>
|
||||
<td align="center">✅</td>
|
||||
<td align="center">✅</td>
|
||||
<td align="center">✅</td>
|
||||
<td align="center">✅</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td align="center">Agent</td>
|
||||
<td align="center">✅</td>
|
||||
<td align="center">✅</td>
|
||||
<td align="center">❌</td>
|
||||
<td align="center">✅</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td align="center">Flux de travail</td>
|
||||
<td align="center">✅</td>
|
||||
<td align="center">❌</td>
|
||||
<td align="center">✅</td>
|
||||
<td align="center">❌</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td align="center">Observabilité</td>
|
||||
<td align="center">✅</td>
|
||||
<td align="center">✅</td>
|
||||
<td align="center">❌</td>
|
||||
<td align="center">❌</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td align="center">Fonctionnalité d'entreprise (SSO/Contrôle d'accès)</td>
|
||||
<td align="center">✅</td>
|
||||
<td align="center">❌</td>
|
||||
<td align="center">❌</td>
|
||||
<td align="center">❌</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td align="center">Déploiement local</td>
|
||||
<td align="center">✅</td>
|
||||
<td align="center">✅</td>
|
||||
<td align="center">✅</td>
|
||||
<td align="center">❌</td>
|
||||
</tr>
|
||||
</table>
|
||||
|
||||
## Using Dify

- **Cloud </br>**
  We host a [Dify Cloud](https://dify.ai) service for anyone to try with zero setup. It provides all the capabilities of the self-hosted version, and includes 200 free GPT-4 calls in the sandbox plan.

- **Self-hosting Dify Community Edition</br>**
  Quickly get Dify running in your environment with this [starter guide](#quick-start).
  Use our [documentation](https://docs.dify.ai) for further references and more detailed instructions.

- **Dify for enterprises / organizations</br>**
  We provide additional enterprise-centric features. [Email us](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry) to discuss enterprise needs. </br>
  > For startups and small businesses using AWS, check out [Dify Premium on AWS Marketplace](https://aws.amazon.com/marketplace/pp/prodview-t22mebxzwjhu6) and deploy it to your own AWS VPC with one click. It's an affordable AMI offering with the option to create apps with custom logo and branding.
## Staying Ahead

Star Dify on GitHub and be instantly notified of new releases.

![star-us](https://github.com/langgenius/dify/assets/13230914/b823edc1-6388-4e25-ad45-2f6b187adbb4)
## Quick start

> Before installing Dify, make sure your machine meets the following minimum system requirements:
>
> - CPU >= 2 Cores
> - RAM >= 4 GB

</br>

The easiest way to start the Dify server is to run our [docker-compose.yml](docker/docker-compose.yaml) file. Before running the installation command, make sure that [Docker](https://docs.docker.com/get-docker/) and [Docker Compose](https://docs.docker.com/compose/install/) are installed on your machine:

```bash
cd docker
cp .env.example .env
docker compose up -d
```

After running, you can access the Dify dashboard in your browser at [http://localhost/install](http://localhost/install) and start the initialization process.
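To confirm the stack came up cleanly, the standard Docker Compose commands work here (a quick check; the `api` service name comes from the compose file, so adjust it if yours differs):

```bash
cd docker
docker compose ps            # every service should report a running / healthy state
docker compose logs -f api   # tail the API container if anything looks off
```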
|
||||
|
||||
> If you'd like to contribute to Dify or do additional development, refer to our [guide to deploying from source code](https://docs.dify.ai/getting-started/install-self-hosted/local-source-code).
## Next steps

If you need to customize the configuration, please refer to the comments in our [.env.example](docker/.env.example) file and update the corresponding values in your `.env` file. Additionally, you might need to make adjustments to the `docker-compose.yaml` file itself, such as changing image versions, port mappings, or volume mounts, based on your specific deployment environment and requirements. After making any changes, please re-run `docker compose up -d`. You can find the full list of available environment variables [here](https://docs.dify.ai/getting-started/install-self-hosted/environments).

If you'd like to configure a highly-available setup, the community provides [Helm Charts](https://helm.sh/) and YAML files which allow Dify to be deployed on Kubernetes; a minimal install sketch follows the list below.

- [Helm Chart by @LeoQuote](https://github.com/douban/charts/tree/master/charts/dify)
- [Helm Chart by @BorisPolonsky](https://github.com/BorisPolonsky/dify-helm)
- [Helm Chart by @magicsong](https://github.com/magicsong/ai-charts)
- [YAML file by @Winson-030](https://github.com/Winson-030/dify-kubernetes)
- [YAML file by @wyy-holding](https://github.com/wyy-holding/dify-k8s)
- [🚀 NEW! YAML files (compatible with Dify v1.6.0) by @Zhoneym](https://github.com/Zhoneym/DifyAI-Kubernetes)
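As a minimal install sketch for one of the community charts above (the repository URL follows the dify-helm project's README and the release name is arbitrary; review the chart's values before relying on it):

```bash
helm repo add dify https://borispolonsky.github.io/dify-helm
helm repo update
helm install my-dify dify/dify
```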
|
||||
|
||||
#### Deploying with Terraform

Deploy Dify to a cloud platform with a single click using [terraform](https://www.terraform.io/)

##### Azure Global

- [Azure Terraform by @nikawang](https://github.com/nikawang/dify-azure-terraform)

##### Google Cloud

- [Google Cloud Terraform by @sotazum](https://github.com/DeNA/dify-google-cloud-terraform)

#### Deploying with AWS CDK

Deploy Dify to AWS using [CDK](https://aws.amazon.com/cdk/)

##### AWS

- [AWS CDK by @KevinZhao](https://github.com/aws-samples/solution-for-deploying-dify-on-aws)

#### Alibaba Cloud

[Alibaba Cloud Computing Nest](https://computenest.console.aliyun.com/service/instance/create/default?type=user&ServiceName=Dify%E7%A4%BE%E5%8C%BA%E7%89%88)

#### Alibaba Cloud Data Management

One-click deploy Dify to Alibaba Cloud with [Alibaba Cloud Data Management](https://www.alibabacloud.com/help/en/dms/dify-in-invitational-preview/)
## Contributing

For those who'd like to contribute code, see our [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md).
At the same time, please consider supporting Dify by sharing it on social media and at events and conferences.

> We are looking for contributors to help translate Dify into languages other than Mandarin or English. If you are interested in helping, please see the [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n/README.md) for more information, and leave us a comment in the `global-users` channel of our [Discord Community Server](https://discord.gg/8Tpq4AcN9c).

**Contributors**

<a href="https://github.com/langgenius/dify/graphs/contributors">
  <img src="https://contrib.rocks/image?repo=langgenius/dify" />
</a>
## Community & Contact

* [GitHub Discussion](https://github.com/langgenius/dify/discussions). Best for: sharing feedback and asking questions.
* [GitHub Issues](https://github.com/langgenius/dify/issues). Best for: bugs you encounter using Dify.AI, and feature proposals. See our [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md).
* [Discord](https://discord.gg/FngNHpbcY7). Best for: sharing your applications and hanging out with the community.
* [X(Twitter)](https://twitter.com/dify_ai). Best for: sharing your applications and hanging out with the community.

## Star History

[![Star History Chart](https://api.star-history.com/svg?repos=langgenius/dify&type=Date)](https://star-history.com/#langgenius/dify&Date)
## Security Disclosure

To protect your privacy, please avoid posting security issues on GitHub. Instead, send your questions to security@dify.ai and we will provide you with a more detailed answer.

## License

This repository is available under the [Dify Open Source License](LICENSE), which is essentially Apache 2.0 with a few additional restrictions.
README_PT.md
@ -1,261 +0,0 @@
|
||||

|
||||
<p align="center">
|
||||
📌 <a href="https://dify.ai/blog/introducing-dify-workflow-file-upload-a-demo-on-ai-podcast">Introduzindo o Dify Workflow com Upload de Arquivo: Recrie o Podcast Google NotebookLM</a>
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
<a href="https://cloud.dify.ai">Dify Cloud</a> ·
|
||||
<a href="https://docs.dify.ai/getting-started/install-self-hosted">Auto-hospedagem</a> ·
|
||||
<a href="https://docs.dify.ai">Documentação</a> ·
|
||||
<a href="https://dify.ai/pricing">Visão geral das edições do Dify</a>
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
<a href="https://dify.ai" target="_blank">
|
||||
<img alt="Static Badge" src="https://img.shields.io/badge/Product-F04438"></a>
|
||||
<a href="https://dify.ai/pricing" target="_blank">
|
||||
<img alt="Static Badge" src="https://img.shields.io/badge/free-pricing?logo=free&color=%20%23155EEF&label=pricing&labelColor=%20%23528bff"></a>
|
||||
<a href="https://discord.gg/FngNHpbcY7" target="_blank">
|
||||
<img src="https://img.shields.io/discord/1082486657678311454?logo=discord&labelColor=%20%235462eb&logoColor=%20%23f5f5f5&color=%20%235462eb"
|
||||
alt="chat on Discord"></a>
|
||||
<a href="https://reddit.com/r/difyai" target="_blank">
|
||||
<img src="https://img.shields.io/reddit/subreddit-subscribers/difyai?style=plastic&logo=reddit&label=r%2Fdifyai&labelColor=white"
|
||||
alt="Follow Reddit"></a>
|
||||
<a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
|
||||
<img src="https://img.shields.io/twitter/follow/dify_ai?logo=X&color=%20%23f5f5f5"
|
||||
alt="follow on X(Twitter)"></a>
|
||||
<a href="https://www.linkedin.com/company/langgenius/" target="_blank">
|
||||
<img src="https://custom-icon-badges.demolab.com/badge/LinkedIn-0A66C2?logo=linkedin-white&logoColor=fff"
|
||||
alt="follow on LinkedIn"></a>
|
||||
<a href="https://hub.docker.com/u/langgenius" target="_blank">
|
||||
<img alt="Docker Pulls" src="https://img.shields.io/docker/pulls/langgenius/dify-web?labelColor=%20%23FDB062&color=%20%23f79009"></a>
|
||||
<a href="https://github.com/langgenius/dify/graphs/commit-activity" target="_blank">
|
||||
<img alt="Commits last month" src="https://img.shields.io/github/commit-activity/m/langgenius/dify?labelColor=%20%2332b583&color=%20%2312b76a"></a>
|
||||
<a href="https://github.com/langgenius/dify/" target="_blank">
|
||||
<img alt="Issues closed" src="https://img.shields.io/github/issues-search?query=repo%3Alanggenius%2Fdify%20is%3Aclosed&label=issues%20closed&labelColor=%20%237d89b0&color=%20%235d6b98"></a>
|
||||
<a href="https://github.com/langgenius/dify/discussions/" target="_blank">
|
||||
<img alt="Discussion posts" src="https://img.shields.io/github/discussions/langgenius/dify?labelColor=%20%239b8afb&color=%20%237a5af8"></a>
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
<a href="./README.md"><img alt="README em Inglês" src="https://img.shields.io/badge/English-d9d9d9"></a>
|
||||
<a href="./README_CN.md"><img alt="简体中文版自述文件" src="https://img.shields.io/badge/简体中文-d9d9d9"></a>
|
||||
<a href="./README_JA.md"><img alt="日本語のREADME" src="https://img.shields.io/badge/日本語-d9d9d9"></a>
|
||||
<a href="./README_ES.md"><img alt="README em Espanhol" src="https://img.shields.io/badge/Español-d9d9d9"></a>
|
||||
<a href="./README_FR.md"><img alt="README em Francês" src="https://img.shields.io/badge/Français-d9d9d9"></a>
|
||||
<a href="./README_KL.md"><img alt="README tlhIngan Hol" src="https://img.shields.io/badge/Klingon-d9d9d9"></a>
|
||||
<a href="./README_KR.md"><img alt="README em Coreano" src="https://img.shields.io/badge/한국어-d9d9d9"></a>
|
||||
<a href="./README_AR.md"><img alt="README em Árabe" src="https://img.shields.io/badge/العربية-d9d9d9"></a>
|
||||
<a href="./README_TR.md"><img alt="README em Turco" src="https://img.shields.io/badge/Türkçe-d9d9d9"></a>
|
||||
<a href="./README_VI.md"><img alt="README em Vietnamita" src="https://img.shields.io/badge/Ti%E1%BA%BFng%20Vi%E1%BB%87t-d9d9d9"></a>
|
||||
<a href="./README_PT.md"><img alt="README em Português - BR" src="https://img.shields.io/badge/Portugu%C3%AAs-BR?style=flat&label=BR&color=d9d9d9"></a>
|
||||
<a href="./README_BN.md"><img alt="README in বাংলা" src="https://img.shields.io/badge/বাংলা-d9d9d9"></a>
|
||||
</p>
|
||||
|
||||
Dify is an open-source LLM app development platform. Its intuitive interface combines AI workflow, RAG pipeline, agent capabilities, model management, observability features and more, letting you quickly go from prototype to production. Here's a list of the core features:
</br> </br>

**1. Workflow**:
Build and test powerful AI workflows on a visual canvas, leveraging all of the following features and beyond.

**2. Comprehensive model support**:
Seamless integration with hundreds of proprietary and open-source LLMs from a variety of providers and self-hosted solutions, covering GPT, Mistral, Llama3, and any OpenAI API-compatible model. The full list of supported providers can be found [here](https://docs.dify.ai/getting-started/readme/model-providers).

![providers-v5](https://github.com/langgenius/dify/assets/13230914/5a17bdbe-097a-4100-8363-40255b70f6e3)

**3. Prompt IDE**:
Intuitive interface for crafting prompts, comparing model performance, and adding features such as text-to-speech to a chat-based app.

**4. RAG Pipeline**:
Extensive RAG capabilities covering everything from document ingestion to retrieval, with native support for text extraction from PDFs, PPTs, and other common document formats.

**5. Agent capabilities**:
You can define agents based on LLM Function Calling or ReAct and add pre-built or custom tools for the agent. Dify offers 50+ built-in tools for AI agents, such as Google Search, DALL·E, Stable Diffusion, and WolframAlpha.

**6. LLMOps**:
Monitor and analyze application logs and performance over time. You can continuously improve prompts, datasets, and models based on production data and annotations.

**7. Backend as a Service**:
All of Dify's features come with corresponding APIs, so you can effortlessly integrate Dify into your own business logic.
## Feature Comparison

<table style="width: 100%;">
  <tr><th align="center">Feature</th><th align="center">Dify.AI</th><th align="center">LangChain</th><th align="center">Flowise</th><th align="center">OpenAI Assistants API</th></tr>
  <tr><td align="center">Programming Approach</td><td align="center">API + App-oriented</td><td align="center">Python Code</td><td align="center">App-oriented</td><td align="center">API-oriented</td></tr>
  <tr><td align="center">Supported LLMs</td><td align="center">Rich Variety</td><td align="center">Rich Variety</td><td align="center">Rich Variety</td><td align="center">OpenAI-only</td></tr>
  <tr><td align="center">RAG Engine</td><td align="center">✅</td><td align="center">✅</td><td align="center">✅</td><td align="center">✅</td></tr>
  <tr><td align="center">Agent</td><td align="center">✅</td><td align="center">✅</td><td align="center">❌</td><td align="center">✅</td></tr>
  <tr><td align="center">Workflow</td><td align="center">✅</td><td align="center">❌</td><td align="center">✅</td><td align="center">❌</td></tr>
  <tr><td align="center">Observability</td><td align="center">✅</td><td align="center">✅</td><td align="center">❌</td><td align="center">❌</td></tr>
  <tr><td align="center">Enterprise Features (SSO/Access control)</td><td align="center">✅</td><td align="center">❌</td><td align="center">❌</td><td align="center">❌</td></tr>
  <tr><td align="center">Local Deployment</td><td align="center">✅</td><td align="center">✅</td><td align="center">✅</td><td align="center">❌</td></tr>
</table>
## Using Dify

- **Cloud </br>**
  We host a [Dify Cloud](https://dify.ai) service for anyone to try with zero setup. It provides all the capabilities of the self-hosted version, including 200 free GPT-4 calls in the sandbox plan.

- **Self-hosting Dify Community Edition</br>**
  Quickly set up Dify in your environment with this [starter guide](#quick-start).
  Use our [documentation](https://docs.dify.ai) for further references and more detailed instructions.

- **Dify for enterprises / organizations</br>**
  We provide additional enterprise-centric features. [Log your questions for us through this chatbot](https://udify.app/chat/22L1zSxg6yW1cWQg) or [email us](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry) to discuss enterprise needs. </br>
  > For startups and small businesses using AWS, check out [Dify Premium on AWS Marketplace](https://aws.amazon.com/marketplace/pp/prodview-t22mebxzwjhu6) and deploy it to your own AWS VPC with one click. It's an affordable AMI offering with the option to create apps with custom logo and branding.
## Staying Ahead

Star Dify on GitHub and be instantly notified of new releases.

![star-us](https://github.com/langgenius/dify/assets/13230914/b823edc1-6388-4e25-ad45-2f6b187adbb4)
## Quick start

> Before installing Dify, make sure your machine meets the following minimum system requirements:
>
> - CPU >= 2 Cores
> - RAM >= 4 GiB

</br>

The easiest way to start the Dify server is to run our [docker-compose.yml](docker/docker-compose.yaml) file. Before running the installation command, make sure that [Docker](https://docs.docker.com/get-docker/) and [Docker Compose](https://docs.docker.com/compose/install/) are installed on your machine:

```bash
cd docker
cp .env.example .env
docker compose up -d
```

After running, you can access the Dify dashboard in your browser at [http://localhost/install](http://localhost/install) and start the initialization process.
> If you'd like to contribute to Dify or do additional development, refer to our [guide to deploying from source code](https://docs.dify.ai/getting-started/install-self-hosted/local-source-code).
## Next steps

If you need to customize the configuration, please refer to the comments in our [.env.example](docker/.env.example) file and update the corresponding values in your `.env` file. Additionally, you might need to make adjustments to the `docker-compose.yaml` file itself, such as changing image versions, port mappings, or volume mounts, based on your specific deployment environment and requirements. After making any changes, please re-run `docker compose up -d`. You can find the full list of available environment variables [here](https://docs.dify.ai/getting-started/install-self-hosted/environments).

If you'd like to configure a highly-available setup, there are community-contributed [Helm Charts](https://helm.sh/) and YAML files which allow Dify to be deployed on Kubernetes.

- [Helm Chart by @LeoQuote](https://github.com/douban/charts/tree/master/charts/dify)
- [Helm Chart by @BorisPolonsky](https://github.com/BorisPolonsky/dify-helm)
- [Helm Chart by @magicsong](https://github.com/magicsong/ai-charts)
- [YAML file by @Winson-030](https://github.com/Winson-030/dify-kubernetes)
- [YAML file by @wyy-holding](https://github.com/wyy-holding/dify-k8s)
- [🚀 NEW! YAML files (compatible with Dify v1.6.0) by @Zhoneym](https://github.com/Zhoneym/DifyAI-Kubernetes)
#### Deploying with Terraform

Deploy Dify to a cloud platform with a single click using [terraform](https://www.terraform.io/)

##### Azure Global

- [Azure Terraform by @nikawang](https://github.com/nikawang/dify-azure-terraform)

##### Google Cloud

- [Google Cloud Terraform by @sotazum](https://github.com/DeNA/dify-google-cloud-terraform)

#### Deploying with AWS CDK

Deploy Dify to AWS using [CDK](https://aws.amazon.com/cdk/)

##### AWS

- [AWS CDK by @KevinZhao](https://github.com/aws-samples/solution-for-deploying-dify-on-aws)

#### Alibaba Cloud

[Alibaba Cloud Computing Nest](https://computenest.console.aliyun.com/service/instance/create/default?type=user&ServiceName=Dify%E7%A4%BE%E5%8C%BA%E7%89%88)

#### Alibaba Cloud Data Management

One-click deploy Dify to Alibaba Cloud with [Alibaba Cloud Data Management](https://www.alibabacloud.com/help/en/dms/dify-in-invitational-preview/)
## Contributing

For those who'd like to contribute code, see our [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md).
At the same time, please consider supporting Dify by sharing it on social media and at events and conferences.

> We are looking for contributors to help translate Dify into languages other than Mandarin or English. If you are interested in helping, please see the [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n/README.md) for more information, and leave us a comment in the `global-users` channel of our [Discord Community Server](https://discord.gg/8Tpq4AcN9c).

**Contributors**

<a href="https://github.com/langgenius/dify/graphs/contributors">
  <img src="https://contrib.rocks/image?repo=langgenius/dify" />
</a>
## Community & Contact

* [GitHub Discussion](https://github.com/langgenius/dify/discussions). Best for: sharing feedback and asking questions.
* [GitHub Issues](https://github.com/langgenius/dify/issues). Best for: bugs you encounter using Dify.AI, and feature proposals. See our [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md).
* [Discord](https://discord.gg/FngNHpbcY7). Best for: sharing your applications and hanging out with the community.
* [X(Twitter)](https://twitter.com/dify_ai). Best for: sharing your applications and hanging out with the community.

## Star History

[![Star History Chart](https://api.star-history.com/svg?repos=langgenius/dify&type=Date)](https://star-history.com/#langgenius/dify&Date)
## Security Disclosure

To protect your privacy, please avoid posting security issues on GitHub. Instead, send your questions to security@dify.ai and we will provide you with a more detailed answer.

## License

This repository is available under the [Dify Open Source License](LICENSE), which is essentially Apache 2.0 with a few additional restrictions.
README_VI.md
@ -1,259 +0,0 @@
|
||||

|
||||
|
||||
<p align="center">
|
||||
<a href="https://cloud.dify.ai">Dify Cloud</a> ·
|
||||
<a href="https://docs.dify.ai/getting-started/install-self-hosted">Tự triển khai</a> ·
|
||||
<a href="https://docs.dify.ai">Tài liệu</a> ·
|
||||
<a href="https://dify.ai/pricing">Tổng quan các lựa chọn sản phẩm Dify</a>
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
<a href="https://dify.ai" target="_blank">
|
||||
<img alt="Static Badge" src="https://img.shields.io/badge/Product-F04438"></a>
|
||||
<a href="https://dify.ai/pricing" target="_blank">
|
||||
<img alt="Static Badge" src="https://img.shields.io/badge/free-pricing?logo=free&color=%20%23155EEF&label=pricing&labelColor=%20%23528bff"></a>
|
||||
<a href="https://discord.gg/FngNHpbcY7" target="_blank">
|
||||
<img src="https://img.shields.io/discord/1082486657678311454?logo=discord&labelColor=%20%235462eb&logoColor=%20%23f5f5f5&color=%20%235462eb"
|
||||
alt="chat trên Discord"></a>
|
||||
<a href="https://reddit.com/r/difyai" target="_blank">
|
||||
<img src="https://img.shields.io/reddit/subreddit-subscribers/difyai?style=plastic&logo=reddit&label=r%2Fdifyai&labelColor=white"
|
||||
alt="Follow Reddit"></a>
|
||||
<a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
|
||||
<img src="https://img.shields.io/twitter/follow/dify_ai?logo=X&color=%20%23f5f5f5"
|
||||
alt="theo dõi trên X(Twitter)"></a>
|
||||
<a href="https://www.linkedin.com/company/langgenius/" target="_blank">
|
||||
<img src="https://custom-icon-badges.demolab.com/badge/LinkedIn-0A66C2?logo=linkedin-white&logoColor=fff"
|
||||
alt="theo dõi trên LinkedIn"></a>
|
||||
<a href="https://hub.docker.com/u/langgenius" target="_blank">
|
||||
<img alt="Docker Pulls" src="https://img.shields.io/docker/pulls/langgenius/dify-web?labelColor=%20%23FDB062&color=%20%23f79009"></a>
|
||||
<a href="https://github.com/langgenius/dify/graphs/commit-activity" target="_blank">
|
||||
<img alt="Commits tháng trước" src="https://img.shields.io/github/commit-activity/m/langgenius/dify?labelColor=%20%2332b583&color=%20%2312b76a"></a>
|
||||
<a href="https://github.com/langgenius/dify/" target="_blank">
|
||||
<img alt="Vấn đề đã đóng" src="https://img.shields.io/github/issues-search?query=repo%3Alanggenius%2Fdify%20is%3Aclosed&label=issues%20closed&labelColor=%20%237d89b0&color=%20%235d6b98"></a>
|
||||
<a href="https://github.com/langgenius/dify/discussions/" target="_blank">
|
||||
<img alt="Bài thảo luận" src="https://img.shields.io/github/discussions/langgenius/dify?labelColor=%20%239b8afb&color=%20%237a5af8"></a>
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
<a href="./README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a>
|
||||
<a href="./README_CN.md"><img alt="简体中文版自述文件" src="https://img.shields.io/badge/简体中文-d9d9d9"></a>
|
||||
<a href="./README_JA.md"><img alt="日本語のREADME" src="https://img.shields.io/badge/日本語-d9d9d9"></a>
|
||||
<a href="./README_ES.md"><img alt="README en Español" src="https://img.shields.io/badge/Español-d9d9d9"></a>
|
||||
<a href="./README_FR.md"><img alt="README en Français" src="https://img.shields.io/badge/Français-d9d9d9"></a>
|
||||
<a href="./README_KL.md"><img alt="README tlhIngan Hol" src="https://img.shields.io/badge/Klingon-d9d9d9"></a>
|
||||
<a href="./README_KR.md"><img alt="README in Korean" src="https://img.shields.io/badge/한국어-d9d9d9"></a>
|
||||
<a href="./README_AR.md"><img alt="README بالعربية" src="https://img.shields.io/badge/العربية-d9d9d9"></a>
|
||||
<a href="./README_TR.md"><img alt="Türkçe README" src="https://img.shields.io/badge/Türkçe-d9d9d9"></a>
|
||||
<a href="./README_VI.md"><img alt="README Tiếng Việt" src="https://img.shields.io/badge/Ti%E1%BA%BFng%20Vi%E1%BB%87t-d9d9d9"></a>
|
||||
<a href="./README_BN.md"><img alt="README in বাংলা" src="https://img.shields.io/badge/বাংলা-d9d9d9"></a>
|
||||
</p>
|
||||
|
||||
|
||||
Dify is an open-source LLM app development platform. Its intuitive interface combines AI workflow, RAG pipeline, agent capabilities, model management, observability features and more, letting you quickly go from prototype to production. Here's a list of the core features:
</br> </br>

**1. Workflow**:
Build and test powerful AI workflows on a visual canvas, leveraging all of the following features and beyond.

**2. Comprehensive model support**:
Seamless integration with hundreds of proprietary / open-source LLMs from dozens of inference providers and self-hosted solutions, covering GPT, Mistral, Llama3, and any OpenAI API-compatible model. The full list of supported model providers can be found [here](https://docs.dify.ai/getting-started/readme/model-providers).

![providers-v5](https://github.com/langgenius/dify/assets/13230914/5a17bdbe-097a-4100-8363-40255b70f6e3)

**3. Prompt IDE**:
Intuitive interface for crafting prompts, comparing model performance, and adding features such as text-to-speech to a chat-based app.

**4. RAG Pipeline**:
Extensive RAG capabilities covering everything from document ingestion to retrieval, with out-of-the-box support for text extraction from PDFs, PPTs, and other common document formats.

**5. Agent capabilities**:
You can define agents based on LLM Function Calling or ReAct and add pre-built or custom tools for the agent. Dify provides 50+ built-in tools for AI agents, such as Google Search, DALL·E, Stable Diffusion, and WolframAlpha.

**6. LLMOps**:
Monitor and analyze application logs and performance over time. You can continuously improve prompts, datasets, and models based on production data and annotations.

**7. Backend-as-a-Service**:
All of Dify's offerings come with corresponding APIs, so you can easily integrate Dify into your own business logic.
## Feature Comparison

<table style="width: 100%;">
  <tr><th align="center">Feature</th><th align="center">Dify.AI</th><th align="center">LangChain</th><th align="center">Flowise</th><th align="center">OpenAI Assistants API</th></tr>
  <tr><td align="center">Programming Approach</td><td align="center">API + App-oriented</td><td align="center">Python Code</td><td align="center">App-oriented</td><td align="center">API-oriented</td></tr>
  <tr><td align="center">Supported LLMs</td><td align="center">Rich Variety</td><td align="center">Rich Variety</td><td align="center">Rich Variety</td><td align="center">OpenAI-only</td></tr>
  <tr><td align="center">RAG Engine</td><td align="center">✅</td><td align="center">✅</td><td align="center">✅</td><td align="center">✅</td></tr>
  <tr><td align="center">Agent</td><td align="center">✅</td><td align="center">✅</td><td align="center">❌</td><td align="center">✅</td></tr>
  <tr><td align="center">Workflow</td><td align="center">✅</td><td align="center">❌</td><td align="center">✅</td><td align="center">❌</td></tr>
  <tr><td align="center">Observability</td><td align="center">✅</td><td align="center">✅</td><td align="center">❌</td><td align="center">❌</td></tr>
  <tr><td align="center">Enterprise Features (SSO/Access control)</td><td align="center">✅</td><td align="center">❌</td><td align="center">❌</td><td align="center">❌</td></tr>
  <tr><td align="center">Local Deployment</td><td align="center">✅</td><td align="center">✅</td><td align="center">✅</td><td align="center">❌</td></tr>
</table>
## Using Dify

- **Cloud </br>**
  We host a [Dify Cloud](https://dify.ai) service for anyone to try with zero setup. It provides all the capabilities of the self-hosted version, and includes 200 free GPT-4 calls in the sandbox plan.

- **Self-hosting Dify Community Edition</br>**
  Quickly get Dify running in your environment with this [starter guide](#quick-start).
  Use our [documentation](https://docs.dify.ai) for further references and more detailed instructions.

- **Dify for enterprises / organizations</br>**
  We provide additional enterprise-centric features. [Log your questions for us through this chatbot](https://udify.app/chat/22L1zSxg6yW1cWQg) or [email us](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry) to discuss enterprise needs. </br>
  > For startups and small businesses using AWS, check out [Dify Premium on AWS Marketplace](https://aws.amazon.com/marketplace/pp/prodview-t22mebxzwjhu6) and deploy it to your own AWS VPC with one click. It's an affordable AMI offering with the option to create apps with custom logo and branding.
## Staying Ahead

Star Dify on GitHub and be instantly notified of new releases.

![star-us](https://github.com/langgenius/dify/assets/13230914/b823edc1-6388-4e25-ad45-2f6b187adbb4)
## Quick start

> Before installing Dify, make sure your machine meets the following minimum system requirements:
>
> - CPU >= 2 Cores
> - RAM >= 4 GB

</br>

The easiest way to start the Dify server is to run our [docker-compose.yml](docker/docker-compose.yaml) file. Before running the installation command, make sure that [Docker](https://docs.docker.com/get-docker/) and [Docker Compose](https://docs.docker.com/compose/install/) are installed on your machine:

```bash
cd docker
cp .env.example .env
docker compose up -d
```

After running, you can access the Dify dashboard in your browser at [http://localhost/install](http://localhost/install) and start the initialization process.
> If you'd like to contribute to Dify or do additional development, refer to our [guide to deploying from source code](https://docs.dify.ai/getting-started/install-self-hosted/local-source-code).
## Next steps

If you need to customize the configuration, please refer to the comments in our [.env.example](docker/.env.example) file and update the corresponding values in your `.env` file. Additionally, you might need to make adjustments to the `docker-compose.yaml` file itself, such as changing image versions, port mappings, or volume mounts, based on your specific deployment environment and requirements. After making any changes, please re-run `docker compose up -d`. You can find the full list of available environment variables [here](https://docs.dify.ai/getting-started/install-self-hosted/environments).

If you'd like to configure a highly-available setup, there are community-contributed [Helm Charts](https://helm.sh/) and YAML files which allow Dify to be deployed on Kubernetes.

- [Helm Chart by @LeoQuote](https://github.com/douban/charts/tree/master/charts/dify)
- [Helm Chart by @BorisPolonsky](https://github.com/BorisPolonsky/dify-helm)
- [YAML file by @Winson-030](https://github.com/Winson-030/dify-kubernetes)
- [YAML file by @wyy-holding](https://github.com/wyy-holding/dify-k8s)
- [🚀 NEW! YAML files (Supports Dify v1.6.0) by @Zhoneym](https://github.com/Zhoneym/DifyAI-Kubernetes)
#### Deploying with Terraform

Deploy Dify to a cloud platform with a single click using [terraform](https://www.terraform.io/)

##### Azure Global

- [Azure Terraform by @nikawang](https://github.com/nikawang/dify-azure-terraform)

##### Google Cloud

- [Google Cloud Terraform by @sotazum](https://github.com/DeNA/dify-google-cloud-terraform)

#### Deploying with AWS CDK

Deploy Dify to AWS using [CDK](https://aws.amazon.com/cdk/)

##### AWS

- [AWS CDK by @KevinZhao](https://github.com/aws-samples/solution-for-deploying-dify-on-aws)

#### Alibaba Cloud

[Alibaba Cloud Computing Nest](https://computenest.console.aliyun.com/service/instance/create/default?type=user&ServiceName=Dify%E7%A4%BE%E5%8C%BA%E7%89%88)

#### Alibaba Cloud Data Management

One-click deploy Dify to Alibaba Cloud with [Alibaba Cloud Data Management](https://www.alibabacloud.com/help/en/dms/dify-in-invitational-preview/)
## Contributing

For those who'd like to contribute code, see our [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md).
At the same time, please consider supporting Dify by sharing it on social media and at events and conferences.

> We are looking for contributors to help translate Dify into languages other than Mandarin or English. If you are interested in helping, please see the [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n/README.md) for more information, and leave us a comment in the `global-users` channel of our [Discord Community Server](https://discord.gg/8Tpq4AcN9c).

**Contributors**

<a href="https://github.com/langgenius/dify/graphs/contributors">
  <img src="https://contrib.rocks/image?repo=langgenius/dify" />
</a>
## Community & Contact

* [GitHub Discussion](https://github.com/langgenius/dify/discussions). Best for: sharing feedback and asking questions.
* [GitHub Issues](https://github.com/langgenius/dify/issues). Best for: bugs you encounter using Dify.AI, and feature proposals. See our [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md).
* [Discord](https://discord.gg/FngNHpbcY7). Best for: sharing your applications and hanging out with the community.
* [X(Twitter)](https://twitter.com/dify_ai). Best for: sharing your applications and hanging out with the community.

## Star History

[![Star History Chart](https://api.star-history.com/svg?repos=langgenius/dify&type=Date)](https://star-history.com/#langgenius/dify&Date)
## Security Disclosure

To protect your privacy, please avoid posting security issues on GitHub. Instead, send your questions to security@dify.ai and we will provide you with a more detailed answer.

## License

This repository is available under the [Dify Open Source License](LICENSE), which is essentially Apache 2.0 with a few additional restrictions.
@ -4,6 +4,11 @@
|
||||
# Alternatively you can set it with `SECRET_KEY` environment variable.
|
||||
SECRET_KEY=
|
||||
|
||||
# Ensure UTF-8 encoding
|
||||
LANG=en_US.UTF-8
|
||||
LC_ALL=en_US.UTF-8
|
||||
PYTHONIOENCODING=utf-8
|
||||
|
||||
# Console API base URL
|
||||
CONSOLE_API_URL=http://localhost:5001
|
||||
CONSOLE_WEB_URL=http://localhost:3000
|
||||
@ -37,6 +42,15 @@ REDIS_PORT=6379
|
||||
REDIS_USERNAME=
|
||||
REDIS_PASSWORD=difyai123456
|
||||
REDIS_USE_SSL=false
# SSL configuration for Redis (when REDIS_USE_SSL=true)
# REDIS_SSL_CERT_REQS options: CERT_NONE, CERT_OPTIONAL, CERT_REQUIRED
REDIS_SSL_CERT_REQS=CERT_NONE
# Path to CA certificate file for SSL verification
REDIS_SSL_CA_CERTS=
# Path to client certificate file for SSL authentication
REDIS_SSL_CERTFILE=
# Path to client private key file for SSL authentication
REDIS_SSL_KEYFILE=
REDIS_DB=0
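These TLS settings can be smoke-tested outside Dify with `redis-cli` (a sketch; it assumes redis-cli 6+ built with TLS support, and reuses the host, port, password, and certificate paths configured above):

```bash
redis-cli -h localhost -p 6379 -a difyai123456 \
  --tls \
  --cacert "$REDIS_SSL_CA_CERTS" \
  --cert "$REDIS_SSL_CERTFILE" \
  --key "$REDIS_SSL_KEYFILE" \
  ping   # expect PONG once the TLS handshake and auth succeed
```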
|
||||
|
||||
# redis Sentinel configuration.
|
||||
@ -61,6 +75,8 @@ DB_PASSWORD=difyai123456
|
||||
DB_HOST=localhost
|
||||
DB_PORT=5432
|
||||
DB_DATABASE=dify
|
||||
SQLALCHEMY_POOL_PRE_PING=true
|
||||
SQLALCHEMY_POOL_TIMEOUT=30
|
||||
|
||||
# Storage configuration
|
||||
# use for store upload files, private keys...
|
||||
@ -227,6 +243,7 @@ TABLESTORE_ENDPOINT=https://instance-name.cn-hangzhou.ots.aliyuncs.com
|
||||
TABLESTORE_INSTANCE_NAME=instance-name
|
||||
TABLESTORE_ACCESS_KEY_ID=xxx
|
||||
TABLESTORE_ACCESS_KEY_SECRET=xxx
|
||||
TABLESTORE_NORMALIZE_FULLTEXT_BM25_SCORE=false
|
||||
|
||||
# Tidb Vector configuration
|
||||
TIDB_VECTOR_HOST=xxx.eu-central-1.xxx.aws.tidbcloud.com
|
||||
@ -287,6 +304,8 @@ BAIDU_VECTOR_DB_API_KEY=dify
|
||||
BAIDU_VECTOR_DB_DATABASE=dify
|
||||
BAIDU_VECTOR_DB_SHARD=1
|
||||
BAIDU_VECTOR_DB_REPLICAS=3
|
||||
BAIDU_VECTOR_DB_INVERTED_INDEX_ANALYZER=DEFAULT_ANALYZER
|
||||
BAIDU_VECTOR_DB_INVERTED_INDEX_PARSER_MODE=COARSE_MODE
|
||||
|
||||
# Upstash configuration
|
||||
UPSTASH_VECTOR_URL=your-server-url
|
||||
@ -312,7 +331,7 @@ MATRIXONE_DATABASE=dify
|
||||
LINDORM_URL=http://ld-*******************-proxy-search-pub.lindorm.aliyuncs.com:30070
|
||||
LINDORM_USERNAME=admin
|
||||
LINDORM_PASSWORD=admin
|
||||
|
||||
LINDORM_USING_UGC=True
|
||||
LINDORM_QUERY_TIMEOUT=1
|
||||
|
||||
# OceanBase Vector configuration
|
||||
@ -389,6 +408,9 @@ SSRF_DEFAULT_TIME_OUT=5
|
||||
SSRF_DEFAULT_CONNECT_TIME_OUT=5
|
||||
SSRF_DEFAULT_READ_TIME_OUT=5
|
||||
SSRF_DEFAULT_WRITE_TIME_OUT=5
|
||||
SSRF_POOL_MAX_CONNECTIONS=100
|
||||
SSRF_POOL_MAX_KEEPALIVE_CONNECTIONS=20
|
||||
SSRF_POOL_KEEPALIVE_EXPIRY=5.0
|
||||
|
||||
BATCH_UPLOAD_LIMIT=10
|
||||
KEYWORD_DATA_SOURCE_TYPE=database
|
||||
@ -399,6 +421,10 @@ WORKFLOW_FILE_UPLOAD_LIMIT=10
|
||||
# CODE EXECUTION CONFIGURATION
|
||||
CODE_EXECUTION_ENDPOINT=http://127.0.0.1:8194
|
||||
CODE_EXECUTION_API_KEY=dify-sandbox
|
||||
CODE_EXECUTION_SSL_VERIFY=True
|
||||
CODE_EXECUTION_POOL_MAX_CONNECTIONS=100
|
||||
CODE_EXECUTION_POOL_MAX_KEEPALIVE_CONNECTIONS=20
|
||||
CODE_EXECUTION_POOL_KEEPALIVE_EXPIRY=5.0
|
||||
CODE_MAX_NUMBER=9223372036854775807
|
||||
CODE_MIN_NUMBER=-9223372036854775808
|
||||
CODE_MAX_STRING_LENGTH=80000
|
||||
@ -442,9 +468,18 @@ INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH=4000
|
||||
WORKFLOW_MAX_EXECUTION_STEPS=500
|
||||
WORKFLOW_MAX_EXECUTION_TIME=1200
|
||||
WORKFLOW_CALL_MAX_DEPTH=5
|
||||
WORKFLOW_PARALLEL_DEPTH_LIMIT=3
|
||||
MAX_VARIABLE_SIZE=204800
|
||||
|
||||
# GraphEngine Worker Pool Configuration
|
||||
# Minimum number of workers per GraphEngine instance (default: 1)
|
||||
GRAPH_ENGINE_MIN_WORKERS=1
|
||||
# Maximum number of workers per GraphEngine instance (default: 10)
|
||||
GRAPH_ENGINE_MAX_WORKERS=10
|
||||
# Queue depth threshold that triggers worker scale up (default: 3)
|
||||
GRAPH_ENGINE_SCALE_UP_THRESHOLD=3
|
||||
# Seconds of idle time before scaling down workers (default: 5.0)
|
||||
GRAPH_ENGINE_SCALE_DOWN_IDLE_TIME=5.0
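Together these four variables form a simple autoscaler: workers are added while the queue depth stays above the scale-up threshold, and removed after the idle timeout elapses. An illustrative tuning for a deployment running many concurrent workflows (the numbers are assumptions, not recommendations):

```bash
GRAPH_ENGINE_MIN_WORKERS=2              # keep a warm worker even when idle
GRAPH_ENGINE_MAX_WORKERS=32             # allow deeper fan-out for parallel branches
GRAPH_ENGINE_SCALE_UP_THRESHOLD=5       # tolerate a deeper queue before adding workers
GRAPH_ENGINE_SCALE_DOWN_IDLE_TIME=30.0  # scale down lazily to avoid thrashing
```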
|
||||
|
||||
# Workflow storage configuration
|
||||
# Options: rdbms, hybrid
|
||||
# rdbms: Use only the relational database (default)
|
||||
@ -463,6 +498,13 @@ API_WORKFLOW_NODE_EXECUTION_REPOSITORY=repositories.sqlalchemy_api_workflow_node
|
||||
|
||||
# API workflow run repository implementation
|
||||
API_WORKFLOW_RUN_REPOSITORY=repositories.sqlalchemy_api_workflow_run_repository.DifyAPISQLAlchemyWorkflowRunRepository
|
||||
# Workflow log cleanup configuration
|
||||
# Enable automatic cleanup of workflow run logs to manage database size
|
||||
WORKFLOW_LOG_CLEANUP_ENABLED=true
|
||||
# Number of days to retain workflow run logs (default: 30 days)
|
||||
WORKFLOW_LOG_RETENTION_DAYS=30
|
||||
# Batch size for workflow log cleanup operations (default: 100)
|
||||
WORKFLOW_LOG_CLEANUP_BATCH_SIZE=100
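For instance, a deployment with audit requirements might keep a full year of run history and clean up in larger batches (illustrative values only, built from the variables above):

```bash
WORKFLOW_LOG_CLEANUP_ENABLED=true
WORKFLOW_LOG_RETENTION_DAYS=365
WORKFLOW_LOG_CLEANUP_BATCH_SIZE=500
```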
|
||||
|
||||
# App configuration
|
||||
APP_MAX_EXECUTION_TIME=1200
|
||||
@ -507,6 +549,7 @@ ENDPOINT_URL_TEMPLATE=http://localhost:5002/e/{hook_id}
|
||||
|
||||
# Reset password token expiry minutes
|
||||
RESET_PASSWORD_TOKEN_EXPIRY_MINUTES=5
|
||||
EMAIL_REGISTER_TOKEN_EXPIRY_MINUTES=5
|
||||
CHANGE_EMAIL_TOKEN_EXPIRY_MINUTES=5
|
||||
OWNER_TRANSFER_TOKEN_EXPIRY_MINUTES=5
|
||||
|
||||
@ -542,3 +585,11 @@ QUEUE_MONITOR_THRESHOLD=200
|
||||
QUEUE_MONITOR_ALERT_EMAILS=
|
||||
# Monitor interval in minutes, default is 30 minutes
|
||||
QUEUE_MONITOR_INTERVAL=30
|
||||
|
||||
# Swagger UI configuration
|
||||
SWAGGER_UI_ENABLED=true
|
||||
SWAGGER_UI_PATH=/swagger-ui.html
|
||||
|
||||
# Whether to encrypt dataset IDs when exporting DSL files (default: true)
|
||||
# Set to false to export dataset IDs as plain text for easier cross-environment import
|
||||
DSL_EXPORT_ENCRYPT_DATASET_ID=true
|
||||
|
||||
api/.importlinter (new file)
@ -0,0 +1,105 @@
|
||||
[importlinter]
|
||||
root_packages =
|
||||
core
|
||||
configs
|
||||
controllers
|
||||
models
|
||||
tasks
|
||||
services
|
||||
|
||||
[importlinter:contract:workflow]
|
||||
name = Workflow
|
||||
type = layers
|
||||
layers =
|
||||
graph_engine
|
||||
graph_events
|
||||
graph
|
||||
nodes
|
||||
node_events
|
||||
entities
|
||||
containers =
|
||||
core.workflow
|
||||
ignore_imports =
|
||||
core.workflow.nodes.base.node -> core.workflow.graph_events
|
||||
core.workflow.nodes.iteration.iteration_node -> core.workflow.graph_events
|
||||
core.workflow.nodes.loop.loop_node -> core.workflow.graph_events
|
||||
|
||||
core.workflow.nodes.node_factory -> core.workflow.graph
|
||||
core.workflow.nodes.iteration.iteration_node -> core.workflow.graph_engine
|
||||
core.workflow.nodes.iteration.iteration_node -> core.workflow.graph
|
||||
core.workflow.nodes.iteration.iteration_node -> core.workflow.graph_engine.command_channels
|
||||
core.workflow.nodes.loop.loop_node -> core.workflow.graph_engine
|
||||
core.workflow.nodes.loop.loop_node -> core.workflow.graph
|
||||
core.workflow.nodes.loop.loop_node -> core.workflow.graph_engine.command_channels
|
||||
|
||||
[importlinter:contract:rsc]
|
||||
name = RSC
|
||||
type = layers
|
||||
layers =
|
||||
graph_engine
|
||||
response_coordinator
|
||||
containers =
|
||||
core.workflow.graph_engine
|
||||
|
||||
[importlinter:contract:worker]
|
||||
name = Worker
|
||||
type = layers
|
||||
layers =
|
||||
graph_engine
|
||||
worker
|
||||
containers =
|
||||
core.workflow.graph_engine
|
||||
|
||||
[importlinter:contract:graph-engine-architecture]
|
||||
name = Graph Engine Architecture
|
||||
type = layers
|
||||
layers =
|
||||
graph_engine
|
||||
orchestration
|
||||
command_processing
|
||||
event_management
|
||||
error_handler
|
||||
graph_traversal
|
||||
graph_state_manager
|
||||
worker_management
|
||||
domain
|
||||
containers =
|
||||
core.workflow.graph_engine
|
||||
|
||||
[importlinter:contract:domain-isolation]
|
||||
name = Domain Model Isolation
|
||||
type = forbidden
|
||||
source_modules =
|
||||
core.workflow.graph_engine.domain
|
||||
forbidden_modules =
|
||||
core.workflow.graph_engine.worker_management
|
||||
core.workflow.graph_engine.command_channels
|
||||
core.workflow.graph_engine.layers
|
||||
core.workflow.graph_engine.protocols
|
||||
|
||||
[importlinter:contract:worker-management]
|
||||
name = Worker Management
|
||||
type = forbidden
|
||||
source_modules =
|
||||
core.workflow.graph_engine.worker_management
|
||||
forbidden_modules =
|
||||
core.workflow.graph_engine.orchestration
|
||||
core.workflow.graph_engine.command_processing
|
||||
core.workflow.graph_engine.event_management
|
||||
|
||||
|
||||
[importlinter:contract:graph-traversal-components]
|
||||
name = Graph Traversal Components
|
||||
type = layers
|
||||
layers =
|
||||
edge_processor
|
||||
skip_propagator
|
||||
containers =
|
||||
core.workflow.graph_engine.graph_traversal
|
||||
|
||||
[importlinter:contract:command-channels]
|
||||
name = Command Channels Independence
|
||||
type = independence
|
||||
modules =
|
||||
core.workflow.graph_engine.command_channels.in_memory_channel
|
||||
core.workflow.graph_engine.command_channels.redis_channel
|
||||
@ -5,7 +5,7 @@ line-length = 120
|
||||
quote-style = "double"
|
||||
|
||||
[lint]
|
||||
|
||||
preview = true
|
||||
select = [
|
||||
"B", # flake8-bugbear rules
|
||||
"C4", # flake8-comprehensions
|
||||
@ -30,6 +30,7 @@ select = [
|
||||
"RUF022", # unsorted-dunder-all
|
||||
"S506", # unsafe-yaml-load
|
||||
"SIM", # flake8-simplify rules
|
||||
"T201", # print-found
|
||||
"TRY400", # error-instead-of-exception
|
||||
"TRY401", # verbose-log-message
|
||||
"UP", # pyupgrade rules
|
||||
@ -42,6 +43,10 @@ select = [
|
||||
"S301", # suspicious-pickle-usage, disallow use of `pickle` and its wrappers.
|
||||
"S302", # suspicious-marshal-usage, disallow use of `marshal` module
|
||||
"S311", # suspicious-non-cryptographic-random-usage
|
||||
"G001", # don't use str format to logging messages
|
||||
"G003", # don't use + in logging messages
|
||||
"G004", # don't use f-strings to format logging messages
|
||||
"UP042", # use StrEnum
|
||||
]
|
||||
|
||||
ignore = [
|
||||
@ -61,6 +66,7 @@ ignore = [
|
||||
"B006", # mutable-argument-default
|
||||
"B007", # unused-loop-control-variable
|
||||
"B026", # star-arg-unpacking-after-keyword-arg
|
||||
"B901", # allow return in yield
|
||||
"B903", # class-as-data-structure
|
||||
"B904", # raise-without-from-inside-except
|
||||
"B905", # zip-without-explicit-strict
|
||||
@ -86,11 +92,18 @@ ignore = [
|
||||
"configs/*" = [
|
||||
"N802", # invalid-function-name
|
||||
]
|
||||
"core/model_runtime/callbacks/base_callback.py" = [
|
||||
"T201",
|
||||
]
|
||||
"core/workflow/callbacks/workflow_logging_callback.py" = [
|
||||
"T201",
|
||||
]
|
||||
"libs/gmpy2_pkcs10aep_cipher.py" = [
|
||||
"N803", # invalid-argument-name
|
||||
]
|
||||
"tests/*" = [
|
||||
"F811", # redefined-while-unused
|
||||
"T201", # allow print in tests
|
||||
]
|
||||
|
||||
[lint.pyflakes]
|
||||
|
||||
@ -4,7 +4,7 @@ FROM python:3.12-slim-bookworm AS base
|
||||
WORKDIR /app/api
|
||||
|
||||
# Install uv
|
||||
|
||||
ENV UV_VERSION=0.8.9
|
||||
|
||||
RUN pip install --no-cache-dir uv==${UV_VERSION}
|
||||
|
||||
@ -19,7 +19,7 @@ RUN apt-get update \
|
||||
|
||||
# Install Python dependencies
|
||||
COPY pyproject.toml uv.lock ./
|
||||
RUN uv sync --locked
|
||||
RUN uv sync --locked --no-dev
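The `--no-dev` flag makes uv install only the locked production dependencies, keeping dev-only tools such as pytest and ruff out of the final image. Building then works as usual (a sketch; the image tag is arbitrary):

```bash
docker build -t dify-api:local ./api
```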
|
||||
|
||||
# production stage
|
||||
FROM base AS production
|
||||
@ -37,6 +37,11 @@ EXPOSE 5001
|
||||
# set timezone
|
||||
ENV TZ=UTC
|
||||
|
||||
# Set UTF-8 locale
|
||||
ENV LANG=en_US.UTF-8
|
||||
ENV LC_ALL=en_US.UTF-8
|
||||
ENV PYTHONIOENCODING=utf-8
|
||||
|
||||
WORKDIR /app/api
|
||||
|
||||
RUN \
|
||||
|
||||
@ -3,7 +3,7 @@
|
||||
## Usage
|
||||
|
||||
> [!IMPORTANT]
|
||||
>
|
||||
|
||||
> In the v1.3.0 release, `poetry` has been replaced with
|
||||
> [`uv`](https://docs.astral.sh/uv/) as the package manager
|
||||
> for Dify API backend service.
|
||||
@ -20,25 +20,29 @@
|
||||
cd ../api
|
||||
```
|
||||
|
||||
2. Copy `.env.example` to `.env`
|
||||
1. Copy `.env.example` to `.env`
|
||||
|
||||
```cli
|
||||
cp .env.example .env
|
||||
cp .env.example .env
|
||||
```
|
||||
3. Generate a `SECRET_KEY` in the `.env` file.
|
||||
|
||||
1. Generate a `SECRET_KEY` in the `.env` file.
|
||||
|
||||
bash for Linux
|
||||
|
||||
```bash for Linux
|
||||
sed -i "/^SECRET_KEY=/c\SECRET_KEY=$(openssl rand -base64 42)" .env
|
||||
```
|
||||
|
||||
bash for Mac
|
||||
|
||||
```bash for Mac
|
||||
secret_key=$(openssl rand -base64 42)
|
||||
sed -i '' "/^SECRET_KEY=/c\\
|
||||
SECRET_KEY=${secret_key}" .env
|
||||
```
|
||||
|
||||
|
||||
1. Create environment.
|
||||
|
||||
Dify API service uses [UV](https://docs.astral.sh/uv/) to manage dependencies.
|
||||
First, you need to add the uv package manager, if you don't have it already.
|
||||
@ -49,13 +53,13 @@
|
||||
brew install uv
|
||||
```
|
||||
|
||||
|
||||
1. Install dependencies
|
||||
|
||||
```bash
|
||||
uv sync --dev
|
||||
```
|
||||
|
||||
|
||||
1. Run migrate
|
||||
|
||||
Before the first launch, migrate the database to the latest version.
|
||||
|
||||
@ -63,24 +67,27 @@
|
||||
uv run flask db upgrade
|
||||
```
|
||||
|
||||
|
||||
1. Start backend
|
||||
|
||||
```bash
|
||||
uv run flask run --host 0.0.0.0 --port=5001 --debug
|
||||
```
|
||||
|
||||
1. Start Dify [web](../web) service.

1. Setup your application by visiting `http://localhost:3000`.

1. If you need to handle and debug the async tasks (e.g. dataset importing and documents indexing), please start the worker service.

   ```bash
   uv run celery -A app.celery worker -P gevent -c 2 --loglevel INFO -Q dataset,generation,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation
   ```

   Additionally, if you want to debug the celery scheduled tasks, you can run the following command in another terminal to start the beat service:

   ```bash
   uv run celery -A app.celery beat
   ```
|
||||
|
||||
## Testing
|
||||
|
||||
@ -90,9 +97,16 @@
|
||||
uv sync --dev
|
||||
```
|
||||
|
||||
1. Run the tests locally with the mocked system environment variables in the `tool.pytest_env` section of `pyproject.toml`; for more details, check [Claude.md](../CLAUDE.md).

   ```bash
   uv run pytest                             # Run all tests
   uv run pytest tests/unit_tests/           # Unit tests only
   uv run pytest tests/integration_tests/    # Integration tests

   # Code quality
   ../dev/reformat                # Run all formatters and linters
   uv run ruff check --fix ./     # Fix linting issues
   uv run ruff format ./          # Format code
   uv run basedpyright .          # Type checking
   ```
|
||||
|
||||
api/app.py
@ -1,4 +1,3 @@
|
||||
|
||||
import sys
|
||||
|
||||
|
||||
@ -17,20 +16,20 @@ else:
|
||||
# It seems that JetBrains Python debugger does not work well with gevent,
|
||||
# so we need to disable gevent in debug mode.
|
||||
# If you are using debugpy and set GEVENT_SUPPORT=True, you can debug with gevent.
|
||||
# if (flask_debug := os.environ.get("FLASK_DEBUG", "0")) and flask_debug.lower() in {"false", "0", "no"}:
#     from gevent import monkey
#
#     # gevent
#     monkey.patch_all()
#
#     from grpc.experimental import gevent as grpc_gevent  # type: ignore
#
#     # grpc gevent
#     grpc_gevent.init_gevent()

# import psycogreen.gevent  # type: ignore
#
# psycogreen.gevent.patch_psycopg()
|
||||
|
||||
from app_factory import create_app
|
||||
|
||||
|
||||
@ -5,6 +5,8 @@ from configs import dify_config
|
||||
from contexts.wrapper import RecyclableContextVar
|
||||
from dify_app import DifyApp
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
# ----------------------------
|
||||
# Application Factory Function
|
||||
@ -23,6 +25,9 @@ def create_flask_app_with_configs() -> DifyApp:
|
||||
# add an unique identifier to each request
|
||||
RecyclableContextVar.increment_thread_recycles()
|
||||
|
||||
# Capture the decorator's return value to avoid pyright reportUnusedFunction
|
||||
_ = before_request
|
||||
|
||||
return dify_app
|
||||
|
||||
|
||||
@ -32,7 +37,7 @@ def create_app() -> DifyApp:
|
||||
initialize_extensions(app)
|
||||
end_time = time.perf_counter()
|
||||
if dify_config.DEBUG:
|
||||
logging.info(f"Finished create_app ({round((end_time - start_time) * 1000, 2)} ms)")
|
||||
logger.info("Finished create_app (%s ms)", round((end_time - start_time) * 1000, 2))
|
||||
return app
|
||||
|
||||
|
||||
@ -51,6 +56,7 @@ def initialize_extensions(app: DifyApp):
|
||||
ext_login,
|
||||
ext_mail,
|
||||
ext_migrate,
|
||||
ext_orjson,
|
||||
ext_otel,
|
||||
ext_proxy_fix,
|
||||
ext_redis,
|
||||
@ -67,6 +73,7 @@ def initialize_extensions(app: DifyApp):
|
||||
ext_logging,
|
||||
ext_warnings,
|
||||
ext_import_modules,
|
||||
ext_orjson,
|
||||
ext_set_secretkey,
|
||||
ext_compress,
|
||||
ext_code_based_extension,
|
||||
@ -91,14 +98,14 @@ def initialize_extensions(app: DifyApp):
|
||||
is_enabled = ext.is_enabled() if hasattr(ext, "is_enabled") else True
|
||||
if not is_enabled:
|
||||
if dify_config.DEBUG:
|
||||
logging.info(f"Skipped {short_name}")
|
||||
logger.info("Skipped %s", short_name)
|
||||
continue
|
||||
|
||||
start_time = time.perf_counter()
|
||||
ext.init_app(app)
|
||||
end_time = time.perf_counter()
|
||||
if dify_config.DEBUG:
|
||||
logging.info(f"Loaded {short_name} ({round((end_time - start_time) * 1000, 2)} ms)")
|
||||
logger.info("Loaded %s (%s ms)", short_name, round((end_time - start_time) * 1000, 2))
|
||||
|
||||
|
||||
def create_migrations_app():
|
||||
|
||||
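The `logging.info(f"...")` to `logger.info("...", args)` changes above switch to lazy %-style formatting on a module-level logger. A small sketch of the difference (illustrative values):

```python
import logging

logger = logging.getLogger(__name__)

# Eager: the f-string is built even if the INFO level is disabled.
logger.info(f"Loaded {'ext_redis'} ({1.23} ms)")

# Lazy: interpolation happens only if the record is actually emitted,
# so disabled levels skip the formatting work entirely.
logger.info("Loaded %s (%s ms)", "ext_redis", 1.23)
```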
13 api/celery_entrypoint.py (new file)

@@ -0,0 +1,13 @@
import psycogreen.gevent as pscycogreen_gevent  # type: ignore
from grpc.experimental import gevent as grpc_gevent  # type: ignore

# grpc gevent
grpc_gevent.init_gevent()
print("gRPC patched with gevent.", flush=True)  # noqa: T201
pscycogreen_gevent.patch_psycopg()
print("psycopg2 patched with gevent.", flush=True)  # noqa: T201


from app import app, celery

__all__ = ["app", "celery"]
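The point of this entrypoint is ordering: gRPC and psycopg are patched for gevent before `from app import app, celery` pulls in anything that uses them. A minimal sketch of the same import-order rule with gevent's own patcher (not code from the repo, just the general pattern):

```python
from gevent import monkey

# Must run before any module that imports socket/ssl/threading, otherwise
# those modules keep references to the original blocking implementations.
monkey.patch_all()

import socket  # noqa: E402  (imported after patching on purpose)

# Under gevent, socket.socket is now the cooperative green variant.
print(socket.socket)
```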
893 api/commands.py (file diff suppressed because it is too large)

@@ -1,3 +1,3 @@
from .app_config import DifyConfig

dify_config = DifyConfig()
dify_config = DifyConfig()  # type: ignore
@@ -41,7 +41,7 @@ class RemoteSettingsSourceFactory(PydanticBaseSettingsSource):
case RemoteSettingsSourceName.NACOS:
    remote_source = NacosSettingsSource(current_state)
case _:
    logger.warning(f"Unsupported remote source: {remote_source_name}")
    logger.warning("Unsupported remote source: %s", remote_source_name)
    return {}

d: dict[str, Any] = {}
@@ -1,5 +1,3 @@
from typing import Optional

from pydantic import Field
from pydantic_settings import BaseSettings

@@ -9,28 +7,28 @@ class NotionConfig(BaseSettings):
Configuration settings for Notion integration
"""

NOTION_CLIENT_ID: Optional[str] = Field(
NOTION_CLIENT_ID: str | None = Field(
    description="Client ID for Notion API authentication. Required for OAuth 2.0 flow.",
    default=None,
)

NOTION_CLIENT_SECRET: Optional[str] = Field(
NOTION_CLIENT_SECRET: str | None = Field(
    description="Client secret for Notion API authentication. Required for OAuth 2.0 flow.",
    default=None,
)

NOTION_INTEGRATION_TYPE: Optional[str] = Field(
NOTION_INTEGRATION_TYPE: str | None = Field(
    description="Type of Notion integration."
    " Set to 'internal' for internal integrations, or None for public integrations.",
    default=None,
)

NOTION_INTERNAL_SECRET: Optional[str] = Field(
NOTION_INTERNAL_SECRET: str | None = Field(
    description="Secret key for internal Notion integrations. Required when NOTION_INTEGRATION_TYPE is 'internal'.",
    default=None,
)

NOTION_INTEGRATION_TOKEN: Optional[str] = Field(
NOTION_INTEGRATION_TOKEN: str | None = Field(
    description="Integration token for Notion API access. Used for direct API calls without OAuth flow.",
    default=None,
)
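These paired lines are the recurring migration in this changeset: `Optional[str]` becomes the PEP 604 union `str | None`, with identical runtime behavior and one less import. A minimal sketch of the pattern (the class and field names are illustrative, not from the repo):

```python
from pydantic import Field
from pydantic_settings import BaseSettings


class ExampleConfig(BaseSettings):
    # Equivalent to Optional[str]; the typing import is simply no longer needed.
    EXAMPLE_CLIENT_ID: str | None = Field(
        description="illustrative field, not from the repository",
        default=None,
    )


# Absent an EXAMPLE_CLIENT_ID environment variable, the default applies.
print(ExampleConfig().EXAMPLE_CLIENT_ID)
```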
@@ -1,5 +1,3 @@
from typing import Optional

from pydantic import Field, NonNegativeFloat
from pydantic_settings import BaseSettings

@@ -9,7 +7,7 @@ class SentryConfig(BaseSettings):
Configuration settings for Sentry error tracking and performance monitoring
"""

SENTRY_DSN: Optional[str] = Field(
SENTRY_DSN: str | None = Field(
    description="Sentry Data Source Name (DSN)."
    " This is the unique identifier of your Sentry project, used to send events to the correct project.",
    default=None,
@@ -1,4 +1,5 @@
from typing import Annotated, Literal, Optional
from enum import StrEnum
from typing import Literal

from pydantic import (
    AliasChoices,
@@ -31,6 +32,12 @@ class SecurityConfig(BaseSettings):
    description="Duration in minutes for which a password reset token remains valid",
    default=5,
)

EMAIL_REGISTER_TOKEN_EXPIRY_MINUTES: PositiveInt = Field(
    description="Duration in minutes for which an email register token remains valid",
    default=5,
)

CHANGE_EMAIL_TOKEN_EXPIRY_MINUTES: PositiveInt = Field(
    description="Duration in minutes for which a change email token remains valid",
    default=5,
@@ -51,7 +58,7 @@ class SecurityConfig(BaseSettings):
    default=False,
)

ADMIN_API_KEY: Optional[str] = Field(
ADMIN_API_KEY: str | None = Field(
    description="Admin API key for authentication",
    default=None,
)
@@ -91,21 +98,36 @@ class CodeExecutionSandboxConfig(BaseSettings):
    default="dify-sandbox",
)

CODE_EXECUTION_CONNECT_TIMEOUT: Optional[float] = Field(
CODE_EXECUTION_CONNECT_TIMEOUT: float | None = Field(
    description="Connection timeout in seconds for code execution requests",
    default=10.0,
)

CODE_EXECUTION_READ_TIMEOUT: Optional[float] = Field(
CODE_EXECUTION_READ_TIMEOUT: float | None = Field(
    description="Read timeout in seconds for code execution requests",
    default=60.0,
)

CODE_EXECUTION_WRITE_TIMEOUT: Optional[float] = Field(
CODE_EXECUTION_WRITE_TIMEOUT: float | None = Field(
    description="Write timeout in seconds for code execution requests",
    default=10.0,
)

CODE_EXECUTION_POOL_MAX_CONNECTIONS: PositiveInt = Field(
    description="Maximum number of concurrent connections for the code execution HTTP client",
    default=100,
)

CODE_EXECUTION_POOL_MAX_KEEPALIVE_CONNECTIONS: PositiveInt = Field(
    description="Maximum number of persistent keep-alive connections for the code execution HTTP client",
    default=20,
)

CODE_EXECUTION_POOL_KEEPALIVE_EXPIRY: PositiveFloat | None = Field(
    description="Keep-alive expiry in seconds for idle connections (set to None to disable)",
    default=5.0,
)

CODE_MAX_NUMBER: PositiveInt = Field(
    description="Maximum allowed numeric value in code execution",
    default=9223372036854775807,
@@ -146,6 +168,11 @@ class CodeExecutionSandboxConfig(BaseSettings):
    default=1000,
)

CODE_EXECUTION_SSL_VERIFY: bool = Field(
    description="Enable or disable SSL verification for code execution requests",
    default=True,
)
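The pool settings above describe an HTTP connection pool. A hedged sketch of how they might map onto an httpx client (httpx's `Limits` and `Timeout` are real APIs; wiring them to the sandbox client is an assumption, and the values mirror the defaults above):

```python
import httpx

limits = httpx.Limits(
    max_connections=100,           # CODE_EXECUTION_POOL_MAX_CONNECTIONS
    max_keepalive_connections=20,  # CODE_EXECUTION_POOL_MAX_KEEPALIVE_CONNECTIONS
    keepalive_expiry=5.0,          # CODE_EXECUTION_POOL_KEEPALIVE_EXPIRY
)
timeout = httpx.Timeout(connect=10.0, read=60.0, write=10.0, pool=None)
client = httpx.Client(limits=limits, timeout=timeout, verify=True)  # CODE_EXECUTION_SSL_VERIFY
```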


class PluginConfig(BaseSettings):
"""
@@ -330,17 +357,17 @@ class HttpConfig(BaseSettings):
def WEB_API_CORS_ALLOW_ORIGINS(self) -> list[str]:
    return self.inner_WEB_API_CORS_ALLOW_ORIGINS.split(",")

HTTP_REQUEST_MAX_CONNECT_TIMEOUT: Annotated[
    PositiveInt, Field(ge=10, description="Maximum connection timeout in seconds for HTTP requests")
] = 10
HTTP_REQUEST_MAX_CONNECT_TIMEOUT: int = Field(
    ge=1, description="Maximum connection timeout in seconds for HTTP requests", default=10
)

HTTP_REQUEST_MAX_READ_TIMEOUT: Annotated[
    PositiveInt, Field(ge=60, description="Maximum read timeout in seconds for HTTP requests")
] = 60
HTTP_REQUEST_MAX_READ_TIMEOUT: int = Field(
    ge=1, description="Maximum read timeout in seconds for HTTP requests", default=60
)

HTTP_REQUEST_MAX_WRITE_TIMEOUT: Annotated[
    PositiveInt, Field(ge=10, description="Maximum write timeout in seconds for HTTP requests")
] = 20
HTTP_REQUEST_MAX_WRITE_TIMEOUT: int = Field(
    ge=1, description="Maximum write timeout in seconds for HTTP requests", default=20
)

HTTP_REQUEST_NODE_MAX_BINARY_SIZE: PositiveInt = Field(
    description="Maximum allowed size in bytes for binary data in HTTP requests",
@@ -362,17 +389,17 @@ class HttpConfig(BaseSettings):
    default=3,
)

SSRF_PROXY_ALL_URL: Optional[str] = Field(
SSRF_PROXY_ALL_URL: str | None = Field(
    description="Proxy URL for HTTP or HTTPS requests to prevent Server-Side Request Forgery (SSRF)",
    default=None,
)

SSRF_PROXY_HTTP_URL: Optional[str] = Field(
SSRF_PROXY_HTTP_URL: str | None = Field(
    description="Proxy URL for HTTP requests to prevent Server-Side Request Forgery (SSRF)",
    default=None,
)

SSRF_PROXY_HTTPS_URL: Optional[str] = Field(
SSRF_PROXY_HTTPS_URL: str | None = Field(
    description="Proxy URL for HTTPS requests to prevent Server-Side Request Forgery (SSRF)",
    default=None,
)
@@ -397,6 +424,21 @@ class HttpConfig(BaseSettings):
    default=5,
)

SSRF_POOL_MAX_CONNECTIONS: PositiveInt = Field(
    description="Maximum number of concurrent connections for the SSRF HTTP client",
    default=100,
)

SSRF_POOL_MAX_KEEPALIVE_CONNECTIONS: PositiveInt = Field(
    description="Maximum number of persistent keep-alive connections for the SSRF HTTP client",
    default=20,
)

SSRF_POOL_KEEPALIVE_EXPIRY: PositiveFloat | None = Field(
    description="Keep-alive expiry in seconds for idle SSRF connections (set to None to disable)",
    default=5.0,
)

RESPECT_XFORWARD_HEADERS_ENABLED: bool = Field(
    description="Enable handling of X-Forwarded-For, X-Forwarded-Proto, and X-Forwarded-Port headers"
    " when the app is behind a single trusted reverse proxy.",
@@ -414,7 +456,7 @@ class InnerAPIConfig(BaseSettings):
    default=False,
)

INNER_API_KEY: Optional[str] = Field(
INNER_API_KEY: str | None = Field(
    description="API key for accessing the internal API",
    default=None,
)
@@ -430,7 +472,7 @@ class LoggingConfig(BaseSettings):
    default="INFO",
)

LOG_FILE: Optional[str] = Field(
LOG_FILE: str | None = Field(
    description="File path for log output.",
    default=None,
)
@@ -450,12 +492,12 @@ class LoggingConfig(BaseSettings):
    default="%(asctime)s.%(msecs)03d %(levelname)s [%(threadName)s] [%(filename)s:%(lineno)d] - %(message)s",
)

LOG_DATEFORMAT: Optional[str] = Field(
LOG_DATEFORMAT: str | None = Field(
    description="Date format string for log timestamps",
    default=None,
)

LOG_TZ: Optional[str] = Field(
LOG_TZ: str | None = Field(
    description="Timezone for log timestamps (e.g., 'America/New_York')",
    default="UTC",
)
@@ -499,6 +541,22 @@ class UpdateConfig(BaseSettings):
)
class WorkflowVariableTruncationConfig(BaseSettings):
    WORKFLOW_VARIABLE_TRUNCATION_MAX_SIZE: PositiveInt = Field(
        # 1,024,000 bytes (~1 MB)
        1024_000,
        description="Maximum size for a variable to trigger final truncation.",
    )
    WORKFLOW_VARIABLE_TRUNCATION_STRING_LENGTH: PositiveInt = Field(
        100000,
        description="Maximum length for a string to trigger truncation, measured in characters.",
    )
    WORKFLOW_VARIABLE_TRUNCATION_ARRAY_LENGTH: PositiveInt = Field(
        1000,
        description="Maximum length for an array to trigger truncation.",
    )
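Two details in this new class are easy to misread: `1024_000` is just Python's underscore digit grouping for 1,024,000, and `Field`'s first positional argument is the default value. A small sketch (the class name is illustrative):

```python
from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings


class TruncationDemo(BaseSettings):
    # Positional default, as in WorkflowVariableTruncationConfig above.
    MAX_SIZE: PositiveInt = Field(1024_000, description="demo default")


assert 1024_000 == 1_024_000 == 1024000
assert TruncationDemo().MAX_SIZE == 1024000
```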
class WorkflowConfig(BaseSettings):
"""
Configuration for workflow execution
@@ -519,16 +577,33 @@ class WorkflowConfig(BaseSettings):
    default=5,
)

WORKFLOW_PARALLEL_DEPTH_LIMIT: PositiveInt = Field(
    description="Maximum allowed depth for nested parallel executions",
    default=3,
)

MAX_VARIABLE_SIZE: PositiveInt = Field(
    description="Maximum size in bytes for a single variable in workflows. Default to 200 KB.",
    default=200 * 1024,
)

# GraphEngine Worker Pool Configuration
GRAPH_ENGINE_MIN_WORKERS: PositiveInt = Field(
    description="Minimum number of workers per GraphEngine instance",
    default=1,
)

GRAPH_ENGINE_MAX_WORKERS: PositiveInt = Field(
    description="Maximum number of workers per GraphEngine instance",
    default=10,
)

GRAPH_ENGINE_SCALE_UP_THRESHOLD: PositiveInt = Field(
    description="Queue depth threshold that triggers worker scale up",
    default=3,
)

GRAPH_ENGINE_SCALE_DOWN_IDLE_TIME: float = Field(
    description="Seconds of idle time before scaling down workers",
    default=5.0,
    ge=0.1,
)


class WorkflowNodeExecutionConfig(BaseSettings):
"""
@@ -552,12 +627,18 @@ class RepositoryConfig(BaseSettings):
"""

CORE_WORKFLOW_EXECUTION_REPOSITORY: str = Field(
    description="Repository implementation for WorkflowExecution. Specify as a module path",
    description="Repository implementation for WorkflowExecution. Options: "
    "'core.repositories.sqlalchemy_workflow_execution_repository.SQLAlchemyWorkflowExecutionRepository' (default), "
    "'core.repositories.celery_workflow_execution_repository.CeleryWorkflowExecutionRepository'",
    default="core.repositories.sqlalchemy_workflow_execution_repository.SQLAlchemyWorkflowExecutionRepository",
)

CORE_WORKFLOW_NODE_EXECUTION_REPOSITORY: str = Field(
    description="Repository implementation for WorkflowNodeExecution. Specify as a module path",
    description="Repository implementation for WorkflowNodeExecution. Options: "
    "'core.repositories.sqlalchemy_workflow_node_execution_repository."
    "SQLAlchemyWorkflowNodeExecutionRepository' (default), "
    "'core.repositories.celery_workflow_node_execution_repository."
    "CeleryWorkflowNodeExecutionRepository'",
    default="core.repositories.sqlalchemy_workflow_node_execution_repository.SQLAlchemyWorkflowNodeExecutionRepository",
)
@@ -583,22 +664,22 @@ class AuthConfig(BaseSettings):
    default="/console/api/oauth/authorize",
)

GITHUB_CLIENT_ID: Optional[str] = Field(
GITHUB_CLIENT_ID: str | None = Field(
    description="GitHub OAuth client ID",
    default=None,
)

GITHUB_CLIENT_SECRET: Optional[str] = Field(
GITHUB_CLIENT_SECRET: str | None = Field(
    description="GitHub OAuth client secret",
    default=None,
)

GOOGLE_CLIENT_ID: Optional[str] = Field(
GOOGLE_CLIENT_ID: str | None = Field(
    description="Google OAuth client ID",
    default=None,
)

GOOGLE_CLIENT_SECRET: Optional[str] = Field(
GOOGLE_CLIENT_SECRET: str | None = Field(
    description="Google OAuth client secret",
    default=None,
)
@@ -633,6 +714,11 @@ class AuthConfig(BaseSettings):
    default=86400,
)

EMAIL_REGISTER_LOCKOUT_DURATION: PositiveInt = Field(
    description="Time (in seconds) a user must wait before retrying email register after exceeding the rate limit.",
    default=86400,
)


class ModerationConfig(BaseSettings):
"""
@@ -656,47 +742,71 @@ class ToolConfig(BaseSettings):
)


class TemplateMode(StrEnum):
    # unsafe mode allows flexible operations in templates, but may cause security vulnerabilities
    UNSAFE = "unsafe"

    # sandbox mode restricts some unsafe operations like accessing __class__;
    # however, it is still not 100% safe; for example, CPU exploitation can still happen.
    SANDBOX = "sandbox"

    # templating is disabled
    DISABLED = "disabled"


class MailConfig(BaseSettings):
"""
Configuration for email services
"""

MAIL_TYPE: Optional[str] = Field(
MAIL_TEMPLATING_MODE: TemplateMode = Field(
    description="Template mode for email services",
    default=TemplateMode.SANDBOX,
)

MAIL_TEMPLATING_TIMEOUT: int = Field(
    description="""
    Timeout for email templating in seconds. Used to prevent infinite loops in malicious templates.
    Only available in sandbox mode.""",
    default=3,
)

MAIL_TYPE: str | None = Field(
    description="Email service provider type ('smtp', 'resend', or 'sendgrid'); defaults to None.",
    default=None,
)

MAIL_DEFAULT_SEND_FROM: Optional[str] = Field(
MAIL_DEFAULT_SEND_FROM: str | None = Field(
    description="Default email address to use as the sender",
    default=None,
)

RESEND_API_KEY: Optional[str] = Field(
RESEND_API_KEY: str | None = Field(
    description="API key for Resend email service",
    default=None,
)

RESEND_API_URL: Optional[str] = Field(
RESEND_API_URL: str | None = Field(
    description="API URL for Resend email service",
    default=None,
)

SMTP_SERVER: Optional[str] = Field(
SMTP_SERVER: str | None = Field(
    description="SMTP server hostname",
    default=None,
)

SMTP_PORT: Optional[int] = Field(
SMTP_PORT: int | None = Field(
    description="SMTP server port number",
    default=465,
)

SMTP_USERNAME: Optional[str] = Field(
SMTP_USERNAME: str | None = Field(
    description="Username for SMTP authentication",
    default=None,
)

SMTP_PASSWORD: Optional[str] = Field(
SMTP_PASSWORD: str | None = Field(
    description="Password for SMTP authentication",
    default=None,
)
@@ -716,7 +826,7 @@ class MailConfig(BaseSettings):
    default=50,
)

SENDGRID_API_KEY: Optional[str] = Field(
SENDGRID_API_KEY: str | None = Field(
    description="API key for SendGrid service",
    default=None,
)
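`TemplateMode` is a `StrEnum`, so members compare equal to their string values, which is what lets pydantic-settings parse `MAIL_TEMPLATING_MODE=sandbox` straight from the environment. A minimal sketch:

```python
from enum import StrEnum


class TemplateMode(StrEnum):
    UNSAFE = "unsafe"
    SANDBOX = "sandbox"
    DISABLED = "disabled"


# Construction from the raw string and equality with it both work.
assert TemplateMode("sandbox") is TemplateMode.SANDBOX
assert TemplateMode.SANDBOX == "sandbox"
```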
@@ -739,17 +849,17 @@ class RagEtlConfig(BaseSettings):
    default="database",
)

UNSTRUCTURED_API_URL: Optional[str] = Field(
UNSTRUCTURED_API_URL: str | None = Field(
    description="API URL for Unstructured.io service",
    default=None,
)

UNSTRUCTURED_API_KEY: Optional[str] = Field(
UNSTRUCTURED_API_KEY: str | None = Field(
    description="API key for Unstructured.io service",
    default="",
)

SCARF_NO_ANALYTICS: Optional[str] = Field(
SCARF_NO_ANALYTICS: str | None = Field(
    description="This is about whether to disable Scarf analytics in Unstructured library.",
    default="false",
)
@@ -790,6 +900,11 @@ class DataSetConfig(BaseSettings):
    default=30,
)

DSL_EXPORT_ENCRYPT_DATASET_ID: bool = Field(
    description="Enable or disable dataset ID encryption when exporting DSL files",
    default=True,
)


class WorkspaceConfig(BaseSettings):
"""
@@ -962,6 +1077,26 @@ class AccountConfig(BaseSettings):
)


class WorkflowLogConfig(BaseSettings):
    WORKFLOW_LOG_CLEANUP_ENABLED: bool = Field(default=True, description="Enable workflow run log cleanup")
    WORKFLOW_LOG_RETENTION_DAYS: int = Field(default=30, description="Retention days for workflow run logs")
    WORKFLOW_LOG_CLEANUP_BATCH_SIZE: int = Field(
        default=100, description="Batch size for workflow run log cleanup operations"
    )


class SwaggerUIConfig(BaseSettings):
    SWAGGER_UI_ENABLED: bool = Field(
        description="Whether to enable Swagger UI in the api module",
        default=True,
    )

    SWAGGER_UI_PATH: str = Field(
        description="Swagger UI page path in the api module",
        default="/swagger-ui.html",
    )


class FeatureConfig(
    # place the configs in alphabet order
    AppExecutionConfig,
@@ -993,9 +1128,12 @@ class FeatureConfig(
    WorkspaceConfig,
    LoginConfig,
    AccountConfig,
    SwaggerUIConfig,
    # hosted services config
    HostedServiceConfig,
    CeleryBeatConfig,
    CeleryScheduleTasksConfig,
    WorkflowLogConfig,
    WorkflowVariableTruncationConfig,
):
    pass
@@ -1,5 +1,3 @@
from typing import Optional

from pydantic import Field, NonNegativeInt
from pydantic_settings import BaseSettings

@@ -40,17 +38,17 @@ class HostedOpenAiConfig(BaseSettings):
Configuration for hosted OpenAI service
"""

HOSTED_OPENAI_API_KEY: Optional[str] = Field(
HOSTED_OPENAI_API_KEY: str | None = Field(
    description="API key for hosted OpenAI service",
    default=None,
)

HOSTED_OPENAI_API_BASE: Optional[str] = Field(
HOSTED_OPENAI_API_BASE: str | None = Field(
    description="Base URL for hosted OpenAI API",
    default=None,
)

HOSTED_OPENAI_API_ORGANIZATION: Optional[str] = Field(
HOSTED_OPENAI_API_ORGANIZATION: str | None = Field(
    description="Organization ID for hosted OpenAI service",
    default=None,
)
@@ -110,12 +108,12 @@ class HostedAzureOpenAiConfig(BaseSettings):
    default=False,
)

HOSTED_AZURE_OPENAI_API_KEY: Optional[str] = Field(
HOSTED_AZURE_OPENAI_API_KEY: str | None = Field(
    description="API key for hosted Azure OpenAI service",
    default=None,
)

HOSTED_AZURE_OPENAI_API_BASE: Optional[str] = Field(
HOSTED_AZURE_OPENAI_API_BASE: str | None = Field(
    description="Base URL for hosted Azure OpenAI API",
    default=None,
)
@@ -131,12 +129,12 @@ class HostedAnthropicConfig(BaseSettings):
Configuration for hosted Anthropic service
"""

HOSTED_ANTHROPIC_API_BASE: Optional[str] = Field(
HOSTED_ANTHROPIC_API_BASE: str | None = Field(
    description="Base URL for hosted Anthropic API",
    default=None,
)

HOSTED_ANTHROPIC_API_KEY: Optional[str] = Field(
HOSTED_ANTHROPIC_API_KEY: str | None = Field(
    description="API key for hosted Anthropic service",
    default=None,
)
@@ -222,11 +220,28 @@ class HostedFetchAppTemplateConfig(BaseSettings):
)


class HostedFetchPipelineTemplateConfig(BaseSettings):
"""
Configuration for fetching pipeline templates
"""

HOSTED_FETCH_PIPELINE_TEMPLATES_MODE: str = Field(
    description="Mode for fetching pipeline templates: 'remote', 'db', or 'builtin'; defaults to 'remote'.",
    default="remote",
)

HOSTED_FETCH_PIPELINE_TEMPLATES_REMOTE_DOMAIN: str = Field(
    description="Domain for fetching remote pipeline templates",
    default="https://tmpl.dify.ai",
)


class HostedServiceConfig(
    # place the configs in alphabet order
    HostedAnthropicConfig,
    HostedAzureOpenAiConfig,
    HostedFetchAppTemplateConfig,
    HostedFetchPipelineTemplateConfig,
    HostedMinmaxConfig,
    HostedOpenAiConfig,
    HostedSparkConfig,
@@ -1,5 +1,5 @@
import os
from typing import Any, Literal, Optional
from typing import Any, Literal
from urllib.parse import parse_qsl, quote_plus

from pydantic import Field, NonNegativeFloat, NonNegativeInt, PositiveFloat, PositiveInt, computed_field
@@ -10,6 +10,7 @@ from .storage.aliyun_oss_storage_config import AliyunOSSStorageConfig
from .storage.amazon_s3_storage_config import S3StorageConfig
from .storage.azure_blob_storage_config import AzureBlobStorageConfig
from .storage.baidu_obs_storage_config import BaiduOBSStorageConfig
from .storage.clickzetta_volume_storage_config import ClickZettaVolumeStorageConfig
from .storage.google_cloud_storage_config import GoogleCloudStorageConfig
from .storage.huawei_obs_storage_config import HuaweiCloudOBSStorageConfig
from .storage.oci_storage_config import OCIStorageConfig
@@ -20,6 +21,7 @@ from .storage.volcengine_tos_storage_config import VolcengineTOSStorageConfig
from .vdb.analyticdb_config import AnalyticdbConfig
from .vdb.baidu_vector_config import BaiduVectorDBConfig
from .vdb.chroma_config import ChromaConfig
from .vdb.clickzetta_config import ClickzettaConfig
from .vdb.couchbase_config import CouchbaseConfig
from .vdb.elasticsearch_config import ElasticsearchConfig
from .vdb.huawei_cloud_config import HuaweiCloudConfig
@@ -52,6 +54,7 @@ class StorageConfig(BaseSettings):
    "aliyun-oss",
    "azure-blob",
    "baidu-obs",
    "clickzetta-volume",
    "google-storage",
    "huawei-obs",
    "oci-storage",
@@ -61,8 +64,9 @@ class StorageConfig(BaseSettings):
    "local",
] = Field(
    description="Type of storage to use."
    " Options: 'opendal', '(deprecated) local', 's3', 'aliyun-oss', 'azure-blob', 'baidu-obs', 'google-storage', "
    "'huawei-obs', 'oci-storage', 'tencent-cos', 'volcengine-tos', 'supabase'. Default is 'opendal'.",
    " Options: 'opendal', '(deprecated) local', 's3', 'aliyun-oss', 'azure-blob', 'baidu-obs', "
    "'clickzetta-volume', 'google-storage', 'huawei-obs', 'oci-storage', 'tencent-cos', "
    "'volcengine-tos', 'supabase'. Default is 'opendal'.",
    default="opendal",
)
@@ -74,18 +78,18 @@ class StorageConfig(BaseSettings):


class VectorStoreConfig(BaseSettings):
VECTOR_STORE: Optional[str] = Field(
VECTOR_STORE: str | None = Field(
    description="Type of vector store to use for efficient similarity search."
    " Set to None if not using a vector store.",
    default=None,
)

VECTOR_STORE_WHITELIST_ENABLE: Optional[bool] = Field(
VECTOR_STORE_WHITELIST_ENABLE: bool | None = Field(
    description="Enable whitelist for vector store.",
    default=False,
)

VECTOR_INDEX_NAME_PREFIX: Optional[str] = Field(
VECTOR_INDEX_NAME_PREFIX: str | None = Field(
    description="Prefix used to create collection name in vector database",
    default="Vector_index",
)
@@ -140,7 +144,8 @@ class DatabaseConfig(BaseSettings):
    default="postgresql",
)

@computed_field
@computed_field  # type: ignore[misc]
@property
def SQLALCHEMY_DATABASE_URI(self) -> str:
    db_extras = (
        f"{self.DB_EXTRAS}&client_encoding={self.DB_CHARSET}" if self.DB_CHARSET else self.DB_EXTRAS
@@ -182,6 +187,11 @@ class DatabaseConfig(BaseSettings):
    default=False,
)

SQLALCHEMY_POOL_TIMEOUT: NonNegativeInt = Field(
    description="Number of seconds to wait for a connection from the pool before raising a timeout error.",
    default=30,
)

RETRIEVAL_SERVICE_EXECUTORS: NonNegativeInt = Field(
    description="Number of processes for the retrieval service, default to CPU cores.",
    default=os.cpu_count() or 1,
@@ -210,46 +220,49 @@ class DatabaseConfig(BaseSettings):
    "pool_pre_ping": self.SQLALCHEMY_POOL_PRE_PING,
    "connect_args": connect_args,
    "pool_use_lifo": self.SQLALCHEMY_POOL_USE_LIFO,
    "pool_reset_on_return": None,
    "pool_timeout": self.SQLALCHEMY_POOL_TIMEOUT,
}
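The new `pool_timeout` entry feeds SQLAlchemy's engine options. A hedged sketch of how such options are consumed (SQLAlchemy's `create_engine` keywords are real; the URI and values are illustrative):

```python
from sqlalchemy import create_engine

engine = create_engine(
    "postgresql://user:password@localhost/dify",  # illustrative URI
    pool_pre_ping=True,   # SQLALCHEMY_POOL_PRE_PING: test connections before use
    pool_timeout=30,      # SQLALCHEMY_POOL_TIMEOUT: seconds to wait for a pooled connection
    pool_use_lifo=False,  # SQLALCHEMY_POOL_USE_LIFO
)
```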

class CeleryConfig(DatabaseConfig):
CELERY_BACKEND: str = Field(
    description="Backend for Celery task results. Options: 'database', 'redis'.",
    description="Backend for Celery task results. Options: 'database', 'redis', 'rabbitmq'.",
    default="redis",
)

CELERY_BROKER_URL: Optional[str] = Field(
CELERY_BROKER_URL: str | None = Field(
    description="URL of the message broker for Celery tasks.",
    default=None,
)

CELERY_USE_SENTINEL: Optional[bool] = Field(
CELERY_USE_SENTINEL: bool | None = Field(
    description="Whether to use Redis Sentinel for high availability.",
    default=False,
)

CELERY_SENTINEL_MASTER_NAME: Optional[str] = Field(
CELERY_SENTINEL_MASTER_NAME: str | None = Field(
    description="Name of the Redis Sentinel master.",
    default=None,
)

CELERY_SENTINEL_PASSWORD: Optional[str] = Field(
CELERY_SENTINEL_PASSWORD: str | None = Field(
    description="Password of the Redis Sentinel master.",
    default=None,
)
CELERY_SENTINEL_SOCKET_TIMEOUT: Optional[PositiveFloat] = Field(
CELERY_SENTINEL_SOCKET_TIMEOUT: PositiveFloat | None = Field(
    description="Timeout for Redis Sentinel socket operations in seconds.",
    default=0.1,
)

@computed_field
def CELERY_RESULT_BACKEND(self) -> str | None:
    return (
        "db+{}".format(self.SQLALCHEMY_DATABASE_URI)
        if self.CELERY_BACKEND == "database"
        else self.CELERY_BROKER_URL
    )
    if self.CELERY_BACKEND in ("database", "rabbitmq"):
        return f"db+{self.SQLALCHEMY_DATABASE_URI}"
    elif self.CELERY_BACKEND == "redis":
        return self.CELERY_BROKER_URL
    else:
        return None

@property
def BROKER_USE_SSL(self) -> bool:
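The rewritten `CELERY_RESULT_BACKEND` maps the backend choice to a result-store URL: 'database' and 'rabbitmq' both persist results via SQLAlchemy (the `db+` prefix), 'redis' reuses the broker URL, and anything else yields None. A standalone sketch of the same dispatch (values illustrative):

```python
def resolve_result_backend(backend: str, database_uri: str, broker_url: str | None) -> str | None:
    # Mirrors the branching in CELERY_RESULT_BACKEND above.
    if backend in ("database", "rabbitmq"):
        return f"db+{database_uri}"
    if backend == "redis":
        return broker_url
    return None


assert resolve_result_backend("database", "postgresql://u:p@db/dify", None) == "db+postgresql://u:p@db/dify"
assert resolve_result_backend("redis", "postgresql://u:p@db/dify", "redis://broker:6379/1") == "redis://broker:6379/1"
assert resolve_result_backend("memcached", "postgresql://u:p@db/dify", None) is None
```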
@@ -261,12 +274,12 @@ class InternalTestConfig(BaseSettings):
Configuration settings for Internal Test
"""

AWS_SECRET_ACCESS_KEY: Optional[str] = Field(
AWS_SECRET_ACCESS_KEY: str | None = Field(
    description="Internal test AWS secret access key",
    default=None,
)

AWS_ACCESS_KEY_ID: Optional[str] = Field(
AWS_ACCESS_KEY_ID: str | None = Field(
    description="Internal test AWS access key ID",
    default=None,
)
@@ -277,15 +290,15 @@ class DatasetQueueMonitorConfig(BaseSettings):
Configuration settings for Dataset Queue Monitor
"""

QUEUE_MONITOR_THRESHOLD: Optional[NonNegativeInt] = Field(
QUEUE_MONITOR_THRESHOLD: NonNegativeInt | None = Field(
    description="Threshold for dataset queue monitor",
    default=200,
)
QUEUE_MONITOR_ALERT_EMAILS: Optional[str] = Field(
QUEUE_MONITOR_ALERT_EMAILS: str | None = Field(
    description="Emails for dataset queue monitor alert, separated by commas",
    default=None,
)
QUEUE_MONITOR_INTERVAL: Optional[NonNegativeFloat] = Field(
QUEUE_MONITOR_INTERVAL: NonNegativeFloat | None = Field(
    description="Interval for dataset queue monitor in minutes",
    default=30,
)
@@ -293,8 +306,7 @@ class DatasetQueueMonitorConfig(BaseSettings):

class MiddlewareConfig(
    # place the configs in alphabet order
    CeleryConfig,
    DatabaseConfig,
    CeleryConfig,  # Note: CeleryConfig already inherits from DatabaseConfig
    KeywordStoreConfig,
    RedisConfig,
    # configs of storage and storage providers
@@ -302,6 +314,7 @@ class MiddlewareConfig(
    AliyunOSSStorageConfig,
    AzureBlobStorageConfig,
    BaiduOBSStorageConfig,
    ClickZettaVolumeStorageConfig,
    GoogleCloudStorageConfig,
    HuaweiCloudOBSStorageConfig,
    OCIStorageConfig,
@@ -314,6 +327,7 @@ class MiddlewareConfig(
    VectorStoreConfig,
    AnalyticdbConfig,
    ChromaConfig,
    ClickzettaConfig,
    HuaweiCloudConfig,
    MilvusConfig,
    MyScaleConfig,
42 api/configs/middleware/cache/redis_config.py (vendored)

@@ -1,5 +1,3 @@
from typing import Optional

from pydantic import Field, NonNegativeInt, PositiveFloat, PositiveInt
from pydantic_settings import BaseSettings

@@ -19,12 +17,12 @@ class RedisConfig(BaseSettings):
    default=6379,
)

REDIS_USERNAME: Optional[str] = Field(
REDIS_USERNAME: str | None = Field(
    description="Username for Redis authentication (if required)",
    default=None,
)

REDIS_PASSWORD: Optional[str] = Field(
REDIS_PASSWORD: str | None = Field(
    description="Password for Redis authentication (if required)",
    default=None,
)
@@ -39,32 +37,52 @@ class RedisConfig(BaseSettings):
    default=False,
)

REDIS_USE_SENTINEL: Optional[bool] = Field(
REDIS_SSL_CERT_REQS: str = Field(
    description="SSL certificate requirements (CERT_NONE, CERT_OPTIONAL, CERT_REQUIRED)",
    default="CERT_NONE",
)

REDIS_SSL_CA_CERTS: str | None = Field(
    description="Path to the CA certificate file for SSL verification",
    default=None,
)

REDIS_SSL_CERTFILE: str | None = Field(
    description="Path to the client certificate file for SSL authentication",
    default=None,
)

REDIS_SSL_KEYFILE: str | None = Field(
    description="Path to the client private key file for SSL authentication",
    default=None,
)

REDIS_USE_SENTINEL: bool | None = Field(
    description="Enable Redis Sentinel mode for high availability",
    default=False,
)

REDIS_SENTINELS: Optional[str] = Field(
REDIS_SENTINELS: str | None = Field(
    description="Comma-separated list of Redis Sentinel nodes (host:port)",
    default=None,
)

REDIS_SENTINEL_SERVICE_NAME: Optional[str] = Field(
REDIS_SENTINEL_SERVICE_NAME: str | None = Field(
    description="Name of the Redis Sentinel service to monitor",
    default=None,
)

REDIS_SENTINEL_USERNAME: Optional[str] = Field(
REDIS_SENTINEL_USERNAME: str | None = Field(
    description="Username for Redis Sentinel authentication (if required)",
    default=None,
)

REDIS_SENTINEL_PASSWORD: Optional[str] = Field(
REDIS_SENTINEL_PASSWORD: str | None = Field(
    description="Password for Redis Sentinel authentication (if required)",
    default=None,
)

REDIS_SENTINEL_SOCKET_TIMEOUT: Optional[PositiveFloat] = Field(
REDIS_SENTINEL_SOCKET_TIMEOUT: PositiveFloat | None = Field(
    description="Socket timeout in seconds for Redis Sentinel connections",
    default=0.1,
)
@@ -74,12 +92,12 @@ class RedisConfig(BaseSettings):
    default=False,
)

REDIS_CLUSTERS: Optional[str] = Field(
REDIS_CLUSTERS: str | None = Field(
    description="Comma-separated list of Redis Clusters nodes (host:port)",
    default=None,
)

REDIS_CLUSTERS_PASSWORD: Optional[str] = Field(
REDIS_CLUSTERS_PASSWORD: str | None = Field(
    description="Password for Redis Clusters authentication (if required)",
    default=None,
)
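`REDIS_SSL_CERT_REQS` is stored as a string; a client typically translates it into the `ssl` module's constants when building the connection. A hedged sketch of one plausible mapping (the mapping itself is an assumption, not code from the repo):

```python
import ssl

# Plausible translation table from the config string to ssl constants.
CERT_REQS = {
    "CERT_NONE": ssl.CERT_NONE,
    "CERT_OPTIONAL": ssl.CERT_OPTIONAL,
    "CERT_REQUIRED": ssl.CERT_REQUIRED,
}

assert CERT_REQS["CERT_NONE"] is ssl.CERT_NONE
```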
@@ -1,5 +1,3 @@
from typing import Optional

from pydantic import Field
from pydantic_settings import BaseSettings

@@ -9,37 +7,37 @@ class AliyunOSSStorageConfig(BaseSettings):
Configuration settings for Aliyun Object Storage Service (OSS)
"""

ALIYUN_OSS_BUCKET_NAME: Optional[str] = Field(
ALIYUN_OSS_BUCKET_NAME: str | None = Field(
    description="Name of the Aliyun OSS bucket to store and retrieve objects",
    default=None,
)

ALIYUN_OSS_ACCESS_KEY: Optional[str] = Field(
ALIYUN_OSS_ACCESS_KEY: str | None = Field(
    description="Access key ID for authenticating with Aliyun OSS",
    default=None,
)

ALIYUN_OSS_SECRET_KEY: Optional[str] = Field(
ALIYUN_OSS_SECRET_KEY: str | None = Field(
    description="Secret access key for authenticating with Aliyun OSS",
    default=None,
)

ALIYUN_OSS_ENDPOINT: Optional[str] = Field(
ALIYUN_OSS_ENDPOINT: str | None = Field(
    description="URL of the Aliyun OSS endpoint for your chosen region",
    default=None,
)

ALIYUN_OSS_REGION: Optional[str] = Field(
ALIYUN_OSS_REGION: str | None = Field(
    description="Aliyun OSS region where your bucket is located (e.g., 'oss-cn-hangzhou')",
    default=None,
)

ALIYUN_OSS_AUTH_VERSION: Optional[str] = Field(
ALIYUN_OSS_AUTH_VERSION: str | None = Field(
    description="Version of the authentication protocol to use with Aliyun OSS (e.g., 'v4')",
    default=None,
)

ALIYUN_OSS_PATH: Optional[str] = Field(
ALIYUN_OSS_PATH: str | None = Field(
    description="Base path within the bucket to store objects (e.g., 'my-app-data/')",
    default=None,
)
@@ -1,4 +1,4 @@
from typing import Literal, Optional
from typing import Literal

from pydantic import Field
from pydantic_settings import BaseSettings
@@ -9,27 +9,27 @@ class S3StorageConfig(BaseSettings):
Configuration settings for S3-compatible object storage
"""

S3_ENDPOINT: Optional[str] = Field(
S3_ENDPOINT: str | None = Field(
    description="URL of the S3-compatible storage endpoint (e.g., 'https://s3.amazonaws.com')",
    default=None,
)

S3_REGION: Optional[str] = Field(
S3_REGION: str | None = Field(
    description="Region where the S3 bucket is located (e.g., 'us-east-1')",
    default=None,
)

S3_BUCKET_NAME: Optional[str] = Field(
S3_BUCKET_NAME: str | None = Field(
    description="Name of the S3 bucket to store and retrieve objects",
    default=None,
)

S3_ACCESS_KEY: Optional[str] = Field(
S3_ACCESS_KEY: str | None = Field(
    description="Access key ID for authenticating with the S3 service",
    default=None,
)

S3_SECRET_KEY: Optional[str] = Field(
S3_SECRET_KEY: str | None = Field(
    description="Secret access key for authenticating with the S3 service",
    default=None,
)
@@ -1,5 +1,3 @@
from typing import Optional

from pydantic import Field
from pydantic_settings import BaseSettings

@@ -9,22 +7,22 @@ class AzureBlobStorageConfig(BaseSettings):
Configuration settings for Azure Blob Storage
"""

AZURE_BLOB_ACCOUNT_NAME: Optional[str] = Field(
AZURE_BLOB_ACCOUNT_NAME: str | None = Field(
    description="Name of the Azure Storage account (e.g., 'mystorageaccount')",
    default=None,
)

AZURE_BLOB_ACCOUNT_KEY: Optional[str] = Field(
AZURE_BLOB_ACCOUNT_KEY: str | None = Field(
    description="Access key for authenticating with the Azure Storage account",
    default=None,
)

AZURE_BLOB_CONTAINER_NAME: Optional[str] = Field(
AZURE_BLOB_CONTAINER_NAME: str | None = Field(
    description="Name of the Azure Blob container to store and retrieve objects",
    default=None,
)

AZURE_BLOB_ACCOUNT_URL: Optional[str] = Field(
AZURE_BLOB_ACCOUNT_URL: str | None = Field(
    description="URL of the Azure Blob storage endpoint (e.g., 'https://mystorageaccount.blob.core.windows.net')",
    default=None,
)
@@ -1,5 +1,3 @@
from typing import Optional

from pydantic import Field
from pydantic_settings import BaseSettings

@@ -9,22 +7,22 @@ class BaiduOBSStorageConfig(BaseSettings):
Configuration settings for Baidu Object Storage Service (OBS)
"""

BAIDU_OBS_BUCKET_NAME: Optional[str] = Field(
BAIDU_OBS_BUCKET_NAME: str | None = Field(
    description="Name of the Baidu OBS bucket to store and retrieve objects (e.g., 'my-obs-bucket')",
    default=None,
)

BAIDU_OBS_ACCESS_KEY: Optional[str] = Field(
BAIDU_OBS_ACCESS_KEY: str | None = Field(
    description="Access Key ID for authenticating with Baidu OBS",
    default=None,
)

BAIDU_OBS_SECRET_KEY: Optional[str] = Field(
BAIDU_OBS_SECRET_KEY: str | None = Field(
    description="Secret Access Key for authenticating with Baidu OBS",
    default=None,
)

BAIDU_OBS_ENDPOINT: Optional[str] = Field(
BAIDU_OBS_ENDPOINT: str | None = Field(
    description="URL of the Baidu OBS endpoint for your chosen region (e.g., 'https://.bj.bcebos.com')",
    default=None,
)
@@ -0,0 +1,63 @@
"""ClickZetta Volume Storage Configuration"""

from pydantic import Field
from pydantic_settings import BaseSettings


class ClickZettaVolumeStorageConfig(BaseSettings):
    """Configuration for ClickZetta Volume storage."""

    CLICKZETTA_VOLUME_USERNAME: str | None = Field(
        description="Username for ClickZetta Volume authentication",
        default=None,
    )

    CLICKZETTA_VOLUME_PASSWORD: str | None = Field(
        description="Password for ClickZetta Volume authentication",
        default=None,
    )

    CLICKZETTA_VOLUME_INSTANCE: str | None = Field(
        description="ClickZetta instance identifier",
        default=None,
    )

    CLICKZETTA_VOLUME_SERVICE: str = Field(
        description="ClickZetta service endpoint",
        default="api.clickzetta.com",
    )

    CLICKZETTA_VOLUME_WORKSPACE: str = Field(
        description="ClickZetta workspace name",
        default="quick_start",
    )

    CLICKZETTA_VOLUME_VCLUSTER: str = Field(
        description="ClickZetta virtual cluster name",
        default="default_ap",
    )

    CLICKZETTA_VOLUME_SCHEMA: str = Field(
        description="ClickZetta schema name",
        default="dify",
    )

    CLICKZETTA_VOLUME_TYPE: str = Field(
        description="ClickZetta volume type (table|user|external)",
        default="user",
    )

    CLICKZETTA_VOLUME_NAME: str | None = Field(
        description="ClickZetta volume name for external volumes",
        default=None,
    )

    CLICKZETTA_VOLUME_TABLE_PREFIX: str = Field(
        description="Prefix for ClickZetta volume table names",
        default="dataset_",
    )

    CLICKZETTA_VOLUME_DIFY_PREFIX: str = Field(
        description="Directory prefix for User Volume to organize Dify files",
        default="dify_km",
    )
@@ -1,5 +1,3 @@
from typing import Optional

from pydantic import Field
from pydantic_settings import BaseSettings

@@ -9,12 +7,12 @@ class GoogleCloudStorageConfig(BaseSettings):
Configuration settings for Google Cloud Storage
"""

GOOGLE_STORAGE_BUCKET_NAME: Optional[str] = Field(
GOOGLE_STORAGE_BUCKET_NAME: str | None = Field(
    description="Name of the Google Cloud Storage bucket to store and retrieve objects (e.g., 'my-gcs-bucket')",
    default=None,
)

GOOGLE_STORAGE_SERVICE_ACCOUNT_JSON_BASE64: Optional[str] = Field(
GOOGLE_STORAGE_SERVICE_ACCOUNT_JSON_BASE64: str | None = Field(
    description="Base64-encoded JSON key file for Google Cloud service account authentication",
    default=None,
)
@@ -1,5 +1,3 @@
from typing import Optional

from pydantic import Field
from pydantic_settings import BaseSettings

@@ -9,22 +7,22 @@ class HuaweiCloudOBSStorageConfig(BaseSettings):
Configuration settings for Huawei Cloud Object Storage Service (OBS)
"""

HUAWEI_OBS_BUCKET_NAME: Optional[str] = Field(
HUAWEI_OBS_BUCKET_NAME: str | None = Field(
    description="Name of the Huawei Cloud OBS bucket to store and retrieve objects (e.g., 'my-obs-bucket')",
    default=None,
)

HUAWEI_OBS_ACCESS_KEY: Optional[str] = Field(
HUAWEI_OBS_ACCESS_KEY: str | None = Field(
    description="Access Key ID for authenticating with Huawei Cloud OBS",
    default=None,
)

HUAWEI_OBS_SECRET_KEY: Optional[str] = Field(
HUAWEI_OBS_SECRET_KEY: str | None = Field(
    description="Secret Access Key for authenticating with Huawei Cloud OBS",
    default=None,
)

HUAWEI_OBS_SERVER: Optional[str] = Field(
HUAWEI_OBS_SERVER: str | None = Field(
    description="Endpoint URL for Huawei Cloud OBS (e.g., 'https://obs.cn-north-4.myhuaweicloud.com')",
    default=None,
)
@@ -1,5 +1,3 @@
from typing import Optional

from pydantic import Field
from pydantic_settings import BaseSettings

@@ -9,27 +7,27 @@ class OCIStorageConfig(BaseSettings):
Configuration settings for Oracle Cloud Infrastructure (OCI) Object Storage
"""

OCI_ENDPOINT: Optional[str] = Field(
OCI_ENDPOINT: str | None = Field(
    description="URL of the OCI Object Storage endpoint (e.g., 'https://objectstorage.us-phoenix-1.oraclecloud.com')",
    default=None,
)

OCI_REGION: Optional[str] = Field(
OCI_REGION: str | None = Field(
    description="OCI region where the bucket is located (e.g., 'us-phoenix-1')",
    default=None,
)

OCI_BUCKET_NAME: Optional[str] = Field(
OCI_BUCKET_NAME: str | None = Field(
    description="Name of the OCI Object Storage bucket to store and retrieve objects (e.g., 'my-oci-bucket')",
    default=None,
)

OCI_ACCESS_KEY: Optional[str] = Field(
OCI_ACCESS_KEY: str | None = Field(
    description="Access key (also known as API key) for authenticating with OCI Object Storage",
    default=None,
)

OCI_SECRET_KEY: Optional[str] = Field(
OCI_SECRET_KEY: str | None = Field(
    description="Secret key associated with the access key for authenticating with OCI Object Storage",
    default=None,
)
@@ -1,5 +1,3 @@
from typing import Optional

from pydantic import Field
from pydantic_settings import BaseSettings

@@ -9,17 +7,17 @@ class SupabaseStorageConfig(BaseSettings):
Configuration settings for Supabase Object Storage Service
"""

SUPABASE_BUCKET_NAME: Optional[str] = Field(
SUPABASE_BUCKET_NAME: str | None = Field(
    description="Name of the Supabase bucket to store and retrieve objects (e.g., 'dify-bucket')",
    default=None,
)

SUPABASE_API_KEY: Optional[str] = Field(
SUPABASE_API_KEY: str | None = Field(
    description="API key for authenticating with Supabase",
    default=None,
)

SUPABASE_URL: Optional[str] = Field(
SUPABASE_URL: str | None = Field(
    description="URL of the Supabase instance",
    default=None,
)
@@ -1,5 +1,3 @@
from typing import Optional

from pydantic import Field
from pydantic_settings import BaseSettings

@@ -9,27 +7,27 @@ class TencentCloudCOSStorageConfig(BaseSettings):
Configuration settings for Tencent Cloud Object Storage (COS)
"""

TENCENT_COS_BUCKET_NAME: Optional[str] = Field(
TENCENT_COS_BUCKET_NAME: str | None = Field(
    description="Name of the Tencent Cloud COS bucket to store and retrieve objects",
    default=None,
)

TENCENT_COS_REGION: Optional[str] = Field(
TENCENT_COS_REGION: str | None = Field(
    description="Tencent Cloud region where the COS bucket is located (e.g., 'ap-guangzhou')",
    default=None,
)

TENCENT_COS_SECRET_ID: Optional[str] = Field(
TENCENT_COS_SECRET_ID: str | None = Field(
    description="SecretId for authenticating with Tencent Cloud COS (part of API credentials)",
    default=None,
)

TENCENT_COS_SECRET_KEY: Optional[str] = Field(
TENCENT_COS_SECRET_KEY: str | None = Field(
    description="SecretKey for authenticating with Tencent Cloud COS (part of API credentials)",
    default=None,
)

TENCENT_COS_SCHEME: Optional[str] = Field(
TENCENT_COS_SCHEME: str | None = Field(
    description="Protocol scheme for COS requests: 'https' (recommended) or 'http'",
    default=None,
)
@@ -1,5 +1,3 @@
from typing import Optional

from pydantic import Field
from pydantic_settings import BaseSettings

@@ -9,27 +7,27 @@ class VolcengineTOSStorageConfig(BaseSettings):
Configuration settings for Volcengine Tinder Object Storage (TOS)
"""

VOLCENGINE_TOS_BUCKET_NAME: Optional[str] = Field(
VOLCENGINE_TOS_BUCKET_NAME: str | None = Field(
    description="Name of the Volcengine TOS bucket to store and retrieve objects (e.g., 'my-tos-bucket')",
    default=None,
)

VOLCENGINE_TOS_ACCESS_KEY: Optional[str] = Field(
VOLCENGINE_TOS_ACCESS_KEY: str | None = Field(
    description="Access Key ID for authenticating with Volcengine TOS",
    default=None,
)

VOLCENGINE_TOS_SECRET_KEY: Optional[str] = Field(
VOLCENGINE_TOS_SECRET_KEY: str | None = Field(
    description="Secret Access Key for authenticating with Volcengine TOS",
    default=None,
)

VOLCENGINE_TOS_ENDPOINT: Optional[str] = Field(
VOLCENGINE_TOS_ENDPOINT: str | None = Field(
    description="URL of the Volcengine TOS endpoint (e.g., 'https://tos-cn-beijing.volces.com')",
    default=None,
)

VOLCENGINE_TOS_REGION: Optional[str] = Field(
VOLCENGINE_TOS_REGION: str | None = Field(
    description="Volcengine region where the TOS bucket is located (e.g., 'cn-beijing')",
    default=None,
)
@@ -1,5 +1,3 @@
from typing import Optional

from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings

@@ -11,37 +9,37 @@ class AnalyticdbConfig(BaseSettings):
https://www.alibabacloud.com/help/en/analyticdb-for-postgresql/getting-started/create-an-instance-instances-with-vector-engine-optimization-enabled
"""

ANALYTICDB_KEY_ID: Optional[str] = Field(
ANALYTICDB_KEY_ID: str | None = Field(
    default=None, description="The Access Key ID provided by Alibaba Cloud for API authentication."
)
ANALYTICDB_KEY_SECRET: Optional[str] = Field(
ANALYTICDB_KEY_SECRET: str | None = Field(
    default=None, description="The Secret Access Key corresponding to the Access Key ID for secure API access."
)
ANALYTICDB_REGION_ID: Optional[str] = Field(
ANALYTICDB_REGION_ID: str | None = Field(
    default=None,
    description="The region where the AnalyticDB instance is deployed (e.g., 'cn-hangzhou', 'ap-southeast-1').",
)
ANALYTICDB_INSTANCE_ID: Optional[str] = Field(
ANALYTICDB_INSTANCE_ID: str | None = Field(
    default=None,
    description="The unique identifier of the AnalyticDB instance you want to connect to.",
)
ANALYTICDB_ACCOUNT: Optional[str] = Field(
ANALYTICDB_ACCOUNT: str | None = Field(
    default=None,
    description="The account name used to log in to the AnalyticDB instance"
    " (usually the initial account created with the instance).",
)
ANALYTICDB_PASSWORD: Optional[str] = Field(
ANALYTICDB_PASSWORD: str | None = Field(
    default=None, description="The password associated with the AnalyticDB account for database authentication."
)
ANALYTICDB_NAMESPACE: Optional[str] = Field(
ANALYTICDB_NAMESPACE: str | None = Field(
    default=None, description="The namespace within AnalyticDB for schema isolation (if using namespace feature)."
)
ANALYTICDB_NAMESPACE_PASSWORD: Optional[str] = Field(
ANALYTICDB_NAMESPACE_PASSWORD: str | None = Field(
    default=None,
    description="The password for accessing the specified namespace within the AnalyticDB instance"
    " (if namespace feature is enabled).",
)
ANALYTICDB_HOST: Optional[str] = Field(
ANALYTICDB_HOST: str | None = Field(
    default=None, description="The host of the AnalyticDB instance you want to connect to."
)
ANALYTICDB_PORT: PositiveInt = Field(
@@ -1,5 +1,3 @@
from typing import Optional

from pydantic import Field, NonNegativeInt, PositiveInt
from pydantic_settings import BaseSettings

@@ -9,7 +7,7 @@ class BaiduVectorDBConfig(BaseSettings):
Configuration settings for Baidu Vector Database
"""

BAIDU_VECTOR_DB_ENDPOINT: Optional[str] = Field(
BAIDU_VECTOR_DB_ENDPOINT: str | None = Field(
    description="URL of the Baidu Vector Database service (e.g., 'http://vdb.bj.baidubce.com')",
    default=None,
)
@@ -19,17 +17,17 @@ class BaiduVectorDBConfig(BaseSettings):
    default=30000,
)

BAIDU_VECTOR_DB_ACCOUNT: Optional[str] = Field(
BAIDU_VECTOR_DB_ACCOUNT: str | None = Field(
    description="Account for authenticating with the Baidu Vector Database",
    default=None,
)

BAIDU_VECTOR_DB_API_KEY: Optional[str] = Field(
BAIDU_VECTOR_DB_API_KEY: str | None = Field(
    description="API key for authenticating with the Baidu Vector Database service",
    default=None,
)

BAIDU_VECTOR_DB_DATABASE: Optional[str] = Field(
BAIDU_VECTOR_DB_DATABASE: str | None = Field(
    description="Name of the specific Baidu Vector Database to connect to",
    default=None,
)
@@ -43,3 +41,13 @@ class BaiduVectorDBConfig(BaseSettings):
    description="Number of replicas for the Baidu Vector Database (default is 3)",
    default=3,
)

BAIDU_VECTOR_DB_INVERTED_INDEX_ANALYZER: str = Field(
    description="Analyzer type for inverted index in Baidu Vector Database (default is DEFAULT_ANALYZER)",
    default="DEFAULT_ANALYZER",
)

BAIDU_VECTOR_DB_INVERTED_INDEX_PARSER_MODE: str = Field(
    description="Parser mode for inverted index in Baidu Vector Database (default is COARSE_MODE)",
    default="COARSE_MODE",
)
@@ -1,5 +1,3 @@
from typing import Optional

from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings

@@ -9,7 +7,7 @@ class ChromaConfig(BaseSettings):
Configuration settings for Chroma vector database
"""

CHROMA_HOST: Optional[str] = Field(
CHROMA_HOST: str | None = Field(
    description="Hostname or IP address of the Chroma server (e.g., 'localhost' or '192.168.1.100')",
    default=None,
)
@@ -19,22 +17,22 @@ class ChromaConfig(BaseSettings):
    default=8000,
)

CHROMA_TENANT: Optional[str] = Field(
CHROMA_TENANT: str | None = Field(
    description="Tenant identifier for multi-tenancy support in Chroma",
    default=None,
)

CHROMA_DATABASE: Optional[str] = Field(
CHROMA_DATABASE: str | None = Field(
    description="Name of the Chroma database to connect to",
    default=None,
)

CHROMA_AUTH_PROVIDER: Optional[str] = Field(
CHROMA_AUTH_PROVIDER: str | None = Field(
    description="Authentication provider for Chroma (e.g., 'basic', 'token', or a custom provider)",
    default=None,
)

CHROMA_AUTH_CREDENTIALS: Optional[str] = Field(
CHROMA_AUTH_CREDENTIALS: str | None = Field(
    description="Authentication credentials for Chroma (format depends on the auth provider)",
    default=None,
)
api/configs/middleware/vdb/clickzetta_config.py (new file, 68 lines)
@@ -0,0 +1,68 @@
+from pydantic import Field
+from pydantic_settings import BaseSettings
+
+
+class ClickzettaConfig(BaseSettings):
+    """
+    Clickzetta Lakehouse vector database configuration
+    """
+
+    CLICKZETTA_USERNAME: str | None = Field(
+        description="Username for authenticating with Clickzetta Lakehouse",
+        default=None,
+    )
+
+    CLICKZETTA_PASSWORD: str | None = Field(
+        description="Password for authenticating with Clickzetta Lakehouse",
+        default=None,
+    )
+
+    CLICKZETTA_INSTANCE: str | None = Field(
+        description="Clickzetta Lakehouse instance ID",
+        default=None,
+    )
+
+    CLICKZETTA_SERVICE: str | None = Field(
+        description="Clickzetta API service endpoint (e.g., 'api.clickzetta.com')",
+        default="api.clickzetta.com",
+    )
+
+    CLICKZETTA_WORKSPACE: str | None = Field(
+        description="Clickzetta workspace name",
+        default="default",
+    )
+
+    CLICKZETTA_VCLUSTER: str | None = Field(
+        description="Clickzetta virtual cluster name",
+        default="default_ap",
+    )
+
+    CLICKZETTA_SCHEMA: str | None = Field(
+        description="Database schema name in Clickzetta",
+        default="public",
+    )
+
+    CLICKZETTA_BATCH_SIZE: int | None = Field(
+        description="Batch size for bulk insert operations",
+        default=100,
+    )
+
+    CLICKZETTA_ENABLE_INVERTED_INDEX: bool | None = Field(
+        description="Enable inverted index for full-text search capabilities",
+        default=True,
+    )
+
+    CLICKZETTA_ANALYZER_TYPE: str | None = Field(
+        description="Analyzer type for full-text search: keyword, english, chinese, unicode",
+        default="chinese",
+    )
+
+    CLICKZETTA_ANALYZER_MODE: str | None = Field(
+        description="Analyzer mode for tokenization: max_word (fine-grained) or smart (intelligent)",
+        default="smart",
+    )
+
+    CLICKZETTA_VECTOR_DISTANCE_FUNCTION: str | None = Field(
+        description="Distance function for vector similarity: l2_distance or cosine_distance",
+        default="cosine_distance",
+    )
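
Note on the new file above: because ClickzettaConfig subclasses pydantic-settings' BaseSettings,
every field can be populated from an environment variable of the same name and falls back to the
declared default otherwise. A minimal sketch of that behavior (values are illustrative, and it
assumes ClickzettaConfig is importable):

    import os

    os.environ["CLICKZETTA_USERNAME"] = "demo_user"
    os.environ["CLICKZETTA_BATCH_SIZE"] = "500"

    config = ClickzettaConfig()
    print(config.CLICKZETTA_USERNAME)    # "demo_user" (read from the environment)
    print(config.CLICKZETTA_BATCH_SIZE)  # 500 (string coerced to int)
    print(config.CLICKZETTA_WORKSPACE)   # "default" (declared default)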
@@ -1,5 +1,3 @@
-from typing import Optional
-
 from pydantic import Field
 from pydantic_settings import BaseSettings
 
@@ -9,27 +7,27 @@ class CouchbaseConfig(BaseSettings):
     Couchbase configs
     """
 
-    COUCHBASE_CONNECTION_STRING: Optional[str] = Field(
+    COUCHBASE_CONNECTION_STRING: str | None = Field(
         description="COUCHBASE connection string",
         default=None,
     )
 
-    COUCHBASE_USER: Optional[str] = Field(
+    COUCHBASE_USER: str | None = Field(
         description="COUCHBASE user",
         default=None,
     )
 
-    COUCHBASE_PASSWORD: Optional[str] = Field(
+    COUCHBASE_PASSWORD: str | None = Field(
         description="COUCHBASE password",
         default=None,
     )
 
-    COUCHBASE_BUCKET_NAME: Optional[str] = Field(
+    COUCHBASE_BUCKET_NAME: str | None = Field(
         description="COUCHBASE bucket name",
         default=None,
     )
 
-    COUCHBASE_SCOPE_NAME: Optional[str] = Field(
+    COUCHBASE_SCOPE_NAME: str | None = Field(
         description="COUCHBASE scope name",
         default=None,
     )
@@ -1,15 +1,14 @@
-from typing import Optional
-
-from pydantic import Field, PositiveInt
+from pydantic import Field, PositiveInt, model_validator
 from pydantic_settings import BaseSettings
 
 
 class ElasticsearchConfig(BaseSettings):
     """
-    Configuration settings for Elasticsearch
+    Configuration settings for both self-managed and Elastic Cloud deployments.
+    Can load from environment variables or .env files.
     """
 
-    ELASTICSEARCH_HOST: Optional[str] = Field(
+    ELASTICSEARCH_HOST: str | None = Field(
         description="Hostname or IP address of the Elasticsearch server (e.g., 'localhost' or '192.168.1.100')",
         default="127.0.0.1",
     )
@@ -19,12 +18,57 @@ class ElasticsearchConfig(BaseSettings):
         default=9200,
     )
 
-    ELASTICSEARCH_USERNAME: Optional[str] = Field(
+    ELASTICSEARCH_USERNAME: str | None = Field(
         description="Username for authenticating with Elasticsearch (default is 'elastic')",
         default="elastic",
     )
 
-    ELASTICSEARCH_PASSWORD: Optional[str] = Field(
+    ELASTICSEARCH_PASSWORD: str | None = Field(
         description="Password for authenticating with Elasticsearch (default is 'elastic')",
         default="elastic",
     )
+
+    # Elastic Cloud (optional)
+    ELASTICSEARCH_USE_CLOUD: bool | None = Field(
+        description="Set to True to use Elastic Cloud instead of self-hosted Elasticsearch", default=False
+    )
+    ELASTICSEARCH_CLOUD_URL: str | None = Field(
+        description="Full URL for Elastic Cloud deployment (e.g., 'https://example.es.region.aws.found.io:443')",
+        default=None,
+    )
+    ELASTICSEARCH_API_KEY: str | None = Field(description="API key for authenticating with Elastic Cloud", default=None)
+
+    # Common options
+    ELASTICSEARCH_CA_CERTS: str | None = Field(
+        description="Path to CA certificate file for SSL verification", default=None
+    )
+    ELASTICSEARCH_VERIFY_CERTS: bool = Field(
+        description="Whether to verify SSL certificates (default is False)", default=False
+    )
+    ELASTICSEARCH_REQUEST_TIMEOUT: int = Field(
+        description="Request timeout in milliseconds (default is 100000)", default=100000
+    )
+    ELASTICSEARCH_RETRY_ON_TIMEOUT: bool = Field(
+        description="Whether to retry requests on timeout (default is True)", default=True
+    )
+    ELASTICSEARCH_MAX_RETRIES: int = Field(
+        description="Maximum number of retry attempts (default is 10000)", default=10000
+    )
+
+    @model_validator(mode="after")
+    def validate_elasticsearch_config(self):
+        """Validate Elasticsearch configuration based on deployment type."""
+        if self.ELASTICSEARCH_USE_CLOUD:
+            if not self.ELASTICSEARCH_CLOUD_URL:
+                raise ValueError("ELASTICSEARCH_CLOUD_URL is required when using Elastic Cloud")
+            if not self.ELASTICSEARCH_API_KEY:
+                raise ValueError("ELASTICSEARCH_API_KEY is required when using Elastic Cloud")
+        else:
+            if not self.ELASTICSEARCH_HOST:
+                raise ValueError("ELASTICSEARCH_HOST is required for self-hosted Elasticsearch")
+            if not self.ELASTICSEARCH_USERNAME:
+                raise ValueError("ELASTICSEARCH_USERNAME is required for self-hosted Elasticsearch")
+            if not self.ELASTICSEARCH_PASSWORD:
+                raise ValueError("ELASTICSEARCH_PASSWORD is required for self-hosted Elasticsearch")
+
+        return self
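
Note on the hunk above: the @model_validator(mode="after") hook runs once all fields are
populated, so a misconfigured deployment now fails at construction time instead of at the first
query. A sketch of both paths (pydantic wraps the ValueError in a ValidationError, which is
itself a ValueError subclass):

    try:
        ElasticsearchConfig(ELASTICSEARCH_USE_CLOUD=True)
    except ValueError as e:
        print(e)  # message includes "ELASTICSEARCH_CLOUD_URL is required when using Elastic Cloud"

    config = ElasticsearchConfig()    # self-hosted path: HOST/USERNAME/PASSWORD have defaults
    print(config.ELASTICSEARCH_HOST)  # "127.0.0.1"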
@@ -1,5 +1,3 @@
-from typing import Optional
-
 from pydantic import Field
 from pydantic_settings import BaseSettings
 
@@ -9,17 +7,17 @@ class HuaweiCloudConfig(BaseSettings):
     Configuration settings for Huawei cloud search service
     """
 
-    HUAWEI_CLOUD_HOSTS: Optional[str] = Field(
+    HUAWEI_CLOUD_HOSTS: str | None = Field(
         description="Hostname or IP address of the Huawei cloud search service instance",
         default=None,
     )
 
-    HUAWEI_CLOUD_USER: Optional[str] = Field(
+    HUAWEI_CLOUD_USER: str | None = Field(
         description="Username for authenticating with Huawei cloud search service",
         default=None,
     )
 
-    HUAWEI_CLOUD_PASSWORD: Optional[str] = Field(
+    HUAWEI_CLOUD_PASSWORD: str | None = Field(
         description="Password for authenticating with Huawei cloud search service",
         default=None,
     )
@@ -1,5 +1,3 @@
-from typing import Optional
-
 from pydantic import Field
 from pydantic_settings import BaseSettings
 
@@ -9,27 +7,27 @@ class LindormConfig(BaseSettings):
     Lindorm configs
     """
 
-    LINDORM_URL: Optional[str] = Field(
+    LINDORM_URL: str | None = Field(
         description="Lindorm url",
         default=None,
    )
-    LINDORM_USERNAME: Optional[str] = Field(
+    LINDORM_USERNAME: str | None = Field(
         description="Lindorm user",
         default=None,
    )
-    LINDORM_PASSWORD: Optional[str] = Field(
+    LINDORM_PASSWORD: str | None = Field(
         description="Lindorm password",
         default=None,
    )
-    DEFAULT_INDEX_TYPE: Optional[str] = Field(
+    LINDORM_INDEX_TYPE: str | None = Field(
         description="Lindorm Vector Index Type, hnsw or flat is available in dify",
         default="hnsw",
    )
-    DEFAULT_DISTANCE_TYPE: Optional[str] = Field(
+    LINDORM_DISTANCE_TYPE: str | None = Field(
         description="Vector Distance Type, support l2, cosinesimil, innerproduct", default="l2"
    )
-    USING_UGC_INDEX: Optional[bool] = Field(
-        description="Using UGC index will store the same type of Index in a single index but can retrieve separately.",
-        default=False,
+    LINDORM_USING_UGC: bool | None = Field(
+        description="Using UGC index will store indexes with the same IndexType/Dimension in a single big index.",
+        default=True,
    )
-    LINDORM_QUERY_TIMEOUT: Optional[float] = Field(description="The lindorm search request timeout (s)", default=2.0)
+    LINDORM_QUERY_TIMEOUT: float | None = Field(description="The lindorm search request timeout (s)", default=2.0)
@@ -1,7 +1,8 @@
-from pydantic import BaseModel, Field
+from pydantic import Field
+from pydantic_settings import BaseSettings
 
 
-class MatrixoneConfig(BaseModel):
+class MatrixoneConfig(BaseSettings):
     """Matrixone vector database configuration."""
 
     MATRIXONE_HOST: str = Field(default="localhost", description="Host address of the Matrixone server")
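
Note on the hunk above: moving MatrixoneConfig from BaseModel to BaseSettings is a behavioral
fix, not a rename. A BaseModel only sees values passed to its constructor, while BaseSettings
also reads them from the process environment (and .env files, if configured), matching the other
vector-database configs. A minimal sketch (the value is illustrative):

    import os

    os.environ["MATRIXONE_HOST"] = "db.internal"

    config = MatrixoneConfig()
    print(config.MATRIXONE_HOST)  # "db.internal"; as a plain BaseModel this would
                                  # have stayed at the "localhost" default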
@@ -1,5 +1,3 @@
-from typing import Optional
-
 from pydantic import Field
 from pydantic_settings import BaseSettings
 
@@ -9,22 +7,22 @@ class MilvusConfig(BaseSettings):
     Configuration settings for Milvus vector database
     """
 
-    MILVUS_URI: Optional[str] = Field(
+    MILVUS_URI: str | None = Field(
         description="URI for connecting to the Milvus server (e.g., 'http://localhost:19530' or 'https://milvus-instance.example.com:19530')",
         default="http://127.0.0.1:19530",
     )
 
-    MILVUS_TOKEN: Optional[str] = Field(
+    MILVUS_TOKEN: str | None = Field(
         description="Authentication token for Milvus, if token-based authentication is enabled",
         default=None,
     )
 
-    MILVUS_USER: Optional[str] = Field(
+    MILVUS_USER: str | None = Field(
         description="Username for authenticating with Milvus, if username/password authentication is enabled",
         default=None,
     )
 
-    MILVUS_PASSWORD: Optional[str] = Field(
+    MILVUS_PASSWORD: str | None = Field(
         description="Password for authenticating with Milvus, if username/password authentication is enabled",
         default=None,
     )
@@ -40,7 +38,7 @@ class MilvusConfig(BaseSettings):
         default=True,
     )
 
-    MILVUS_ANALYZER_PARAMS: Optional[str] = Field(
+    MILVUS_ANALYZER_PARAMS: str | None = Field(
         description='Milvus text analyzer parameters, e.g., {"type": "chinese"} for Chinese segmentation support.',
         default=None,
     )
@@ -1,5 +1,3 @@
-from typing import Optional
-
 from pydantic import Field, PositiveInt
 from pydantic_settings import BaseSettings
 
@@ -9,27 +7,27 @@ class OceanBaseVectorConfig(BaseSettings):
     Configuration settings for OceanBase Vector database
     """
 
-    OCEANBASE_VECTOR_HOST: Optional[str] = Field(
+    OCEANBASE_VECTOR_HOST: str | None = Field(
         description="Hostname or IP address of the OceanBase Vector server (e.g. 'localhost')",
         default=None,
     )
 
-    OCEANBASE_VECTOR_PORT: Optional[PositiveInt] = Field(
+    OCEANBASE_VECTOR_PORT: PositiveInt | None = Field(
         description="Port number on which the OceanBase Vector server is listening (default is 2881)",
         default=2881,
     )
 
-    OCEANBASE_VECTOR_USER: Optional[str] = Field(
+    OCEANBASE_VECTOR_USER: str | None = Field(
         description="Username for authenticating with the OceanBase Vector database",
         default=None,
     )
 
-    OCEANBASE_VECTOR_PASSWORD: Optional[str] = Field(
+    OCEANBASE_VECTOR_PASSWORD: str | None = Field(
         description="Password for authenticating with the OceanBase Vector database",
         default=None,
     )
 
-    OCEANBASE_VECTOR_DATABASE: Optional[str] = Field(
+    OCEANBASE_VECTOR_DATABASE: str | None = Field(
         description="Name of the OceanBase Vector database to connect to",
         default=None,
     )
@@ -39,3 +37,15 @@ class OceanBaseVectorConfig(BaseSettings):
         "with older versions",
         default=False,
     )
+
+    OCEANBASE_FULLTEXT_PARSER: str | None = Field(
+        description=(
+            "Fulltext parser to use for text indexing. "
+            "Built-in options: 'ngram' (N-gram tokenizer for English/numbers), "
+            "'beng' (Basic English tokenizer), 'space' (Space-based tokenizer), "
+            "'ngram2' (Improved N-gram tokenizer), 'ik' (Chinese tokenizer). "
+            "External plugins (require installation): 'japanese_ftparser' (Japanese tokenizer), "
+            "'thai_ftparser' (Thai tokenizer). Default is 'ik'"
+        ),
+        default="ik",
+    )
@@ -1,5 +1,3 @@
-from typing import Optional
-
 from pydantic import Field, PositiveInt
 from pydantic_settings import BaseSettings
 
@@ -9,7 +7,7 @@ class OpenGaussConfig(BaseSettings):
     Configuration settings for OpenGauss
     """
 
-    OPENGAUSS_HOST: Optional[str] = Field(
+    OPENGAUSS_HOST: str | None = Field(
         description="Hostname or IP address of the OpenGauss server(e.g., 'localhost')",
         default=None,
     )
@@ -19,17 +17,17 @@ class OpenGaussConfig(BaseSettings):
         default=6600,
     )
 
-    OPENGAUSS_USER: Optional[str] = Field(
+    OPENGAUSS_USER: str | None = Field(
         description="Username for authenticating with the OpenGauss database",
         default=None,
     )
 
-    OPENGAUSS_PASSWORD: Optional[str] = Field(
+    OPENGAUSS_PASSWORD: str | None = Field(
         description="Password for authenticating with the OpenGauss database",
         default=None,
     )
 
-    OPENGAUSS_DATABASE: Optional[str] = Field(
+    OPENGAUSS_DATABASE: str | None = Field(
         description="Name of the OpenGauss database to connect to",
         default=None,
     )
@@ -1,5 +1,5 @@
-import enum
-from typing import Literal, Optional
+from enum import Enum
+from typing import Literal
 
 from pydantic import Field, PositiveInt
 from pydantic_settings import BaseSettings
@@ -10,7 +10,7 @@ class OpenSearchConfig(BaseSettings):
     Configuration settings for OpenSearch
     """
 
-    class AuthMethod(enum.StrEnum):
+    class AuthMethod(Enum):
         """
         Authentication method for OpenSearch
         """
@@ -18,7 +18,7 @@ class OpenSearchConfig(BaseSettings):
         BASIC = "basic"
         AWS_MANAGED_IAM = "aws_managed_iam"
 
-    OPENSEARCH_HOST: Optional[str] = Field(
+    OPENSEARCH_HOST: str | None = Field(
         description="Hostname or IP address of the OpenSearch server (e.g., 'localhost' or 'opensearch.example.com')",
         default=None,
     )
@@ -43,21 +43,21 @@ class OpenSearchConfig(BaseSettings):
         default=AuthMethod.BASIC,
     )
 
-    OPENSEARCH_USER: Optional[str] = Field(
+    OPENSEARCH_USER: str | None = Field(
         description="Username for authenticating with OpenSearch",
         default=None,
     )
 
-    OPENSEARCH_PASSWORD: Optional[str] = Field(
+    OPENSEARCH_PASSWORD: str | None = Field(
         description="Password for authenticating with OpenSearch",
         default=None,
     )
 
-    OPENSEARCH_AWS_REGION: Optional[str] = Field(
+    OPENSEARCH_AWS_REGION: str | None = Field(
         description="AWS region for OpenSearch (e.g. 'us-west-2')",
         default=None,
     )
 
-    OPENSEARCH_AWS_SERVICE: Optional[Literal["es", "aoss"]] = Field(
+    OPENSEARCH_AWS_SERVICE: Literal["es", "aoss"] | None = Field(
         description="AWS service for OpenSearch (e.g. 'aoss' for OpenSearch Serverless)", default=None
     )
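
Note on the hunk above: reverting AuthMethod from enum.StrEnum to a plain Enum changes comparison
semantics. StrEnum members (Python 3.11+) are str instances and compare equal to bare strings,
while plain Enum members do not, so call sites must compare against the member itself or its
.value. A minimal illustration:

    from enum import Enum, StrEnum  # StrEnum requires Python 3.11+

    class A(StrEnum):
        BASIC = "basic"

    class B(Enum):
        BASIC = "basic"

    print(A.BASIC == "basic")        # True: StrEnum members are strings
    print(B.BASIC == "basic")        # False: plain Enum members are not
    print(B.BASIC.value == "basic")  # True: compare through .value instead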
@@ -1,5 +1,3 @@
-from typing import Optional
-
 from pydantic import Field
 from pydantic_settings import BaseSettings
 
@@ -9,33 +7,33 @@ class OracleConfig(BaseSettings):
     Configuration settings for Oracle database
     """
 
-    ORACLE_USER: Optional[str] = Field(
+    ORACLE_USER: str | None = Field(
         description="Username for authenticating with the Oracle database",
         default=None,
     )
 
-    ORACLE_PASSWORD: Optional[str] = Field(
+    ORACLE_PASSWORD: str | None = Field(
         description="Password for authenticating with the Oracle database",
         default=None,
     )
 
-    ORACLE_DSN: Optional[str] = Field(
+    ORACLE_DSN: str | None = Field(
         description="Oracle database connection string. For traditional database, use format 'host:port/service_name'. "
         "For autonomous database, use the service name from tnsnames.ora in the wallet",
         default=None,
     )
 
-    ORACLE_CONFIG_DIR: Optional[str] = Field(
+    ORACLE_CONFIG_DIR: str | None = Field(
         description="Directory containing the tnsnames.ora configuration file. Only used in thin mode connection",
         default=None,
     )
 
-    ORACLE_WALLET_LOCATION: Optional[str] = Field(
+    ORACLE_WALLET_LOCATION: str | None = Field(
         description="Oracle wallet directory path containing the wallet files for secure connection",
         default=None,
     )
 
-    ORACLE_WALLET_PASSWORD: Optional[str] = Field(
+    ORACLE_WALLET_PASSWORD: str | None = Field(
         description="Password to decrypt the Oracle wallet, if it is encrypted",
         default=None,
     )
@@ -1,5 +1,3 @@
-from typing import Optional
-
 from pydantic import Field, PositiveInt
 from pydantic_settings import BaseSettings
 
@@ -9,7 +7,7 @@ class PGVectorConfig(BaseSettings):
     Configuration settings for PGVector (PostgreSQL with vector extension)
     """
 
-    PGVECTOR_HOST: Optional[str] = Field(
+    PGVECTOR_HOST: str | None = Field(
         description="Hostname or IP address of the PostgreSQL server with PGVector extension (e.g., 'localhost')",
         default=None,
     )
@@ -19,17 +17,17 @@ class PGVectorConfig(BaseSettings):
         default=5433,
     )
 
-    PGVECTOR_USER: Optional[str] = Field(
+    PGVECTOR_USER: str | None = Field(
         description="Username for authenticating with the PostgreSQL database",
         default=None,
     )
 
-    PGVECTOR_PASSWORD: Optional[str] = Field(
+    PGVECTOR_PASSWORD: str | None = Field(
         description="Password for authenticating with the PostgreSQL database",
         default=None,
     )
 
-    PGVECTOR_DATABASE: Optional[str] = Field(
+    PGVECTOR_DATABASE: str | None = Field(
         description="Name of the PostgreSQL database to connect to",
         default=None,
     )
@@ -1,5 +1,3 @@
-from typing import Optional
-
 from pydantic import Field, PositiveInt
 from pydantic_settings import BaseSettings
 
@@ -9,7 +7,7 @@ class PGVectoRSConfig(BaseSettings):
     Configuration settings for PGVecto.RS (Rust-based vector extension for PostgreSQL)
     """
 
-    PGVECTO_RS_HOST: Optional[str] = Field(
+    PGVECTO_RS_HOST: str | None = Field(
         description="Hostname or IP address of the PostgreSQL server with PGVecto.RS extension (e.g., 'localhost')",
         default=None,
     )
@@ -19,17 +17,17 @@ class PGVectoRSConfig(BaseSettings):
         default=5431,
     )
 
-    PGVECTO_RS_USER: Optional[str] = Field(
+    PGVECTO_RS_USER: str | None = Field(
         description="Username for authenticating with the PostgreSQL database using PGVecto.RS",
         default=None,
     )
 
-    PGVECTO_RS_PASSWORD: Optional[str] = Field(
+    PGVECTO_RS_PASSWORD: str | None = Field(
         description="Password for authenticating with the PostgreSQL database using PGVecto.RS",
         default=None,
     )
 
-    PGVECTO_RS_DATABASE: Optional[str] = Field(
+    PGVECTO_RS_DATABASE: str | None = Field(
         description="Name of the PostgreSQL database with PGVecto.RS extension to connect to",
         default=None,
     )
@@ -1,5 +1,3 @@
-from typing import Optional
-
 from pydantic import Field, NonNegativeInt, PositiveInt
 from pydantic_settings import BaseSettings
 
@@ -9,12 +7,12 @@ class QdrantConfig(BaseSettings):
     Configuration settings for Qdrant vector database
     """
 
-    QDRANT_URL: Optional[str] = Field(
+    QDRANT_URL: str | None = Field(
         description="URL of the Qdrant server (e.g., 'http://localhost:6333' or 'https://qdrant.example.com')",
         default=None,
     )
 
-    QDRANT_API_KEY: Optional[str] = Field(
+    QDRANT_API_KEY: str | None = Field(
         description="API key for authenticating with the Qdrant server",
         default=None,
     )
@@ -1,5 +1,3 @@
-from typing import Optional
-
 from pydantic import Field, PositiveInt
 from pydantic_settings import BaseSettings
 
@@ -9,7 +7,7 @@ class RelytConfig(BaseSettings):
     Configuration settings for Relyt database
     """
 
-    RELYT_HOST: Optional[str] = Field(
+    RELYT_HOST: str | None = Field(
         description="Hostname or IP address of the Relyt server (e.g., 'localhost' or 'relyt.example.com')",
         default=None,
     )
@@ -19,17 +17,17 @@ class RelytConfig(BaseSettings):
         default=9200,
     )
 
-    RELYT_USER: Optional[str] = Field(
+    RELYT_USER: str | None = Field(
         description="Username for authenticating with the Relyt database",
         default=None,
     )
 
-    RELYT_PASSWORD: Optional[str] = Field(
+    RELYT_PASSWORD: str | None = Field(
         description="Password for authenticating with the Relyt database",
         default=None,
     )
 
-    RELYT_DATABASE: Optional[str] = Field(
+    RELYT_DATABASE: str | None = Field(
         description="Name of the Relyt database to connect to (default is 'default')",
         default="default",
     )
@@ -1,5 +1,3 @@
-from typing import Optional
-
 from pydantic import Field
 from pydantic_settings import BaseSettings
 
@@ -9,22 +7,27 @@ class TableStoreConfig(BaseSettings):
     Configuration settings for TableStore.
     """
 
-    TABLESTORE_ENDPOINT: Optional[str] = Field(
+    TABLESTORE_ENDPOINT: str | None = Field(
         description="Endpoint address of the TableStore server (e.g. 'https://instance-name.cn-hangzhou.ots.aliyuncs.com')",
         default=None,
     )
 
-    TABLESTORE_INSTANCE_NAME: Optional[str] = Field(
+    TABLESTORE_INSTANCE_NAME: str | None = Field(
         description="Instance name to access TableStore server (eg. 'instance-name')",
         default=None,
     )
 
-    TABLESTORE_ACCESS_KEY_ID: Optional[str] = Field(
+    TABLESTORE_ACCESS_KEY_ID: str | None = Field(
         description="AccessKey id for the instance name",
         default=None,
     )
 
-    TABLESTORE_ACCESS_KEY_SECRET: Optional[str] = Field(
+    TABLESTORE_ACCESS_KEY_SECRET: str | None = Field(
         description="AccessKey secret for the instance name",
         default=None,
     )
+
+    TABLESTORE_NORMALIZE_FULLTEXT_BM25_SCORE: bool = Field(
+        description="Whether to normalize full-text search scores to [0, 1]",
+        default=False,
+    )
@@ -1,5 +1,3 @@
-from typing import Optional
-
 from pydantic import Field, NonNegativeInt, PositiveInt
 from pydantic_settings import BaseSettings
 
@@ -9,12 +7,12 @@ class TencentVectorDBConfig(BaseSettings):
     Configuration settings for Tencent Vector Database
     """
 
-    TENCENT_VECTOR_DB_URL: Optional[str] = Field(
+    TENCENT_VECTOR_DB_URL: str | None = Field(
         description="URL of the Tencent Vector Database service (e.g., 'https://vectordb.tencentcloudapi.com')",
         default=None,
     )
 
-    TENCENT_VECTOR_DB_API_KEY: Optional[str] = Field(
+    TENCENT_VECTOR_DB_API_KEY: str | None = Field(
         description="API key for authenticating with the Tencent Vector Database service",
         default=None,
     )
@@ -24,12 +22,12 @@ class TencentVectorDBConfig(BaseSettings):
         default=30,
     )
 
-    TENCENT_VECTOR_DB_USERNAME: Optional[str] = Field(
+    TENCENT_VECTOR_DB_USERNAME: str | None = Field(
         description="Username for authenticating with the Tencent Vector Database (if required)",
         default=None,
     )
 
-    TENCENT_VECTOR_DB_PASSWORD: Optional[str] = Field(
+    TENCENT_VECTOR_DB_PASSWORD: str | None = Field(
         description="Password for authenticating with the Tencent Vector Database (if required)",
         default=None,
     )
@@ -44,7 +42,7 @@ class TencentVectorDBConfig(BaseSettings):
         default=2,
     )
 
-    TENCENT_VECTOR_DB_DATABASE: Optional[str] = Field(
+    TENCENT_VECTOR_DB_DATABASE: str | None = Field(
         description="Name of the specific Tencent Vector Database to connect to",
         default=None,
     )
@@ -1,5 +1,3 @@
-from typing import Optional
-
 from pydantic import Field, NonNegativeInt, PositiveInt
 from pydantic_settings import BaseSettings
 
@@ -9,12 +7,12 @@ class TidbOnQdrantConfig(BaseSettings):
     Tidb on Qdrant configs
     """
 
-    TIDB_ON_QDRANT_URL: Optional[str] = Field(
+    TIDB_ON_QDRANT_URL: str | None = Field(
         description="Tidb on Qdrant url",
         default=None,
    )
 
-    TIDB_ON_QDRANT_API_KEY: Optional[str] = Field(
+    TIDB_ON_QDRANT_API_KEY: str | None = Field(
         description="Tidb on Qdrant api key",
         default=None,
    )
@@ -34,37 +32,37 @@ class TidbOnQdrantConfig(BaseSettings):
         default=6334,
     )
 
-    TIDB_PUBLIC_KEY: Optional[str] = Field(
+    TIDB_PUBLIC_KEY: str | None = Field(
         description="Tidb account public key",
         default=None,
     )
 
-    TIDB_PRIVATE_KEY: Optional[str] = Field(
+    TIDB_PRIVATE_KEY: str | None = Field(
         description="Tidb account private key",
         default=None,
     )
 
-    TIDB_API_URL: Optional[str] = Field(
+    TIDB_API_URL: str | None = Field(
         description="Tidb API url",
         default=None,
     )
 
-    TIDB_IAM_API_URL: Optional[str] = Field(
+    TIDB_IAM_API_URL: str | None = Field(
         description="Tidb IAM API url",
         default=None,
     )
 
-    TIDB_REGION: Optional[str] = Field(
+    TIDB_REGION: str | None = Field(
         description="Tidb serverless region",
         default="regions/aws-us-east-1",
     )
 
-    TIDB_PROJECT_ID: Optional[str] = Field(
+    TIDB_PROJECT_ID: str | None = Field(
         description="Tidb project id",
         default=None,
     )
 
-    TIDB_SPEND_LIMIT: Optional[int] = Field(
+    TIDB_SPEND_LIMIT: int | None = Field(
         description="Tidb spend limit",
         default=100,
     )
@@ -1,5 +1,3 @@
-from typing import Optional
-
 from pydantic import Field, PositiveInt
 from pydantic_settings import BaseSettings
 
@@ -9,27 +7,27 @@ class TiDBVectorConfig(BaseSettings):
     Configuration settings for TiDB Vector database
     """
 
-    TIDB_VECTOR_HOST: Optional[str] = Field(
+    TIDB_VECTOR_HOST: str | None = Field(
         description="Hostname or IP address of the TiDB Vector server (e.g., 'localhost' or 'tidb.example.com')",
         default=None,
     )
 
-    TIDB_VECTOR_PORT: Optional[PositiveInt] = Field(
+    TIDB_VECTOR_PORT: PositiveInt | None = Field(
         description="Port number on which the TiDB Vector server is listening (default is 4000)",
         default=4000,
     )
 
-    TIDB_VECTOR_USER: Optional[str] = Field(
+    TIDB_VECTOR_USER: str | None = Field(
         description="Username for authenticating with the TiDB Vector database",
         default=None,
     )
 
-    TIDB_VECTOR_PASSWORD: Optional[str] = Field(
+    TIDB_VECTOR_PASSWORD: str | None = Field(
         description="Password for authenticating with the TiDB Vector database",
         default=None,
     )
 
-    TIDB_VECTOR_DATABASE: Optional[str] = Field(
+    TIDB_VECTOR_DATABASE: str | None = Field(
         description="Name of the TiDB Vector database to connect to",
         default=None,
     )
@@ -1,5 +1,3 @@
-from typing import Optional
-
 from pydantic import Field
 from pydantic_settings import BaseSettings
 
@@ -9,12 +7,12 @@ class UpstashConfig(BaseSettings):
     Configuration settings for Upstash vector database
     """
 
-    UPSTASH_VECTOR_URL: Optional[str] = Field(
+    UPSTASH_VECTOR_URL: str | None = Field(
         description="URL of the upstash server (e.g., 'https://vector.upstash.io')",
         default=None,
     )
 
-    UPSTASH_VECTOR_TOKEN: Optional[str] = Field(
+    UPSTASH_VECTOR_TOKEN: str | None = Field(
         description="Token for authenticating with the upstash server",
         default=None,
     )
@@ -1,5 +1,3 @@
-from typing import Optional
-
 from pydantic import Field, PositiveInt
 from pydantic_settings import BaseSettings
 
@@ -9,7 +7,7 @@ class VastbaseVectorConfig(BaseSettings):
     Configuration settings for Vector (Vastbase with vector extension)
     """
 
-    VASTBASE_HOST: Optional[str] = Field(
+    VASTBASE_HOST: str | None = Field(
         description="Hostname or IP address of the Vastbase server with Vector extension (e.g., 'localhost')",
         default=None,
     )
@@ -19,17 +17,17 @@ class VastbaseVectorConfig(BaseSettings):
         default=5432,
     )
 
-    VASTBASE_USER: Optional[str] = Field(
+    VASTBASE_USER: str | None = Field(
         description="Username for authenticating with the Vastbase database",
         default=None,
     )
 
-    VASTBASE_PASSWORD: Optional[str] = Field(
+    VASTBASE_PASSWORD: str | None = Field(
         description="Password for authenticating with the Vastbase database",
         default=None,
    )
 
-    VASTBASE_DATABASE: Optional[str] = Field(
+    VASTBASE_DATABASE: str | None = Field(
         description="Name of the Vastbase database to connect to",
         default=None,
    )
@@ -1,5 +1,3 @@
-from typing import Optional
-
 from pydantic import Field
 from pydantic_settings import BaseSettings
 
@@ -11,14 +9,14 @@ class VikingDBConfig(BaseSettings):
     https://www.volcengine.com/docs/6291/65568
     """
 
-    VIKINGDB_ACCESS_KEY: Optional[str] = Field(
+    VIKINGDB_ACCESS_KEY: str | None = Field(
         description="The Access Key provided by Volcengine VikingDB for API authentication."
         "Refer to the following documentation for details on obtaining credentials:"
         "https://www.volcengine.com/docs/6291/65568",
         default=None,
     )
 
-    VIKINGDB_SECRET_KEY: Optional[str] = Field(
+    VIKINGDB_SECRET_KEY: str | None = Field(
         description="The Secret Key provided by Volcengine VikingDB for API authentication.",
         default=None,
    )
@@ -1,5 +1,3 @@
-from typing import Optional
-
 from pydantic import Field, PositiveInt
 from pydantic_settings import BaseSettings
 
@@ -9,12 +7,12 @@ class WeaviateConfig(BaseSettings):
     Configuration settings for Weaviate vector database
     """
 
-    WEAVIATE_ENDPOINT: Optional[str] = Field(
+    WEAVIATE_ENDPOINT: str | None = Field(
         description="URL of the Weaviate server (e.g., 'http://localhost:8080' or 'https://weaviate.example.com')",
         default=None,
    )
 
-    WEAVIATE_API_KEY: Optional[str] = Field(
+    WEAVIATE_API_KEY: str | None = Field(
         description="API key for authenticating with the Weaviate server",
         default=None,
    )
@@ -1,6 +1,6 @@
 from pydantic import Field
 
-from configs.packaging.pyproject import PyProjectConfig, PyProjectTomlConfig
+from configs.packaging.pyproject import PyProjectTomlConfig
 
 
 class PackagingInfo(PyProjectTomlConfig):
@@ -1,5 +1,5 @@
 from collections.abc import Mapping
-from typing import Any, Optional
+from typing import Any
 
 from pydantic import Field
 from pydantic.fields import FieldInfo
@@ -15,22 +15,22 @@ class ApolloSettingsSourceInfo(BaseSettings):
     Packaging build information
     """
 
-    APOLLO_APP_ID: Optional[str] = Field(
+    APOLLO_APP_ID: str | None = Field(
         description="apollo app_id",
         default=None,
    )
 
-    APOLLO_CLUSTER: Optional[str] = Field(
+    APOLLO_CLUSTER: str | None = Field(
         description="apollo cluster",
         default=None,
    )
 
-    APOLLO_CONFIG_URL: Optional[str] = Field(
+    APOLLO_CONFIG_URL: str | None = Field(
         description="apollo config url",
         default=None,
    )
 
-    APOLLO_NAMESPACE: Optional[str] = Field(
+    APOLLO_NAMESPACE: str | None = Field(
         description="apollo namespace",
         default=None,
    )
@@ -4,8 +4,9 @@ import logging
 import os
 import threading
 import time
-from collections.abc import Mapping
+from collections.abc import Callable, Mapping
+from pathlib import Path
 from typing import Any
 
 from .python_3x import http_request, makedirs_wrapper
 from .utils import (
@@ -25,13 +26,13 @@ logger = logging.getLogger(__name__)
 class ApolloClient:
     def __init__(
         self,
-        config_url,
-        app_id,
-        cluster="default",
-        secret="",
-        start_hot_update=True,
-        change_listener=None,
-        _notification_map=None,
+        config_url: str,
+        app_id: str,
+        cluster: str = "default",
+        secret: str = "",
+        start_hot_update: bool = True,
+        change_listener: Callable[[str, str, str, Any], None] | None = None,
+        _notification_map: dict[str, int] | None = None,
     ):
         # Core routing parameters
         self.config_url = config_url
@@ -47,17 +48,17 @@ class ApolloClient:
         # Private control variables
         self._cycle_time = 5
         self._stopping = False
-        self._cache = {}
-        self._no_key = {}
-        self._hash = {}
+        self._cache: dict[str, dict[str, Any]] = {}
+        self._no_key: dict[str, str] = {}
+        self._hash: dict[str, str] = {}
         self._pull_timeout = 75
         self._cache_file_path = os.path.expanduser("~") + "/.dify/config/remote-settings/apollo/cache/"
-        self._long_poll_thread = None
+        self._long_poll_thread: threading.Thread | None = None
         self._change_listener = change_listener  # "add" "delete" "update"
         if _notification_map is None:
             _notification_map = {"application": -1}
         self._notification_map = _notification_map
-        self.last_release_key = None
+        self.last_release_key: str | None = None
         # Private startup method
         self._path_checker()
         if start_hot_update:
@@ -68,7 +69,7 @@ class ApolloClient:
         heartbeat.daemon = True
         heartbeat.start()
 
-    def get_json_from_net(self, namespace="application"):
+    def get_json_from_net(self, namespace: str = "application") -> dict[str, Any] | None:
         url = "{}/configs/{}/{}/{}?releaseKey={}&ip={}".format(
             self.config_url, self.app_id, self.cluster, namespace, "", self.ip
         )
@@ -76,7 +77,7 @@ class ApolloClient:
             code, body = http_request(url, timeout=3, headers=self._sign_headers(url))
             if code == 200:
                 if not body:
-                    logger.error(f"get_json_from_net load configs failed, body is {body}")
+                    logger.error("get_json_from_net load configs failed, body is %s", body)
                     return None
                 data = json.loads(body)
                 data = data["configurations"]
@@ -88,7 +89,7 @@ class ApolloClient:
             logger.exception("an error occurred in get_json_from_net")
             return None
 
-    def get_value(self, key, default_val=None, namespace="application"):
+    def get_value(self, key: str, default_val: Any = None, namespace: str = "application") -> Any:
         try:
             # read memory configuration
             namespace_cache = self._cache.get(namespace)
@@ -104,7 +105,8 @@ class ApolloClient:
             namespace_data = self.get_json_from_net(namespace)
             val = get_value_from_dict(namespace_data, key)
             if val is not None:
-                self._update_cache_and_file(namespace_data, namespace)
+                if namespace_data is not None:
+                    self._update_cache_and_file(namespace_data, namespace)
                 return val
 
             # read the file configuration
@@ -126,23 +128,23 @@ class ApolloClient:
    # to ensure the real-time correctness of the function call.
    # If the user does not have the same default val twice
    # and the default val is used here, there may be a problem.
-    def _set_local_cache_none(self, namespace, key):
+    def _set_local_cache_none(self, namespace: str, key: str) -> None:
         no_key = no_key_cache_key(namespace, key)
         self._no_key[no_key] = key
 
-    def _start_hot_update(self):
+    def _start_hot_update(self) -> None:
         self._long_poll_thread = threading.Thread(target=self._listener)
         # When the asynchronous thread is started, the daemon thread will automatically exit
         # when the main thread is launched.
         self._long_poll_thread.daemon = True
         self._long_poll_thread.start()
 
-    def stop(self):
+    def stop(self) -> None:
         self._stopping = True
         logger.info("Stopping listener...")
 
    # Call the set callback function, and if it is abnormal, try it out
-    def _call_listener(self, namespace, old_kv, new_kv):
+    def _call_listener(self, namespace: str, old_kv: dict[str, Any] | None, new_kv: dict[str, Any] | None) -> None:
         if self._change_listener is None:
             return
         if old_kv is None:
@@ -168,12 +170,12 @@ class ApolloClient:
         except BaseException as e:
             logger.warning(str(e))
 
-    def _path_checker(self):
+    def _path_checker(self) -> None:
         if not os.path.isdir(self._cache_file_path):
             makedirs_wrapper(self._cache_file_path)
 
    # update the local cache and file cache
-    def _update_cache_and_file(self, namespace_data, namespace="application"):
+    def _update_cache_and_file(self, namespace_data: dict[str, Any], namespace: str = "application") -> None:
         # update the local cache
         self._cache[namespace] = namespace_data
         # update the file cache
@@ -187,7 +189,7 @@ class ApolloClient:
             self._hash[namespace] = new_hash
 
    # get the configuration from the local file
-    def _get_local_cache(self, namespace="application"):
+    def _get_local_cache(self, namespace: str = "application") -> dict[str, Any]:
         cache_file_path = os.path.join(self._cache_file_path, f"{self.app_id}_configuration_{namespace}.txt")
         if os.path.isfile(cache_file_path):
             with open(cache_file_path) as f:
@@ -195,8 +197,8 @@ class ApolloClient:
                 return result
         return {}
 
-    def _long_poll(self):
-        notifications = []
+    def _long_poll(self) -> None:
+        notifications: list[dict[str, Any]] = []
         for key in self._cache:
             namespace_data = self._cache[key]
             notification_id = -1
@@ -207,7 +209,7 @@ class ApolloClient:
         # if the length is 0 it is returned directly
         if len(notifications) == 0:
             return
-        url = "{}/notifications/v2".format(self.config_url)
+        url = f"{self.config_url}/notifications/v2"
         params = {
             "appId": self.app_id,
             "cluster": self.cluster,
@@ -222,7 +224,7 @@ class ApolloClient:
                 return
             if http_code == 200:
                 if not body:
-                    logger.error(f"_long_poll load configs failed,body is {body}")
+                    logger.error("_long_poll load configs failed,body is %s", body)
                     return
                 data = json.loads(body)
                 for entry in data:
@@ -236,7 +238,7 @@ class ApolloClient:
         except Exception as e:
             logger.warning(str(e))
 
-    def _get_net_and_set_local(self, namespace, n_id, call_change=False):
+    def _get_net_and_set_local(self, namespace: str, n_id: int, call_change: bool = False) -> None:
         namespace_data = self.get_json_from_net(namespace)
         if not namespace_data:
             return
@@ -248,7 +250,7 @@ class ApolloClient:
         new_kv = namespace_data.get(CONFIGURATIONS)
         self._call_listener(namespace, old_kv, new_kv)
 
-    def _listener(self):
+    def _listener(self) -> None:
         logger.info("start long_poll")
         while not self._stopping:
             self._long_poll()
@@ -266,19 +268,19 @@ class ApolloClient:
         headers["Timestamp"] = time_unix_now
         return headers
 
-    def _heart_beat(self):
+    def _heart_beat(self) -> None:
         while not self._stopping:
             for namespace in self._notification_map:
                 self._do_heart_beat(namespace)
             time.sleep(60 * 10)  # 10 minutes
 
-    def _do_heart_beat(self, namespace):
-        url = "{}/configs/{}/{}/{}?ip={}".format(self.config_url, self.app_id, self.cluster, namespace, self.ip)
+    def _do_heart_beat(self, namespace: str) -> None:
+        url = f"{self.config_url}/configs/{self.app_id}/{self.cluster}/{namespace}?ip={self.ip}"
         try:
             code, body = http_request(url, timeout=3, headers=self._sign_headers(url))
             if code == 200:
                 if not body:
-                    logger.error(f"_do_heart_beat load configs failed,body is {body}")
+                    logger.error("_do_heart_beat load configs failed,body is %s", body)
                     return None
                 data = json.loads(body)
                 if self.last_release_key == data["releaseKey"]:
@@ -292,7 +294,7 @@ class ApolloClient:
             logger.exception("an error occurred in _do_heart_beat")
             return None
 
-    def get_all_dicts(self, namespace):
+    def get_all_dicts(self, namespace: str) -> dict[str, Any] | None:
         namespace_data = self._cache.get(namespace)
         if namespace_data is None:
             net_namespace_data = self.get_json_from_net(namespace)
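
Note on the hunks above: the constructor now spells out the listener contract in its annotation,
Callable[[str, str, str, Any], None], matching the "add"/"delete"/"update" comment on
self._change_listener. A hedged sketch of a conforming listener follows; the parameter order
(action, namespace, key, value) is an assumption inferred from that comment, and only the
(str, str, str, Any) -> None shape is fixed by the annotation:

    from typing import Any

    def log_change(action: str, namespace: str, key: str, value: Any) -> None:
        # Assumed parameter order; see the note above.
        print(f"[apollo] {action} {namespace}.{key} = {value!r}")

    client = ApolloClient(
        config_url="http://apollo.example.com:8080",  # hypothetical endpoint
        app_id="demo-app",
        change_listener=log_change,
    )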
@@ -2,6 +2,8 @@ import logging
 import os
 import ssl
 import urllib.request
+from collections.abc import Mapping
+from typing import Any
 from urllib import parse
 from urllib.error import HTTPError
 
@@ -19,9 +21,9 @@ urllib.request.install_opener(opener)
 logger = logging.getLogger(__name__)
 
 
-def http_request(url, timeout, headers={}):
+def http_request(url: str, timeout: int | float, headers: Mapping[str, str] = {}) -> tuple[int, str | None]:
     try:
-        request = urllib.request.Request(url, headers=headers)
+        request = urllib.request.Request(url, headers=dict(headers))
         res = urllib.request.urlopen(request, timeout=timeout)
         body = res.read().decode("utf-8")
         return res.code, body
@@ -33,9 +35,9 @@ def http_request(url, timeout, headers={}):
         raise e
 
 
-def url_encode(params):
+def url_encode(params: dict[str, Any]) -> str:
     return parse.urlencode(params)
 
 
-def makedirs_wrapper(path):
+def makedirs_wrapper(path: str) -> None:
     os.makedirs(path, exist_ok=True)
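
Note on the hunk above: http_request keeps the mutable default headers={}, normally a hazard,
but typing it as Mapping[str, str] and copying with dict(headers) before use means the shared
default can no longer be mutated through the request path. The pattern in isolation:

    from collections.abc import Mapping

    def build_headers(extra: Mapping[str, str] = {}) -> dict[str, str]:
        headers = dict(extra)  # copy: the shared {} default is never mutated
        headers.setdefault("User-Agent", "demo-client")  # illustrative header
        return headers

    print(build_headers())                    # {'User-Agent': 'demo-client'}
    print(build_headers({"X-Trace": "abc"}))  # both keys; the default stays empty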
@@ -1,5 +1,6 @@
 import hashlib
 import socket
+from typing import Any
 
 from .python_3x import url_encode
 
@@ -10,7 +11,7 @@ NAMESPACE_NAME = "namespaceName"
 
 
 # add timestamps uris and keys
-def signature(timestamp, uri, secret):
+def signature(timestamp: str, uri: str, secret: str) -> str:
     import base64
     import hmac
 
@@ -19,16 +20,16 @@ def signature(timestamp, uri, secret):
     return base64.b64encode(hmac_code).decode()
 
 
-def url_encode_wrapper(params):
+def url_encode_wrapper(params: dict[str, Any]) -> str:
     return url_encode(params)
 
 
-def no_key_cache_key(namespace, key):
-    return "{}{}{}".format(namespace, len(namespace), key)
+def no_key_cache_key(namespace: str, key: str) -> str:
+    return f"{namespace}{len(namespace)}{key}"
 
 
 # Returns whether the obtained value is obtained, and None if it does not
-def get_value_from_dict(namespace_cache, key):
+def get_value_from_dict(namespace_cache: dict[str, Any] | None, key: str) -> Any:
     if namespace_cache:
         kv_data = namespace_cache.get(CONFIGURATIONS)
         if kv_data is None:
@@ -38,7 +39,7 @@ def get_value_from_dict(namespace_cache, key):
     return None
 
 
-def init_ip():
+def init_ip() -> str:
     ip = ""
     s = None
     try:
@@ -11,5 +11,5 @@ class RemoteSettingsSource:
     def get_field_value(self, field: FieldInfo, field_name: str) -> tuple[Any, str, bool]:
         raise NotImplementedError
 
-    def prepare_field_value(self, field_name: str, field: FieldInfo, value: Any, value_is_complex: bool) -> Any:
+    def prepare_field_value(self, field_name: str, field: FieldInfo, value: Any, value_is_complex: bool):
         return value
@@ -11,16 +11,16 @@ logger = logging.getLogger(__name__)
 
 from configs.remote_settings_sources.base import RemoteSettingsSource
 
-from .utils import _parse_config
+from .utils import parse_config
 
 
 class NacosSettingsSource(RemoteSettingsSource):
     def __init__(self, configs: Mapping[str, Any]):
         self.configs = configs
-        self.remote_configs: dict[str, Any] = {}
+        self.remote_configs: dict[str, str] = {}
         self.async_init()
 
-    def async_init(self):
+    def async_init(self) -> None:
         data_id = os.getenv("DIFY_ENV_NACOS_DATA_ID", "dify-api-env.properties")
         group = os.getenv("DIFY_ENV_NACOS_GROUP", "nacos-dify")
         tenant = os.getenv("DIFY_ENV_NACOS_NAMESPACE", "")
@@ -29,22 +29,19 @@ class NacosSettingsSource(RemoteSettingsSource):
         try:
             content = NacosHttpClient().http_request("/nacos/v1/cs/configs", method="GET", headers={}, params=params)
             self.remote_configs = self._parse_config(content)
-        except Exception as e:
+        except Exception:
             logger.exception("[get-access-token] exception occurred")
             raise
 
-    def _parse_config(self, content: str) -> dict:
+    def _parse_config(self, content: str) -> dict[str, str]:
         if not content:
             return {}
         try:
-            return _parse_config(self, content)
+            return parse_config(content)
         except Exception as e:
             raise RuntimeError(f"Failed to parse config: {e}")
 
     def get_field_value(self, field: FieldInfo, field_name: str) -> tuple[Any, str, bool]:
-        if not isinstance(self.remote_configs, dict):
-            raise ValueError(f"remote configs is not dict, but {type(self.remote_configs)}")
-
         field_value = self.remote_configs.get(field_name)
         if field_value is None:
             return None, field_name, False
@@ -5,7 +5,7 @@ import logging
 import os
 import time
 
-import requests
+import httpx
 
 logger = logging.getLogger(__name__)
 
@@ -17,20 +17,26 @@ class NacosHttpClient:
         self.ak = os.getenv("DIFY_ENV_NACOS_ACCESS_KEY")
         self.sk = os.getenv("DIFY_ENV_NACOS_SECRET_KEY")
         self.server = os.getenv("DIFY_ENV_NACOS_SERVER_ADDR", "localhost:8848")
-        self.token = None
+        self.token: str | None = None
         self.token_ttl = 18000
         self.token_expire_time: float = 0
 
-    def http_request(self, url, method="GET", headers=None, params=None):
+    def http_request(
+        self, url: str, method: str = "GET", headers: dict[str, str] | None = None, params: dict[str, str] | None = None
+    ) -> str:
+        if headers is None:
+            headers = {}
+        if params is None:
+            params = {}
         try:
             self._inject_auth_info(headers, params)
-            response = requests.request(method, url="http://" + self.server + url, headers=headers, params=params)
+            response = httpx.request(method, url="http://" + self.server + url, headers=headers, params=params)
             response.raise_for_status()
             return response.text
-        except requests.exceptions.RequestException as e:
+        except httpx.RequestError as e:
             return f"Request to Nacos failed: {e}"
 
-    def _inject_auth_info(self, headers, params, module="config"):
+    def _inject_auth_info(self, headers: dict[str, str], params: dict[str, str], module: str = "config") -> None:
         headers.update({"User-Agent": "Nacos-Http-Client-In-Dify:v0.0.1"})
 
         if module == "login":
@@ -45,16 +51,17 @@ class NacosHttpClient:
         headers["timeStamp"] = ts
         if self.username and self.password:
             self.get_access_token(force_refresh=False)
-            params["accessToken"] = self.token
+            if self.token is not None:
+                params["accessToken"] = self.token
 
-    def __do_sign(self, sign_str, sk):
+    def __do_sign(self, sign_str: str, sk: str) -> str:
         return (
             base64.encodebytes(hmac.new(sk.encode(), sign_str.encode(), digestmod=hashlib.sha1).digest())
             .decode()
             .strip()
         )
 
-    def get_sign_str(self, group, tenant, ts):
+    def get_sign_str(self, group: str, tenant: str, ts: str) -> str:
         sign_str = ""
         if tenant:
             sign_str = tenant + "+"
@@ -63,7 +70,7 @@ class NacosHttpClient:
         sign_str += ts  # Directly concatenate ts without conditional checks, because the nacos auth header forced it.
         return sign_str
 
-    def get_access_token(self, force_refresh=False):
+    def get_access_token(self, force_refresh: bool = False) -> str | None:
         current_time = time.time()
         if self.token and not force_refresh and self.token_expire_time > current_time:
             return self.token
@@ -71,12 +78,13 @@ class NacosHttpClient:
         params = {"username": self.username, "password": self.password}
         url = "http://" + self.server + "/nacos/v1/auth/login"
         try:
-            resp = requests.request("POST", url, headers=None, params=params)
+            resp = httpx.request("POST", url, headers=None, params=params)
             resp.raise_for_status()
             response_data = resp.json()
             self.token = response_data.get("accessToken")
             self.token_ttl = response_data.get("tokenTtl", 18000)
             self.token_expire_time = current_time + self.token_ttl - 10
-        except Exception as e:
+            return self.token
+        except Exception:
             logger.exception("[get-access-token] exception occur")
             raise
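
Note on the hunks above: the requests -> httpx swap is nearly drop-in for these synchronous
calls: httpx.request(method, url, headers=..., params=...), .raise_for_status(), .text and
.json() all mirror the requests API, with httpx.RequestError as the transport-level exception
base. A minimal sketch (the URL is illustrative):

    import httpx

    try:
        resp = httpx.request("GET", "http://localhost:8848/nacos/v1/cs/configs")
        resp.raise_for_status()
        print(resp.text)
    except httpx.RequestError as e:
        # transport problems (connect/timeout); HTTP 4xx/5xx raise httpx.HTTPStatusError
        print(f"request failed: {e}")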
@@ -1,4 +1,4 @@
-def _parse_config(self, content: str) -> dict[str, str]:
+def parse_config(content: str) -> dict[str, str]:
     config: dict[str, str] = {}
     if not content:
         return config
@@ -9,21 +9,21 @@ DEFAULT_FILE_NUMBER_LIMITS = 3
 IMAGE_EXTENSIONS = ["jpg", "jpeg", "png", "webp", "gif", "svg"]
 IMAGE_EXTENSIONS.extend([ext.upper() for ext in IMAGE_EXTENSIONS])
 
-VIDEO_EXTENSIONS = ["mp4", "mov", "mpeg", "mpga"]
+VIDEO_EXTENSIONS = ["mp4", "mov", "mpeg", "webm"]
 VIDEO_EXTENSIONS.extend([ext.upper() for ext in VIDEO_EXTENSIONS])
 
-AUDIO_EXTENSIONS = ["mp3", "m4a", "wav", "webm", "amr"]
+AUDIO_EXTENSIONS = ["mp3", "m4a", "wav", "amr", "mpga"]
 AUDIO_EXTENSIONS.extend([ext.upper() for ext in AUDIO_EXTENSIONS])
 
 
+_doc_extensions: list[str]
 if dify_config.ETL_TYPE == "Unstructured":
-    DOCUMENT_EXTENSIONS = ["txt", "markdown", "md", "mdx", "pdf", "html", "htm", "xlsx", "xls", "vtt", "properties"]
-    DOCUMENT_EXTENSIONS.extend(("doc", "docx", "csv", "eml", "msg", "pptx", "xml", "epub"))
+    _doc_extensions = ["txt", "markdown", "md", "mdx", "pdf", "html", "htm", "xlsx", "xls", "vtt", "properties"]
+    _doc_extensions.extend(("doc", "docx", "csv", "eml", "msg", "pptx", "xml", "epub"))
     if dify_config.UNSTRUCTURED_API_URL:
-        DOCUMENT_EXTENSIONS.append("ppt")
-    DOCUMENT_EXTENSIONS.extend([ext.upper() for ext in DOCUMENT_EXTENSIONS])
+        _doc_extensions.append("ppt")
 else:
-    DOCUMENT_EXTENSIONS = [
+    _doc_extensions = [
         "txt",
         "markdown",
         "md",
@@ -38,4 +38,4 @@ else:
         "vtt",
         "properties",
     ]
-    DOCUMENT_EXTENSIONS.extend([ext.upper() for ext in DOCUMENT_EXTENSIONS])
+DOCUMENT_EXTENSIONS = _doc_extensions + [ext.upper() for ext in _doc_extensions]
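
Note on the hunks above: building the lowercase list in a private _doc_extensions and deriving
DOCUMENT_EXTENSIONS once at the end keeps the uppercase derivation in one place instead of once
per branch, and assigns the exported name a single time, so its list[str] type is unambiguous
to checkers. The resulting shape:

    _doc_extensions = ["txt", "pdf"]  # illustrative subset
    DOCUMENT_EXTENSIONS = _doc_extensions + [ext.upper() for ext in _doc_extensions]
    print(DOCUMENT_EXTENSIONS)  # ['txt', 'pdf', 'TXT', 'PDF']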
@@ -19,6 +19,7 @@ language_timezone_mapping = {
     "fa-IR": "Asia/Tehran",
     "sl-SI": "Europe/Ljubljana",
     "th-TH": "Asia/Bangkok",
+    "id-ID": "Asia/Jakarta",
 }
 
 languages = list(language_timezone_mapping.keys())
@@ -28,5 +29,5 @@ def supported_language(lang):
     if lang in languages:
         return lang
 
-    error = "{lang} is not a valid language.".format(lang=lang)
+    error = f"{lang} is not a valid language."
     raise ValueError(error)
@@ -7,7 +7,7 @@ default_app_templates: Mapping[AppMode, Mapping] = {
     # workflow default mode
     AppMode.WORKFLOW: {
         "app": {
-            "mode": AppMode.WORKFLOW.value,
+            "mode": AppMode.WORKFLOW,
             "enable_site": True,
             "enable_api": True,
         }
@@ -15,7 +15,7 @@ default_app_templates: Mapping[AppMode, Mapping] = {
     # completion default mode
     AppMode.COMPLETION: {
         "app": {
-            "mode": AppMode.COMPLETION.value,
+            "mode": AppMode.COMPLETION,
             "enable_site": True,
             "enable_api": True,
         },
@@ -44,7 +44,7 @@ default_app_templates: Mapping[AppMode, Mapping] = {
     # chat default mode
     AppMode.CHAT: {
         "app": {
-            "mode": AppMode.CHAT.value,
+            "mode": AppMode.CHAT,
             "enable_site": True,
             "enable_api": True,
         },
@@ -60,7 +60,7 @@ default_app_templates: Mapping[AppMode, Mapping] = {
     # advanced-chat default mode
     AppMode.ADVANCED_CHAT: {
         "app": {
-            "mode": AppMode.ADVANCED_CHAT.value,
+            "mode": AppMode.ADVANCED_CHAT,
             "enable_site": True,
             "enable_api": True,
         },
@@ -68,7 +68,7 @@ default_app_templates: Mapping[AppMode, Mapping] = {
     # agent-chat default mode
     AppMode.AGENT_CHAT: {
         "app": {
-            "mode": AppMode.AGENT_CHAT.value,
+            "mode": AppMode.AGENT_CHAT,
             "enable_site": True,
             "enable_api": True,
         },
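
Note on the hunks above: dropping .value only round-trips if AppMode is a str-backed enum
(e.g. class AppMode(StrEnum)), which these hunks assume; the member then serializes and
compares as its string value. A minimal illustration under that assumption:

    from enum import StrEnum

    class AppMode(StrEnum):  # assumed shape of the real enum
        WORKFLOW = "workflow"

    template = {"mode": AppMode.WORKFLOW}
    print(template["mode"] == "workflow")  # True: no .value needed
    print(str(template["mode"]))           # "workflow"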
Some files were not shown because too many files have changed in this diff.