Compare commits

..

272 Commits

Author SHA1 Message Date
f4154fb7d9 fix: tool info 2025-02-25 12:49:52 +08:00
b0a804e22f fix: gemini model info 2025-02-25 10:29:51 +08:00
ce545274a6 refactor(tool-engine): Optimize tool engine response handling (#14216)
Co-authored-by: xudong2.zhang <xudong2.zhang@zkh.com>
Co-authored-by: crazywoola <427733928@qq.com>
2025-02-25 09:58:43 +08:00
aa6c951e8c chore: compatible with es5 (#14268) 2025-02-25 09:46:54 +08:00
c4f4dfc3fb Fixed: The use of default parameters in API interfaces (#14138) 2025-02-25 09:43:36 +08:00
548f6ef2b6 fix: incorrect score in the chroma vector (#14273) 2025-02-25 09:40:22 +08:00
b15ff4eb8c fix: datasets documents update-by-file api missing assign field 'indexing_technique' internally (#14243) 2025-02-24 20:14:36 +08:00
7790214620 chore: Fix typos in simplified and traditional Chinese in i18n files (#14228) 2025-02-24 11:32:09 +08:00
3942e45cab fix: update refresh logic for plugin list to avoid redundant request and fix model provider list update issue in settings (#14152) 2025-02-24 10:49:43 +08:00
2ace9ae4e4 fix: add responsive layout for file uploader in datasets (#14159) 2025-02-23 19:35:10 +08:00
5ac0ef6253 Retain previous page's search params (#14176) 2025-02-23 13:47:03 +08:00
f552667312 chore: Simplify the unchanged overrided method in VariableAggregatorNode (#14175) 2025-02-23 13:46:48 +08:00
5669a18bd8 fix: typo in README_FR (#14179) (#14180) 2025-02-22 09:28:03 +08:00
a97d73ab05 Update iteration_node.py to fix #14098: Token count increases exponen… (#14100) 2025-02-21 09:15:46 +08:00
252d2c425b feat(docker): add PM2_INSTANCES variable and update entrypoint script… (#14083) 2025-02-20 17:30:19 +08:00
09fc4bba61 fix: agent tools setting (#14097) 2025-02-20 17:20:23 +08:00
79d4db8541 fix: support selecting yaml extension when importing dsl file (#14088) 2025-02-20 15:40:43 +08:00
9c42626772 fix: fix chunk and segment detail components (#14002) 2025-02-20 15:13:43 +08:00
bbfe83c86b Fix/upgrade btn show logic (#14072) 2025-02-20 14:31:56 +08:00
55aa4e424a fix: quota less than zero show error (#14080) 2025-02-20 14:04:13 +08:00
8015f5c0c5 Fix ko-KR/workflow.ts (#14075) 2025-02-20 13:49:04 +08:00
f3fe14863d fix: marketplace search url (#14061) 2025-02-20 10:00:04 +08:00
d96c368660 fix: frontend for <think> tags conflicting with original <details> tags (#14039) 2025-02-19 22:04:11 +08:00
3f34b8b0d1 fix: remove duplicated code (#14047)
Co-authored-by: 刘江波 <jiangbo721@163.com>
2025-02-19 22:03:24 +08:00
6a58ea9e56 chore(docker): Back to version 0.15.3 (#14042)
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-02-19 20:09:14 +08:00
23888398d1 refactor: Simplify plugin and provider ID generation logic and deduplicate plugin_ids (#14041) 2025-02-19 20:05:01 +08:00
bfbc5eb91e fix: Prevent plugin installation with non-existent plugin IDs (#14038) 2025-02-19 19:37:09 +08:00
98b0d4169e fix workspace model list (#14033) 2025-02-19 18:18:25 +08:00
356cd271b2 fix: MessageLogModal closed unexpectedly (#13617) 2025-02-19 17:32:54 +08:00
baf7561cf8 fix: There is an error in the Japanese in the Japanese documentation (#14026) 2025-02-19 17:29:11 +08:00
b09f22961c fix #13573 (#13843)
Signed-off-by: kenwoodjw <blackxin55+@gmail.com>
2025-02-19 17:28:32 +08:00
f3ad3a5dfd fix: "today" statistics are not displayed in chart view (#13854) 2025-02-19 17:28:10 +08:00
ee49d321c5 fix(workflow): correct edge type mapping typo (#13988) 2025-02-19 17:27:26 +08:00
3467ad3d02 fix: build error 2 (#14003) 2025-02-19 14:00:33 +08:00
6741604027 fix: build web image error (#14000) 2025-02-19 13:42:56 +08:00
35312cf96c fix: remove run unit test ci to ensure build successfully (#13999) 2025-02-19 12:51:35 +08:00
15f028fe59 fix: build web image fail (#13996) 2025-02-19 12:35:16 +08:00
8a2301af56 fix: fix build web image install problem (#13994) 2025-02-19 11:52:36 +08:00
66747a8eef fix: some GitHub run action problem (#13991) 2025-02-19 11:35:19 +08:00
19d413ac1e feat: date and time picker (#13985) 2025-02-19 10:56:18 +08:00
eux 4a332ff1af fix: update the en-US i18n for logAndAnn (#13902) 2025-02-19 09:15:11 +08:00
dc942db52f chore: remove duplicate import statements (#13959) 2025-02-19 09:14:32 +08:00
f535a2aa71 chore: prompt_message is actually assistant_message which is a bit am… (#13839)
Co-authored-by: 刘江波 <jiangbo721@163.com>
2025-02-19 09:14:10 +08:00
dfdd6dfa20 fix: change the config name and fix typo in description of the number of retrieval executors (#13856) 2025-02-19 09:13:36 +08:00
2af81d1ee3 chore: translate i18n files (#13795)
Co-authored-by: douxc <7553076+douxc@users.noreply.github.com>
2025-02-19 09:13:26 +08:00
ece25bce1a fix: LLM may not return <think> tag, cause thinking time keep increase (#13962) 2025-02-18 21:39:01 +08:00
6fc234183a chore: replace marketplace search api (#13963) 2025-02-18 21:37:11 +08:00
15a56f705f fix: get tool provider (#13958) 2025-02-18 20:18:36 +08:00
899f7e125f Fix/add or update model credentials (#13952) 2025-02-18 20:00:39 +08:00
aa19bb3f30 fix session close issue (#13946) 2025-02-18 19:29:57 +08:00
562852a0ae fix: web test not install pnpm before use (#13931) 2025-02-18 18:18:37 +08:00
a4b992c1ab fix: web style workflow checkout error (#13929) 2025-02-18 18:18:20 +08:00
3460c1dfbd fix: tool id (#13932) 2025-02-18 18:17:41 +08:00
653f6c2d46 fix: fetch configured model providers (#13924) 2025-02-18 17:43:39 +08:00
ed7851a4b3 fix: plugin tool icon (#13918) 2025-02-18 16:54:14 +08:00
cb841e5cde fix: plugins install task (#13899) 2025-02-18 15:20:26 +08:00
4dae0e514e fix: ignore plugin already exists (#13888) 2025-02-18 13:22:39 +08:00
363c46ace8 fix: add missing package xinference_client to pass vdb CI tests (#13865) 2025-02-17 23:37:49 +08:00
abe5aca3e2 Retrieval service optimization (#13849) 2025-02-17 18:22:36 +08:00
bea10b4356 fix: undefined attribute 'query' on MessageAnnotation (#13852) 2025-02-17 18:16:45 +08:00
f5f83f1924 fix: markdown merge error (#13853) 2025-02-17 18:11:07 +08:00
403e2d58b9 Introduce Plugins (#13836)
Signed-off-by: yihong0618 <zouzou0208@gmail.com>
Signed-off-by: -LAN- <laipz8200@outlook.com>
Signed-off-by: xhe <xw897002528@gmail.com>
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: takatost <takatost@gmail.com>
Co-authored-by: kurokobo <kuro664@gmail.com>
Co-authored-by: Novice Lee <novicelee@NoviPro.local>
Co-authored-by: zxhlyh <jasonapring2015@outlook.com>
Co-authored-by: AkaraChen <akarachen@outlook.com>
Co-authored-by: Yi <yxiaoisme@gmail.com>
Co-authored-by: Joel <iamjoel007@gmail.com>
Co-authored-by: JzoNg <jzongcode@gmail.com>
Co-authored-by: twwu <twwu@dify.ai>
Co-authored-by: Hiroshi Fujita <fujita-h@users.noreply.github.com>
Co-authored-by: AkaraChen <85140972+AkaraChen@users.noreply.github.com>
Co-authored-by: NFish <douxc512@gmail.com>
Co-authored-by: Wu Tianwei <30284043+WTW0313@users.noreply.github.com>
Co-authored-by: 非法操作 <hjlarry@163.com>
Co-authored-by: Novice <857526207@qq.com>
Co-authored-by: Hiroki Nagai <82458324+nagaihiroki-git@users.noreply.github.com>
Co-authored-by: Gen Sato <52241300+halogen22@users.noreply.github.com>
Co-authored-by: eux <euxuuu@gmail.com>
Co-authored-by: huangzhuo1949 <167434202+huangzhuo1949@users.noreply.github.com>
Co-authored-by: huangzhuo <huangzhuo1@xiaomi.com>
Co-authored-by: lotsik <lotsik@mail.ru>
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
Co-authored-by: nite-knite <nkCoding@gmail.com>
Co-authored-by: Jyong <76649700+JohnJyong@users.noreply.github.com>
Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
Co-authored-by: gakkiyomi <gakkiyomi@aliyun.com>
Co-authored-by: CN-P5 <heibai2006@gmail.com>
Co-authored-by: CN-P5 <heibai2006@qq.com>
Co-authored-by: Chuehnone <1897025+chuehnone@users.noreply.github.com>
Co-authored-by: yihong <zouzou0208@gmail.com>
Co-authored-by: Kevin9703 <51311316+Kevin9703@users.noreply.github.com>
Co-authored-by: -LAN- <laipz8200@outlook.com>
Co-authored-by: Boris Feld <lothiraldan@gmail.com>
Co-authored-by: mbo <himabo@gmail.com>
Co-authored-by: mabo <mabo@aeyes.ai>
Co-authored-by: Warren Chen <warren.chen830@gmail.com>
Co-authored-by: JzoNgKVO <27049666+JzoNgKVO@users.noreply.github.com>
Co-authored-by: jiandanfeng <chenjh3@wangsu.com>
Co-authored-by: zhu-an <70234959+xhdd123321@users.noreply.github.com>
Co-authored-by: zhaoqingyu.1075 <zhaoqingyu.1075@bytedance.com>
Co-authored-by: 海狸大師 <86974027+yenslife@users.noreply.github.com>
Co-authored-by: Xu Song <xusong.vip@gmail.com>
Co-authored-by: rayshaw001 <396301947@163.com>
Co-authored-by: Ding Jiatong <dingjiatong@gmail.com>
Co-authored-by: Bowen Liang <liangbowen@gf.com.cn>
Co-authored-by: JasonVV <jasonwangiii@outlook.com>
Co-authored-by: le0zh <newlight@qq.com>
Co-authored-by: zhuxinliang <zhuxinliang@didiglobal.com>
Co-authored-by: k-zaku <zaku99@outlook.jp>
Co-authored-by: luckylhb90 <luckylhb90@gmail.com>
Co-authored-by: hobo.l <hobo.l@binance.com>
Co-authored-by: jiangbo721 <365065261@qq.com>
Co-authored-by: 刘江波 <jiangbo721@163.com>
Co-authored-by: Shun Miyazawa <34241526+miya@users.noreply.github.com>
Co-authored-by: EricPan <30651140+Egfly@users.noreply.github.com>
Co-authored-by: crazywoola <427733928@qq.com>
Co-authored-by: sino <sino2322@gmail.com>
Co-authored-by: Jhvcc <37662342+Jhvcc@users.noreply.github.com>
Co-authored-by: lowell <lowell.hu@zkteco.in>
Co-authored-by: Boris Polonsky <BorisPolonsky@users.noreply.github.com>
Co-authored-by: Ademílson Tonato <ademilsonft@outlook.com>
Co-authored-by: Ademílson Tonato <ademilson.tonato@refurbed.com>
Co-authored-by: IWAI, Masaharu <iwaim.sub@gmail.com>
Co-authored-by: Yueh-Po Peng (Yabi) <94939112+y10ab1@users.noreply.github.com>
Co-authored-by: Jason <ggbbddjm@gmail.com>
Co-authored-by: Xin Zhang <sjhpzx@gmail.com>
Co-authored-by: yjc980121 <3898524+yjc980121@users.noreply.github.com>
Co-authored-by: heyszt <36215648+hieheihei@users.noreply.github.com>
Co-authored-by: Abdullah AlOsaimi <osaimiacc@gmail.com>
Co-authored-by: Abdullah AlOsaimi <189027247+osaimi@users.noreply.github.com>
Co-authored-by: Yingchun Lai <laiyingchun@apache.org>
Co-authored-by: Hash Brown <hi@xzd.me>
Co-authored-by: zuodongxu <192560071+zuodongxu@users.noreply.github.com>
Co-authored-by: Masashi Tomooka <tmokmss@users.noreply.github.com>
Co-authored-by: aplio <ryo.091219@gmail.com>
Co-authored-by: Obada Khalili <54270856+obadakhalili@users.noreply.github.com>
Co-authored-by: Nam Vu <zuzoovn@gmail.com>
Co-authored-by: Kei YAMAZAKI <1715090+kei-yamazaki@users.noreply.github.com>
Co-authored-by: TechnoHouse <13776377+deephbz@users.noreply.github.com>
Co-authored-by: Riddhimaan-Senapati <114703025+Riddhimaan-Senapati@users.noreply.github.com>
Co-authored-by: MaFee921 <31881301+2284730142@users.noreply.github.com>
Co-authored-by: te-chan <t-nakanome@sakura-is.co.jp>
Co-authored-by: HQidea <HQidea@users.noreply.github.com>
Co-authored-by: Joshbly <36315710+Joshbly@users.noreply.github.com>
Co-authored-by: xhe <xw897002528@gmail.com>
Co-authored-by: weiwenyan-dev <154779315+weiwenyan-dev@users.noreply.github.com>
Co-authored-by: ex_wenyan.wei <ex_wenyan.wei@tcl.com>
Co-authored-by: engchina <12236799+engchina@users.noreply.github.com>
Co-authored-by: engchina <atjapan2015@gmail.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: 呆萌闷油瓶 <253605712@qq.com>
Co-authored-by: Kemal <kemalmeler@outlook.com>
Co-authored-by: Lazy_Frog <4590648+lazyFrogLOL@users.noreply.github.com>
Co-authored-by: Yi Xiao <54782454+YIXIAO0@users.noreply.github.com>
Co-authored-by: Steven sun <98230804+Tuyohai@users.noreply.github.com>
Co-authored-by: steven <sunzwj@digitalchina.com>
Co-authored-by: Kalo Chin <91766386+fdb02983rhy@users.noreply.github.com>
Co-authored-by: Katy Tao <34019945+KatyTao@users.noreply.github.com>
Co-authored-by: depy <42985524+h4ckdepy@users.noreply.github.com>
Co-authored-by: 胡春东 <gycm520@gmail.com>
Co-authored-by: Junjie.M <118170653@qq.com>
Co-authored-by: MuYu <mr.muzea@gmail.com>
Co-authored-by: Naoki Takashima <39912547+takatea@users.noreply.github.com>
Co-authored-by: Summer-Gu <37869445+gubinjie@users.noreply.github.com>
Co-authored-by: Fei He <droxer.he@gmail.com>
Co-authored-by: ybalbert001 <120714773+ybalbert001@users.noreply.github.com>
Co-authored-by: Yuanbo Li <ybalbert@amazon.com>
Co-authored-by: douxc <7553076+douxc@users.noreply.github.com>
Co-authored-by: liuzhenghua <1090179900@qq.com>
Co-authored-by: Wu Jiayang <62842862+Wu-Jiayang@users.noreply.github.com>
Co-authored-by: Your Name <you@example.com>
Co-authored-by: kimjion <45935338+kimjion@users.noreply.github.com>
Co-authored-by: AugNSo <song.tiankai@icloud.com>
Co-authored-by: llinvokerl <38915183+llinvokerl@users.noreply.github.com>
Co-authored-by: liusurong.lsr <liusurong.lsr@alibaba-inc.com>
Co-authored-by: Vasu Negi <vasu-negi@users.noreply.github.com>
Co-authored-by: Hundredwz <1808096180@qq.com>
Co-authored-by: Xiyuan Chen <52963600+GareArc@users.noreply.github.com>
2025-02-17 17:05:13 +08:00
222df44d21 Retrieval Service efficiency optimization (#13543) 2025-02-17 14:09:57 +08:00
566e548713 fix: update trial expire time to 3/17 (#13796) 2025-02-17 10:02:21 +08:00
1434d54e7a Revert "Feat: compliance report download" (#13799) 2025-02-17 09:58:56 +08:00
4229d0f9a7 Revert "Feat/compliance" (#13798) 2025-02-16 20:58:25 -05:00
7f9eb35e1f Feat: compliance report download (#13282) 2025-02-17 09:43:41 +08:00
ed7d7a74ea Feat/compliance (#13548) 2025-02-16 20:31:52 -05:00
035e54ba4d fix: add install a package to improve the accuracy of guessing mime type and file extension (main) (#13752) 2025-02-16 21:39:40 +08:00
284707c3a8 perf(message): optimize message loading and reduce SQL queries (#13720) 2025-02-15 12:19:01 +08:00
1f63028a83 fix: reranking_enable setting failed #13668 (#13721) 2025-02-14 17:42:09 +08:00
8a0aa91ed7 Non-Streaming Models Do Not Return Results Properly in _handle_invoke_result (#13571)
Co-authored-by: crazywoola <427733928@qq.com>
2025-02-14 17:02:04 +08:00
62079991b7 fix:Knowledge Base with Parent-Child segment mode not support in Agent (#13663) 2025-02-14 14:34:59 +08:00
4e7e172ff3 Chore/format code (#13691)
Co-authored-by: 刘江波 <jiangbo721@163.com>
2025-02-14 13:38:17 +08:00
33a565a719 perf: Implemented short-circuit evaluation for logical conditions (#13674)
Co-authored-by: liusurong.lsr <liusurong.lsr@alibaba-inc.com>
2025-02-13 19:35:03 +08:00
f0b9257387 fix: error in obtaining end_to_node_id during conditional parallel execution (#13673) 2025-02-13 18:00:28 +08:00
c398c9cb6a chore:Remove duplicate code, lines 8 to 27, same as lines 29 & 45 to 62. (#13659)
Co-authored-by: 刘江波 <jiangbo721@163.com>
2025-02-13 14:51:38 +08:00
a3d3e30e3a fix: fix tongyi models blocking mode with incremental_output=stream (#13620) 2025-02-13 10:24:05 +08:00
2b86465d4c fix document extractor node incorrectly processing doc and ppt files (#12902) 2025-02-12 18:04:28 +08:00
6529240da6 fix: no longer using old app detail cover when switch pathname (#13585) 2025-02-12 15:02:11 +08:00
0751ad1eeb feat(vdb): add HNSW vector index for TiDB vector store with TiFlash (#12043) 2025-02-12 13:53:51 +08:00
786550bdc9 fix: changed topics/keywords to topic/keywords (#13544) 2025-02-12 09:15:15 +08:00
bde756a1ab chore:Remove useless brackets and format code (#13479)
Co-authored-by: 刘江波 <jiangbo721@163.com>
2025-02-11 22:05:29 +08:00
423fb2d7bc Ensure the 'inputs' field in /chat-messages takes effect every time (#7955)
Co-authored-by: Your Name <you@example.com>
Co-authored-by: -LAN- <laipz8200@outlook.com>
2025-02-11 18:44:56 +08:00
f96b4f287a fix: iteration node log time error (#13511) 2025-02-11 16:35:21 +08:00
c00e7d3f65 fix: retry log running error (#13472)
Co-authored-by: Novice Lee <novicelee@NoviPro.local>
2025-02-11 15:48:55 +08:00
1f38d4846b fix: issue #13483 and #13434 (#13518)
Signed-off-by: yihong0618 <zouzou0208@gmail.com>
2025-02-11 12:45:49 +08:00
47a64610ca Fix the issue of repeated escaping of quotes in hit test (#13477) 2025-02-11 09:58:31 +08:00
f0a845f0f9 fix: removed LLM output from the main README (#13504) 2025-02-11 09:09:07 +08:00
abec23118d feat: add support for X-Forwarded-Port in ProxyFix middleware (#13102) 2025-02-10 22:28:29 +08:00
0957119550 fix: update UTC time format for consistency (#13471) 2025-02-10 19:37:50 +08:00
f48fa3e4e8 chore: translate i18n files (#13452)
Co-authored-by: douxc <7553076+douxc@users.noreply.github.com>
2025-02-10 14:14:15 +08:00
5ffc58d6ca feat: improve think content display (#13431) 2025-02-10 14:08:17 +08:00
7d958635f0 Fix/add trial expire tip time (#13464) 2025-02-10 12:53:59 +08:00
33990426c1 fix: add ids in FetchDatasetsParams (#13459) 2025-02-10 12:28:36 +08:00
9f3fc7ebf8 ci: make ci safe using zizmor (#13397)
Signed-off-by: yihong0618 <zouzou0208@gmail.com>
2025-02-10 12:26:08 +08:00
c8357da13b [Fix] Sagemaker LLM Provider can't adjust context size, it'a always 2… (#13462)
Co-authored-by: Yuanbo Li <ybalbert@amazon.com>
2025-02-10 12:25:04 +08:00
2290f14fb1 feat: add tooltip if user's anthropic trial quota still available (#13418) 2025-02-10 10:44:20 +08:00
7796984444 Fix: Removed model params except max_token for deepseek r1 in volcengine (#13446) 2025-02-10 10:26:26 +08:00
75113c26c6 Feat : add deepseek support for tongyi (#13445) 2025-02-10 10:26:03 +08:00
xhe 939a9ecd21 chore: use the wrap thinking api for volcengine (#13432)
Signed-off-by: xhe <xw897002528@gmail.com>
2025-02-10 10:25:07 +08:00
f307c7cd88 feat: Docker adds SSRF-related timeout settings (#13395) 2025-02-10 10:21:31 +08:00
33ecceb90c Feat: add comparison table to main readme (#13435) 2025-02-10 10:13:46 +08:00
e0d1cab079 fix: add missed background color to iteration node (#13448) 2025-02-10 10:04:56 +08:00
811d72a727 feat: added a _position.yaml for vertex ai provider (#13367) 2025-02-09 10:29:07 +08:00
c3c575c2e1 Fix: model selector UI hover issue (#13396) 2025-02-09 10:24:57 +08:00
c189629eca Fix(i18n): Refine zh-Hant workflow translations (#13421) 2025-02-09 10:24:45 +08:00
37117c22d4 feat(model): support Gemini 2.0 Flash Lite Preview model (02-05) in Google's model provider (#13399) 2025-02-09 10:22:33 +08:00
b05e9d2ab4 feat: update backend documentation (#13374) 2025-02-08 20:36:33 +08:00
0451333990 fix(settings): add notClearable prop to language selection (#13406) 2025-02-08 20:36:23 +08:00
ab2e6c19a4 Fixes #13415 reset model-provider-page form value use schema.default (#13416) 2025-02-08 20:34:52 +08:00
f7959bc887 fix(chatbot): update button class to include text color for better visibility (#13411) 2025-02-08 20:34:37 +08:00
45874c699d Nitpick/fix typos in document (#13413) 2025-02-08 20:33:45 +08:00
286cdc41ab reasoning model unified think tag is <think></think> (#13392)
Co-authored-by: crazywoola <427733928@qq.com>
2025-02-08 16:19:41 +08:00
78708eb5d5 fix: merge conflict between #11301 and #11885 (#13391) 2025-02-08 14:38:10 +08:00
cf36745770 fix(workflow_tool): enable File parameter support after workflow is published as a tool (#13175) 2025-02-08 12:30:00 +08:00
6622c7f98d fix: Fix HTTP request node non 443 port SSL site inaccessible (#13376) 2025-02-08 12:00:45 +08:00
3112b74527 fix: build failed due to getPrevChatList no longer exists (#13383) 2025-02-08 11:59:02 +08:00
b3ae6b634f feat: add pan and zoom support for MiniMap (#13382) 2025-02-08 11:57:41 +08:00
982bca5d40 fix: add rate limiting to prevent brute force on password reset (#13292) 2025-02-08 10:28:31 +08:00
c8dcde6cd0 fix: Gemini 2.0 Flash 001 model yaml file naming (#13372) 2025-02-08 09:12:42 +08:00
8f9db61688 feat: added new silicon flow models (#13369) 2025-02-08 09:12:22 +08:00
ebdbaf34e6 chore: translate i18n files (#13349)
Co-authored-by: JzoNgKVO <27049666+JzoNgKVO@users.noreply.github.com>
2025-02-07 22:41:25 +08:00
a081b1e79e fix: add compatibility config for third-party S3-compatible providers (#13354)
Co-authored-by: zhaoqingyu.1075 <zhaoqingyu.1075@bytedance.com>
2025-02-07 22:35:24 +08:00
38c31e64db add enable_search parameter to qwen_max, plus, turbo (#13335)
Co-authored-by: steven <sunzwj@digitalchina.com>
2025-02-07 22:16:26 +08:00
ae6f67420c Chore: update app detail panel (#13337) 2025-02-07 18:56:43 +08:00
ca19bd31d4 chore(*): Bump version to 0.15.3 (#13308)
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-02-07 15:20:05 +08:00
413dfd5628 feat: add completion mode and context size options for LLM configuration (#13325)
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-02-07 15:08:53 +08:00
f9515901cc fix: Azure AI Foundry model cannot be used in the workflow (#13323)
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-02-07 14:52:57 +08:00
3f42fabff8 chore:improve thinking display for llm from xinference and ollama pro… (#13318) 2025-02-07 14:29:29 +08:00
1caa578771 chore(*): Update style of thinking (#13319)
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-02-07 14:06:35 +08:00
b7c11c1818 Fix the problem of Workflow terminates after parallel tasks execution, merge node not triggered (#12498)
Co-authored-by: Novice Lee <novicelee@NoviPro.local>
2025-02-07 13:56:08 +08:00
3eb3db0663 chore: refactor the OpenAICompatible and improve thinking display (#13299) 2025-02-07 13:28:46 +08:00
be46f32056 fix(credits): require model name equals (#13314)
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-02-07 13:28:17 +08:00
6e5c915f96 feat(model): add deepseek-r1 for openrouter (#13312) 2025-02-07 12:39:13 +08:00
04d13a8116 feat(credits): Allow to configure model-credit mapping (#13274)
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-02-07 11:01:31 +08:00
e638ede3f2 Update README_TR.md (#13294) 2025-02-07 09:11:39 +08:00
2348abe4bf feat: added a couple of models not defined in vertex ai, that were already … (#13296) 2025-02-07 09:11:25 +08:00
f7e7a399d9 feat:add think tag display for xinference deepseek r1 (#13291) 2025-02-06 22:04:58 +08:00
ba91f34636 fix: incorrect transferMethod assignment for remote file (#13286) 2025-02-06 19:32:21 +08:00
16865d43a8 feat: add deepseek models for volcengine provider (#13283)
Co-authored-by: zhaoqingyu.1075 <zhaoqingyu.1075@bytedance.com>
2025-02-06 18:20:03 +08:00
0d13aee15c feat:add deepseek r1 think display for ollama provider (#13272) 2025-02-06 15:32:10 +08:00
49b4144ffd fix: add dataset edit permissions (#13223) 2025-02-06 14:26:16 +08:00
186e2d972e chore(deps): bump katex from 0.16.10 to 0.16.21 in /web (#13270)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-02-06 13:27:07 +08:00
40dd63ecef Upgrade oracle models (#13174)
Co-authored-by: engchina <atjapan2015@gmail.com>
2025-02-06 13:24:27 +08:00
6d66d6da15 feat(model_providers): Support deepseek-r1 for Nvidia Catalog (#13269)
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-02-06 13:03:19 +08:00
03ec3513f3 Fix bug large data no render (#12683)
Co-authored-by: ex_wenyan.wei <ex_wenyan.wei@tcl.com>
2025-02-06 13:00:04 +08:00
87763fc234 feat(model_providers): Support deepseek for Azure AI Foundry (#13267)
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-02-06 12:45:48 +08:00
f6c44cae2e feat(model): add gemini-2.0 model (#13266) 2025-02-06 12:28:59 +08:00
xhe da2ee04fce fix: correct linewrap think display in generic openai api (#13260)
Signed-off-by: xhe <xw897002528@gmail.com>
2025-02-06 10:53:08 +08:00
7673c36af3 feat(model): add gemini-2.0-flash-thinking-exp-01-21 (#13230) 2025-02-06 10:01:00 +08:00
9457b2af2f feat: added models :gemini 2.0 flash 001 and gemini 2.0 pro exp 02-05 (#13247) 2025-02-06 09:58:39 +08:00
7203991032 feat: add parameter "reasoning_effort" and Openai o3-mini (#13243) 2025-02-06 09:29:48 +08:00
xhe 5a685f7156 feat: add think display for volcengine and generic openapi (#13234)
Signed-off-by: xhe <xw897002528@gmail.com>
2025-02-06 09:24:40 +08:00
a6a25030ad fix: updated _position.yaml to include the latest model already integ… (#13245) 2025-02-06 09:21:51 +08:00
00458a31d5 feat: added deepseek r1 and v3 to siliconflow (#13238) 2025-02-05 21:59:18 +08:00
c6ddf6d6cc feat(model_providers): Add Groq DeepSeek-R1-Distill-Llama-70b (#13229)
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-02-05 19:15:29 +08:00
34b21b3065 feat: Add o3-mini and o3-mini-2025-01-31 model variants (#13129)
Co-authored-by: crazywoola <427733928@qq.com>
2025-02-05 17:04:45 +08:00
8fbb355cd2 chore: squash system dependencies installation steps (#13206) 2025-02-05 16:42:53 +08:00
e8b3b7e578 Fix new variables in the conversation opener would override prompt_variables (#13191) 2025-02-05 16:16:00 +08:00
59ca44f493 chore(model_runtime): Move deepseek ahead in the providers list. (#13197)
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-02-05 16:08:28 +08:00
9e1457c2c3 fix: mypy checks violation in AzureBlobStorage (#13215) 2025-02-05 15:56:23 +08:00
fac83e14bc Use DefaultAzureCredential for managed identity in azure blob extention (#11559) 2025-02-05 13:43:43 +08:00
a97cec57e4 fix: SSRF proxy file descriptor leak in concurrent requests (#13108) 2025-02-05 13:10:27 +08:00
38c10b47d3 Feat: add linkedin to readme (#13203) 2025-02-05 12:27:58 +08:00
1a2523fd15 feat: bedrock_endpoint_url (#12838) 2025-02-05 12:24:24 +08:00
03243cb422 Modify params for bedrock retrieve generate (#13182) 2025-02-05 12:17:42 +08:00
2ad7ee0344 chore: add tests for build docker image when dockerfile changed (#10732) 2025-02-05 11:40:22 +08:00
55ce3618ce fix: Dollar Sign Handling in Markdown (#13178)
Co-authored-by: crazywoola <427733928@qq.com>
2025-02-05 11:00:56 +08:00
e9e34c1ab2 Install apt dependencies using bookworm source, consistent with base image. Remove unnecessary, error-prone pins (#13176) 2025-02-05 10:07:22 +08:00
d4c916b496 chore(pyproject): Add type stubs into pyproject.toml (#13145)
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-02-04 12:01:28 +08:00
8fbc9c9342 Solve circular dependency issue between workflow/constants.ts file and default.ts file (#13165) 2025-02-04 09:26:01 +08:00
1b6fd9dfe8 fix: set indexing technique from dataset during update-by-text (#13155) 2025-02-03 11:06:03 +08:00
304467e3f5 fix: not install libmagic raise error (#13146) 2025-02-03 11:05:20 +08:00
7452032d81 add azure openai api version 2024-12-01-preview (#13135) 2025-02-03 11:04:20 +08:00
87e2048f1b nitpick: fix small typos in template.en.mdx (#13156) 2025-02-03 11:03:11 +08:00
d876084392 chore: upgrade libldap2 (#13158) 2025-02-03 11:02:14 +08:00
840729afa5 feat: the think tag display of siliconflow's deepseek r1 (#13153) 2025-02-02 21:55:13 +08:00
941ad03f3c pass model and cost so that langfuse can show cost (#13117) 2025-02-02 15:27:27 +08:00
d73d191f99 feature. add feat to modify metadata via dataset api (#13116) 2025-02-02 15:27:12 +08:00
c2664e0283 chore: fix wrong VectorType match case (#13123) 2025-02-02 15:26:59 +08:00
ee61cede4e test(huggingface_hub): Skip the failed test temporarily. (#13142)
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-02-02 14:47:26 +08:00
b47669b80b fix: deduct LLM quota after processing invoke result (#13075)
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-02-02 12:05:11 +08:00
c0d0c63592 feat: switch to chat messages before regenerated (#11301)
Co-authored-by: zuodongxu <192560071+zuodongxu@users.noreply.github.com>
2025-01-31 13:05:10 +08:00
b09c39c8dc refactor: avoid to use extra space when finding model by name (#13043) 2025-01-30 15:08:29 +08:00
b4b09ddc3c add tongyi qwen2.5-14b/7b-instruct-1m model (#13089) 2025-01-29 11:58:01 +08:00
d0a21086bd refactor: Update Firecrawl API parameters and default settings (#13082) 2025-01-29 11:21:05 +08:00
d44882c1b5 refactor: reduce duplciate code by inheritance (#13073) 2025-01-28 10:52:01 +08:00
23c68efa2d fix: fix the formatter is not applied on log file (#12704) 2025-01-28 10:49:58 +08:00
560c5de1b7 Fixed Novita AI color and added DeepSeek R1 model (#13074) 2025-01-28 10:38:54 +08:00
5d91dbd000 Set default LOG_LEVEL to INFO for celery workers and beat (#13066)
Co-authored-by: Abdullah AlOsaimi <189027247+osaimi@users.noreply.github.com>
2025-01-27 17:09:41 +08:00
6c31ee36cd fix qwen-vl blocking mode (#13052) 2025-01-27 11:35:23 +08:00
edc29780ed fix: "Model schema not found" error only in agents (#12655) (#12760) 2025-01-27 11:33:13 +08:00
aad7e4dd1c fix:Improve MIME type detection for remote URL uploads using python-magic (#12693) 2025-01-27 11:33:03 +08:00
a6a727e8a4 feat: add inner API to create workspace without requiring email (#13021) 2025-01-26 15:36:56 +08:00
d1fc65fabc fix: adjust iteration node dark style (#13051) 2025-01-26 11:19:41 +08:00
d4be5ef9de Update Novita AI predefined models (#13045) 2025-01-26 09:25:29 +08:00
1374be5a31 fix: Unexpected tag creation when pressing enter during tag conversion (#13041) 2025-01-25 19:30:26 +08:00
b2bbc28580 support bedrock kb: retrieve and generate (#13027) 2025-01-25 17:28:06 +08:00
59b3e672aa feat: add agent thinking content display of deepseek R1 (#12949) 2025-01-24 20:13:42 +08:00
a2f8bce8f5 chore: add Japanese translation: model_providers/bedrock (#13016) 2025-01-24 18:43:33 +08:00
a2b9adb3a2 Change typo in translation (#13004) 2025-01-24 13:48:21 +08:00
28067640b5 fix: wrong zh_Hans translation: Ohio (#13006) 2025-01-24 13:41:20 +08:00
da67916843 feat: add glm-4-air-0111 (#12997)
Co-authored-by: lowell <lowell.hu@zkteco.in>
2025-01-24 10:04:46 +08:00
e54ce479ad Feat/prompt editor dark theme (#12976) 2025-01-23 16:20:00 +08:00
6024d8a42d refactor: Update Firecrawl to use v1 API (#12574)
Co-authored-by: Ademílson Tonato <ademilson.tonato@refurbed.com>
2025-01-23 11:14:48 +08:00
f565f08aa0 fix: get property of string type variable caused page crash (#12969) 2025-01-23 11:02:29 +08:00
fd4afe09f8 fix: tools translate search (#12950)
Co-authored-by: lowell <lowell.hu@zkteco.in>
2025-01-22 19:27:02 +08:00
dd0904f95c feat: add giteeAI risk control identification. (#12946) 2025-01-22 19:26:25 +08:00
4c3076f2a4 feat: add pg vector index (#12338)
Co-authored-by: huangzhuo <huangzhuo1@xiaomi.com>
2025-01-22 17:07:18 +08:00
1e73f63ff8 chore: update version to 0.15.2 in packaging and docker configurations (#12940)
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-01-22 16:40:44 +08:00
d167d5b1be feat(ark): support doubao 1.5 series of models (#12935) 2025-01-22 15:25:57 +08:00
71fa14f791 fix: resolve clipboard.writeText failure under HTTP protocol (#12936) 2025-01-22 15:18:23 +08:00
8dd1873e76 feat: workflow note dark theme (#12932) 2025-01-22 14:22:33 +08:00
f91f5c7401 fix(batch_create_segment_to_index_task): count max_position in memory. (#12929) 2025-01-22 13:39:02 +08:00
c62b7cc679 chore(build): bump poetry from 1.x to 2.x (#12369) 2025-01-22 13:38:24 +08:00
3ee213ddca add milvus full text search setting (#12930) 2025-01-22 13:36:39 +08:00
8429877b02 fix: Agent is configured for ReAct inference mode, an error is reported when viewing the agent log (#12920)
Co-authored-by: crazywoola <427733928@qq.com>
2025-01-22 13:20:32 +08:00
05a0faff6a fix: app token's last_used_at can't be updated when last_used_at is null (#12770) 2025-01-22 11:01:45 +08:00
e09f6e4987 feat: support config chunk length by env (#12925) 2025-01-22 10:43:40 +08:00
e23f4b0265 feat: add gemini-2.0-flash-thinking-exp-01-21 (#12924) 2025-01-22 10:14:37 +08:00
f582d4a13e feat: Add ability to change profile avatar (#12642) 2025-01-22 10:11:31 +08:00
2f41bd495d fix:Fix a bug that returns null when the passed path is a file. (#12775)
Co-authored-by: 刘江波 <jiangbo721@163.com>
2025-01-22 10:10:03 +08:00
162a8c4393 fix update segment keyword with same content (#12908) 2025-01-21 19:19:32 +08:00
3d1ce4c53f bug: fixed bedrock rerank bug (#12774)
Co-authored-by: hobo.l <hobo.l@binance.com>
2025-01-21 19:09:36 +08:00
6db3ae9b8e chore: remove webapp ga (#12909) 2025-01-21 18:38:33 +08:00
6d0cb9dc33 fix: variable panel scrollable (#12769)
Co-authored-by: zhaoqingyu.1075 <zhaoqingyu.1075@bytedance.com>
2025-01-21 17:50:42 +08:00
46e95e8309 fix: OpenAI o1 Bad Request Error (#12839) 2025-01-21 15:29:13 +08:00
a7b9375877 Update deepseek model configuration (#12899) 2025-01-21 15:28:11 +08:00
0c6a8a130e fix: external dataset hit test display issue(#12564) (#12612)
Co-authored-by: zhuxinliang <zhuxinliang@didiglobal.com>
2025-01-21 14:31:45 +08:00
9903f1e703 add deepseek-reasoner (#12898) 2025-01-21 12:40:58 +08:00
6fad719e42 chore(fix): Invalid quotes for using Array[String] in HTTP request node as JSON body (#12761) 2025-01-21 10:38:44 +08:00
9aaee8ee47 fix: Issues related to the deletion of conversation_id (#12488) (#12665) 2025-01-21 10:25:35 +08:00
166221d784 chore(lint): fix quotes for f-string formatting by bumping ruff to 0.9.x (#12702) 2025-01-21 10:12:29 +08:00
925d69a2ee feat:Support Minimax-Text-01 (#12763) 2025-01-21 10:08:53 +08:00
5ff08e241a fix: serply credential check query might return empty records (#12784) 2025-01-21 09:38:56 +08:00
3defd24087 feat: allow updating chunk settings for the existing documents (#12833) 2025-01-21 09:25:40 +08:00
9d86147d20 fix: SparkLite API Auth error (#12781) (#12790) 2025-01-20 22:21:21 +08:00
80801ac4ab fix: "parmas" spelling mistake. (#12875) 2025-01-20 22:18:30 +08:00
210926cd91 Fix suggested_question_prompt (#12738) 2025-01-20 22:16:30 +08:00
677a69deed fix(i18n): correct typo in zh-Hant translation (#12852) 2025-01-20 22:15:41 +08:00
8dfdee21ce chore: fix chinese translation for 'recall' (#12772)
Co-authored-by: zhaoqingyu.1075 <zhaoqingyu.1075@bytedance.com>
2025-01-20 22:15:26 +08:00
6ea77ab4cd fix: DeepSeek API Error with response format active (text and json_object) (#12747) 2025-01-20 22:04:18 +08:00
e3c996688d feat: enhance credential extraction logic based on configurate method (#12853) 2025-01-20 21:59:22 +08:00
bc3a570dda fix: Fix rerank model switching issue (#12721)
ok
2025-01-14 15:42:45 +08:00
0800021a2d chore: translate i18n files (#12708)
Co-authored-by: JzoNgKVO <27049666+JzoNgKVO@users.noreply.github.com>
2025-01-14 13:35:23 +08:00
435eddd867 Feat: copyright modification (#12707) 2025-01-14 10:00:57 +08:00
6e0fb055d1 chore: bump version to 0.15.1 (#12690)
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-01-13 19:21:06 +08:00
eux 1e9ac7ffeb feat: add table of contents to Knowledge API doc (#12688) 2025-01-13 18:31:43 +08:00
b4873ecb43 [fix] support feature restore (#12563) 2025-01-13 18:29:06 +08:00
mbo 1859d57784 api tool support multiple env url (#12249)
Co-authored-by: mabo <mabo@aeyes.ai>
2025-01-13 17:49:30 +08:00
69d58fbb50 Add new integration with Opik Tracking tool (#11501) 2025-01-13 17:41:44 +08:00
cb34991663 fix: add type hints for App model and improve error handling in audio services (#12677)
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-01-13 15:55:16 +08:00
c700364e1c fix: Update variable handling in VariableAssignerNode and clean up app_dsl_service (#12672)
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-01-13 15:54:26 +08:00
9a6b1dc3a1 Revert "Feat/new saas billing" (#12673) 2025-01-13 15:17:43 +08:00
54b5b80a07 fix(workflow): fix answer node stream processing in conditional branches (#12510) 2025-01-13 14:54:21 +08:00
831459b895 fix: ruff with statements (#12578)
Signed-off-by: yihong0618 <zouzou0208@gmail.com>
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
2025-01-13 09:55:55 +08:00
4e101604c3 fix: ruff check for True if ... else (#12576)
Signed-off-by: yihong0618 <zouzou0208@gmail.com>
2025-01-13 09:38:48 +08:00
a6455269f0 chore: Adjust translations to align with Taiwanese Mandarin conventions (#12633) 2025-01-13 09:12:43 +08:00
cd257b91c5 Fix pandas indexing method for knowledge base imports (#12637) (#12638)
Co-authored-by: CN-P5 <heibai2006@qq.com>
2025-01-13 09:06:59 +08:00
d8f57bf899 Feat/new saas billing (#12591) 2025-01-12 14:50:46 +08:00
989fb11fd7 improve the readability of the function generate_api_key (#12552) 2025-01-09 21:30:17 +08:00
140965b738 chore: translate i18n files (#12543)
Co-authored-by: WTW0313 <30284043+WTW0313@users.noreply.github.com>
2025-01-09 20:30:06 +08:00
14ee51aead Feat/add knowledge include all filter (#12537) 2025-01-09 20:21:25 +08:00
2e97ba5700 fix: Add datasets list access control and fix datasets config display issue (#12533)
Co-authored-by: nite-knite <nkCoding@gmail.com>
2025-01-09 17:44:11 +08:00
f549d53b68 fix: sum costs return error value on overview page (#12534) 2025-01-09 16:04:14 +08:00
a085ad4719 feat: show workflow running status (#12531) 2025-01-09 15:36:13 +08:00
f230a9232e fix: Parsing OpenAPI spec for external tools (#12518) (#12530) 2025-01-09 15:30:43 +08:00
e84bf35e2a fix: same chunk insert deadlock (#12502)
Co-authored-by: huangzhuo <huangzhuo1@xiaomi.com>
2025-01-09 15:16:41 +08:00
eux 20f090537f feat: add GET upload file API endpoint to dataset service api (#11899) 2025-01-09 14:52:09 +08:00
dbe7a7c4fd Fix: Add a INFO-level log when fallback to gpt2tokenizer (#12508) 2025-01-09 14:37:46 +08:00
b7a4e3903e fix: add last_refresh_time to track the validity of is_other_tab_refreshing (#12517) 2025-01-09 10:40:45 +08:00
407 changed files with 8893 additions and 1639 deletions

View File

@@ -4,7 +4,6 @@ on:
pull_request:
branches:
- main
- plugins/beta
paths:
- api/**
- docker/**
@@ -27,6 +26,9 @@ jobs:
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
fetch-depth: 0
persist-credentials: false
- name: Setup Poetry and Python ${{ matrix.python-version }}
uses: ./.github/actions/setup-poetry

View File

@@ -5,8 +5,6 @@ on:
branches:
- "main"
- "deploy/dev"
- "plugins/beta"
- "dev/plugin-deploy"
release:
types: [published]
@@ -81,10 +79,12 @@ jobs:
cache-to: type=gha,mode=max,scope=${{ matrix.service_name }}
- name: Export digest
env:
DIGEST: ${{ steps.build.outputs.digest }}
run: |
mkdir -p /tmp/digests
digest="${{ steps.build.outputs.digest }}"
touch "/tmp/digests/${digest#sha256:}"
sanitized_digest=${DIGEST#sha256:}
touch "/tmp/digests/${sanitized_digest}"
- name: Upload digest
uses: actions/upload-artifact@v4
@@ -134,23 +134,15 @@ jobs:
- name: Create manifest list and push
working-directory: /tmp/digests
env:
IMAGE_NAME: ${{ env[matrix.image_name_env] }}
run: |
docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
$(printf '${{ env[matrix.image_name_env] }}@sha256:%s ' *)
$(printf "$IMAGE_NAME@sha256:%s " *)
- name: Inspect image
run: |
docker buildx imagetools inspect ${{ env[matrix.image_name_env] }}:${{ steps.meta.outputs.version }}
- name: print context var
uses: actions/checkout@v4
- name: deploy pod in plugin env
if: github.ref == 'refs/heads/dev/plugin-deploy'
env:
IMAGEHASH: ${{ github.sha }}
APICMD: "${{ secrets.PLUGIN_CD_API_CURL }}"
WEBCMD: "${{ secrets.PLUGIN_CD_WEB_CURL }}"
IMAGE_NAME: ${{ env[matrix.image_name_env] }}
IMAGE_VERSION: ${{ steps.meta.outputs.version }}
run: |
bash -c "${APICMD/yourNewVersion/$IMAGEHASH}"
bash -c "${WEBCMD/yourNewVersion/$IMAGEHASH}"
docker buildx imagetools inspect "$IMAGE_NAME:$IMAGE_VERSION"

View File

@@ -20,6 +20,9 @@ jobs:
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
fetch-depth: 0
persist-credentials: false
- name: Setup Poetry and Python
uses: ./.github/actions/setup-poetry

View File

@@ -1,23 +0,0 @@
name: Deploy Plugin Dev
on:
workflow_run:
workflows: ["Build and Push API & Web"]
branches:
- "dev/plugin-deploy"
types:
- completed
jobs:
deploy:
runs-on: ubuntu-latest
if: |
github.event.workflow_run.conclusion == 'success'
steps:
- name: Deploy to server
uses: appleboy/ssh-action@v0.1.8
with:
host: ${{ secrets.SSH_HOST }}
username: ${{ secrets.SSH_USER }}
key: ${{ secrets.SSH_PRIVATE_KEY }}
script: "echo 123"

View File

@@ -9,6 +9,6 @@ yq eval '.services["pgvecto-rs"].ports += ["5431:5432"]' -i docker/docker-compos
yq eval '.services["elasticsearch"].ports += ["9200:9200"]' -i docker/docker-compose.yaml
yq eval '.services.couchbase-server.ports += ["8091-8096:8091-8096"]' -i docker/docker-compose.yaml
yq eval '.services.couchbase-server.ports += ["11210:11210"]' -i docker/docker-compose.yaml
yq eval '.services.tidb.ports += ["4000:4000"]' -i docker/docker-compose.yaml
yq eval '.services.tidb.ports += ["4000:4000"]' -i docker/tidb/docker-compose.yaml
echo "Ports exposed for sandbox, weaviate, tidb, qdrant, chroma, milvus, pgvector, pgvecto-rs, elasticsearch, couchbase"

View File

@@ -4,7 +4,6 @@ on:
pull_request:
branches:
- main
- plugins/beta
concurrency:
group: style-${{ github.head_ref || github.run_id }}
@@ -18,6 +17,9 @@ jobs:
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
fetch-depth: 0
persist-credentials: false
- name: Check changed files
id: changed-files
@@ -60,6 +62,9 @@ jobs:
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
fetch-depth: 0
persist-credentials: false
- name: Check changed files
id: changed-files
@@ -87,7 +92,7 @@ jobs:
- name: Web style check
if: steps.changed-files.outputs.any_changed == 'true'
run: yarn run lint
run: pnpm run lint
docker-compose-template:
name: Docker Compose Template
@@ -96,6 +101,9 @@ jobs:
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
fetch-depth: 0
persist-credentials: false
- name: Check changed files
id: changed-files
@@ -124,6 +132,9 @@ jobs:
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
fetch-depth: 0
persist-credentials: false
- name: Check changed files
id: changed-files
@@ -141,7 +152,7 @@ jobs:
if: steps.changed-files.outputs.any_changed == 'true'
env:
BASH_SEVERITY: warning
DEFAULT_BRANCH: plugins/beta
DEFAULT_BRANCH: main
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
IGNORE_GENERATED_FILES: true
IGNORE_GITIGNORED_FILES: true

View File

@@ -26,6 +26,9 @@ jobs:
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
persist-credentials: false
- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v4
@@ -35,7 +38,7 @@
cache-dependency-path: 'pnpm-lock.yaml'
- name: Install Dependencies
run: pnpm install
run: pnpm install --frozen-lockfile
- name: Test
run: pnpm test

View File

@@ -16,6 +16,7 @@ jobs:
- uses: actions/checkout@v4
with:
fetch-depth: 2 # last 2 commits
persist-credentials: false
- name: Check for file changes in i18n/en-US
id: check_files

View File

@@ -28,6 +28,9 @@ jobs:
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
fetch-depth: 0
persist-credentials: false
- name: Setup Poetry and Python ${{ matrix.python-version }}
uses: ./.github/actions/setup-poetry
@@ -51,7 +54,15 @@ jobs:
- name: Expose Service Ports
run: sh .github/workflows/expose_service_ports.sh
- name: Set up Vector Stores (TiDB, Weaviate, Qdrant, PGVector, Milvus, PgVecto-RS, Chroma, MyScale, ElasticSearch, Couchbase)
- name: Set up Vector Store (TiDB)
uses: hoverkraft-tech/compose-action@v2.0.2
with:
compose-file: docker/tidb/docker-compose.yaml
services: |
tidb
tiflash
- name: Set up Vector Stores (Weaviate, Qdrant, PGVector, Milvus, PgVecto-RS, Chroma, MyScale, ElasticSearch, Couchbase)
uses: hoverkraft-tech/compose-action@v2.0.2
with:
compose-file: |
@@ -67,7 +78,9 @@ jobs:
pgvector
chroma
elasticsearch
tidb
- name: Check TiDB Ready
run: poetry run -P api python api/tests/integration_tests/vdb/tidb_vector/check_tiflash_ready.py
- name: Test Vector Stores
run: poetry run -P api bash dev/pytest/pytest_vdb.sh

View File

@@ -22,25 +22,34 @@ jobs:
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
fetch-depth: 0
persist-credentials: false
- name: Check changed files
id: changed-files
uses: tj-actions/changed-files@v45
with:
files: web/**
# to run pnpm, should install package canvas, but it always install failed on amd64 under ubuntu-latest
# - name: Install pnpm
# uses: pnpm/action-setup@v4
# with:
# version: 10
# run_install: false
- name: Setup Node.js
uses: actions/setup-node@v4
if: steps.changed-files.outputs.any_changed == 'true'
with:
node-version: 20
cache: pnpm
cache-dependency-path: ./web/package.json
# - name: Setup Node.js
# uses: actions/setup-node@v4
# if: steps.changed-files.outputs.any_changed == 'true'
# with:
# node-version: 20
# cache: pnpm
# cache-dependency-path: ./web/package.json
- name: Install dependencies
if: steps.changed-files.outputs.any_changed == 'true'
run: pnpm install --frozen-lockfile
# - name: Install dependencies
# if: steps.changed-files.outputs.any_changed == 'true'
# run: pnpm install --frozen-lockfile
- name: Run tests
if: steps.changed-files.outputs.any_changed == 'true'
run: pnpm test
# - name: Run tests
# if: steps.changed-files.outputs.any_changed == 'true'
# run: pnpm test

.gitignore (vendored): 1 line changed
View File

@@ -163,6 +163,7 @@ docker/volumes/db/data/*
docker/volumes/redis/data/*
docker/volumes/weaviate/*
docker/volumes/qdrant/*
docker/tidb/volumes/*
docker/volumes/etcd/*
docker/volumes/minio/*
docker/volumes/milvus/*

View File

@@ -108,6 +108,72 @@ Please refer to our [FAQ](https://docs.dify.ai/getting-started/install-self-host
**7. Backend-as-a-Service**:
All of Dify's offerings come with corresponding APIs, so you could effortlessly integrate Dify into your own business logic.
## Feature Comparison
<table style="width: 100%;">
<tr>
<th align="center">Feature</th>
<th align="center">Dify.AI</th>
<th align="center">LangChain</th>
<th align="center">Flowise</th>
<th align="center">OpenAI Assistants API</th>
</tr>
<tr>
<td align="center">Programming Approach</td>
<td align="center">API + App-oriented</td>
<td align="center">Python Code</td>
<td align="center">App-oriented</td>
<td align="center">API-oriented</td>
</tr>
<tr>
<td align="center">Supported LLMs</td>
<td align="center">Rich Variety</td>
<td align="center">Rich Variety</td>
<td align="center">Rich Variety</td>
<td align="center">OpenAI-only</td>
</tr>
<tr>
<td align="center">RAG Engine</td>
<td align="center">✅</td>
<td align="center">✅</td>
<td align="center">✅</td>
<td align="center">✅</td>
</tr>
<tr>
<td align="center">Agent</td>
<td align="center">✅</td>
<td align="center">✅</td>
<td align="center">❌</td>
<td align="center">✅</td>
</tr>
<tr>
<td align="center">Workflow</td>
<td align="center">✅</td>
<td align="center">❌</td>
<td align="center">✅</td>
<td align="center">❌</td>
</tr>
<tr>
<td align="center">Observability</td>
<td align="center">✅</td>
<td align="center">✅</td>
<td align="center">❌</td>
<td align="center">❌</td>
</tr>
<tr>
<td align="center">Enterprise Feature (SSO/Access control)</td>
<td align="center">✅</td>
<td align="center">❌</td>
<td align="center">❌</td>
<td align="center">❌</td>
</tr>
<tr>
<td align="center">Local Deployment</td>
<td align="center">✅</td>
<td align="center">✅</td>
<td align="center">✅</td>
<td align="center">❌</td>
</tr>
</table>
## Using Dify

View File

@@ -55,7 +55,7 @@
Dify est une plateforme de développement d'applications LLM open source. Son interface intuitive combine un flux de travail d'IA, un pipeline RAG, des capacités d'agent, une gestion de modèles, des fonctionnalités d'observabilité, et plus encore, vous permettant de passer rapidement du prototype à la production. Voici une liste des fonctionnalités principales:
</br> </br>
**1. Flux de travail**:
**1. Flux de travail** :
Construisez et testez des flux de travail d'IA puissants sur un canevas visuel, en utilisant toutes les fonctionnalités suivantes et plus encore.
@@ -63,27 +63,25 @@ Dify est une plateforme de développement d'applications LLM open source. Son in
**2. Prise en charge complète des modèles**:
**2. Prise en charge complète des modèles** :
Intégration transparente avec des centaines de LLM propriétaires / open source provenant de dizaines de fournisseurs d'inférence et de solutions auto-hébergées, couvrant GPT, Mistral, Llama3, et tous les modèles compatibles avec l'API OpenAI. Une liste complète des fournisseurs de modèles pris en charge se trouve [ici](https://docs.dify.ai/getting-started/readme/model-providers).
![providers-v5](https://github.com/langgenius/dify/assets/13230914/5a17bdbe-097a-4100-8363-40255b70f6e3)
**3. IDE de prompt**:
**3. IDE de prompt** :
Interface intuitive pour créer des prompts, comparer les performances des modèles et ajouter des fonctionnalités supplémentaires telles que la synthèse vocale à une application basée sur des chats.
**4. Pipeline RAG**:
**4. Pipeline RAG** :
Des capacités RAG étendues qui couvrent tout, de l'ingestion de documents à la récupération, avec un support prêt à l'emploi pour l'extraction de texte à partir de PDF, PPT et autres formats de document courants.
**5. Capac
ités d'agent**:
**5. Capacités d'agent** :
Vous pouvez définir des agents basés sur l'appel de fonction LLM ou ReAct, et ajouter des outils pré-construits ou personnalisés pour l'agent. Dify fournit plus de 50 outils intégrés pour les agents d'IA, tels que la recherche Google, DALL·E, Stable Diffusion et WolframAlpha.
**6. LLMOps**:
**6. LLMOps** :
Surveillez et analysez les journaux d'application et les performances au fil du temps. Vous pouvez continuellement améliorer les prompts, les ensembles de données et les modèles en fonction des données de production et des annotations.
**7. Backend-as-a-Service**:
**7. Backend-as-a-Service** :
Toutes les offres de Dify sont accompagnées d'API correspondantes, vous permettant d'intégrer facilement Dify dans votre propre logique métier.

View File

@@ -164,7 +164,7 @@ DifyはオープンソースのLLMアプリケーション開発プラットフ
- **企業/組織向けのDify</br>**
企業中心の機能を提供しています。[メールを送信](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry)して企業のニーズについて相談してください。 </br>
> AWSを使用しているスタートアップ企業や中小企業の場合は、[AWS Marketplace](https://aws.amazon.com/marketplace/pp/prodview-t22mebxzwjhu6)のDify Premiumをチェックして、ワンクリックで自分のAWS VPCにデプロイできます。さらに、手頃な価格のAMIオファリングして、ロゴやブランディングをカスタマイズしてアプリケーションを作成するオプションがあります。
> AWSを使用しているスタートアップ企業や中小企業の場合は、[AWS Marketplace](https://aws.amazon.com/marketplace/pp/prodview-t23mebxzwjhu6)のDify Premiumをチェックして、ワンクリックで自分のAWS VPCにデプロイできます。さらに、手頃な価格のAMIオファリングして、ロゴやブランディングをカスタマイズしてアプリケーションを作成するオプションがあります。
## 最新の情報を入手

View File

@@ -87,9 +87,7 @@ Dify is an open-source LLM app development platform. Its intuitive interface com
## Feature Comparison
<table style="width: 100%;">
<tr
>
<tr>
<th align="center">Feature</th>
<th align="center">Dify.AI</th>
<th align="center">LangChain</th>

View File

@@ -106,6 +106,73 @@ Prosimo, glejte naša pogosta vprašanja [FAQ](https://docs.dify.ai/getting-star
**7. Backend-as-a-Service**:
AVse ponudbe Difyja so opremljene z ustreznimi API-ji, tako da lahko Dify brez težav integrirate v svojo poslovno logiko.
## Primerjava Funkcij
<table style="width: 100%;">
<tr>
<th align="center">Funkcija</th>
<th align="center">Dify.AI</th>
<th align="center">LangChain</th>
<th align="center">Flowise</th>
<th align="center">OpenAI Assistants API</th>
</tr>
<tr>
<td align="center">Programski pristop</td>
<td align="center">API + usmerjeno v aplikacije</td>
<td align="center">Python koda</td>
<td align="center">Usmerjeno v aplikacije</td>
<td align="center">Usmerjeno v API</td>
</tr>
<tr>
<td align="center">Podprti LLM-ji</td>
<td align="center">Bogata izbira</td>
<td align="center">Bogata izbira</td>
<td align="center">Bogata izbira</td>
<td align="center">Samo OpenAI</td>
</tr>
<tr>
<td align="center">RAG pogon</td>
<td align="center">✅</td>
<td align="center">✅</td>
<td align="center">✅</td>
<td align="center">✅</td>
</tr>
<tr>
<td align="center">Agent</td>
<td align="center">✅</td>
<td align="center">✅</td>
<td align="center">❌</td>
<td align="center">✅</td>
</tr>
<tr>
<td align="center">Potek dela</td>
<td align="center">✅</td>
<td align="center">❌</td>
<td align="center">✅</td>
<td align="center">❌</td>
</tr>
<tr>
<td align="center">Spremljanje</td>
<td align="center">✅</td>
<td align="center">✅</td>
<td align="center">❌</td>
<td align="center">❌</td>
</tr>
<tr>
<td align="center">Funkcija za podjetja (SSO/nadzor dostopa)</td>
<td align="center">✅</td>
<td align="center">❌</td>
<td align="center">❌</td>
<td align="center">❌</td>
</tr>
<tr>
<td align="center">Lokalna namestitev</td>
<td align="center">✅</td>
<td align="center">✅</td>
<td align="center">✅</td>
<td align="center">❌</td>
</tr>
</table>
## Uporaba Dify
@@ -187,4 +254,4 @@ Zaradi zaščite vaše zasebnosti se izogibajte objavljanju varnostnih vprašanj
## Licenca
To skladišče je na voljo pod [odprtokodno licenco Dify](LICENSE) , ki je v bistvu Apache 2.0 z nekaj dodatnimi omejitvami.
To skladišče je na voljo pod [odprtokodno licenco Dify](LICENSE) , ki je v bistvu Apache 2.0 z nekaj dodatnimi omejitvami.

View File

@@ -55,9 +55,11 @@ RUN \
# basic environment
curl nodejs libgmp-dev libmpfr-dev libmpc-dev \
# For Security
# expat libldap-2.5-0 perl libsqlite3-0 zlib1g \
expat libldap-2.5-0 perl libsqlite3-0 zlib1g \
# install a chinese font to support the use of tools like matplotlib
fonts-noto-cjk \
# install a package to improve the accuracy of guessing mime type and file extension
media-types \
# install libmagic to support the use of python-magic guess MIMETYPE
libmagic1 \
&& apt-get autoremove -y \

View File

@@ -37,7 +37,13 @@
4. Create environment.
Dify API service uses [Poetry](https://python-poetry.org/docs/) to manage dependencies. You can execute `poetry shell` to activate the environment.
Dify API service uses [Poetry](https://python-poetry.org/docs/) to manage dependencies. First, you need to add the poetry shell plugin, if you don't have it already, in order to run in a virtual environment. [Note: Poetry shell is no longer a native command so you need to install the poetry plugin beforehand]
```bash
poetry self add poetry-plugin-shell
```
Then, you can execute `poetry shell` to activate the environment.
5. Install dependencies

View File

@@ -707,12 +707,13 @@ def extract_unique_plugins(output_file: str, input_file: str):
@click.option(
"--output_file", prompt=True, help="The file to store the installed plugins.", default="installed_plugins.jsonl"
)
def install_plugins(input_file: str, output_file: str):
@click.option("--workers", prompt=True, help="The number of workers to install plugins.", default=100)
def install_plugins(input_file: str, output_file: str, workers: int):
"""
Install plugins.
"""
click.echo(click.style("Starting install plugins.", fg="white"))
PluginMigration.install_plugins(input_file, output_file)
PluginMigration.install_plugins(input_file, output_file, workers)
click.echo(click.style("Install plugins completed.", fg="green"))

View File

@@ -373,8 +373,8 @@ class HttpConfig(BaseSettings):
)
RESPECT_XFORWARD_HEADERS_ENABLED: bool = Field(
description="Enable or disable the X-Forwarded-For Proxy Fix middleware from Werkzeug"
" to respect X-* headers to redirect clients",
description="Enable handling of X-Forwarded-For, X-Forwarded-Proto, and X-Forwarded-Port headers"
" when the app is behind a single trusted reverse proxy.",
default=False,
)
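Related commit abec23118d (#13102) adds X-Forwarded-Port handling to this middleware. A minimal sketch of what honoring these headers behind a single trusted reverse proxy looks like with Werkzeug's ProxyFix (illustrative wiring, not Dify's actual setup code):

```python
# Sketch: trust exactly one proxy hop for the For, Proto, and Port headers.
from flask import Flask
from werkzeug.middleware.proxy_fix import ProxyFix

app = Flask(__name__)
# x_for / x_proto / x_port count how many proxies' headers to trust.
app.wsgi_app = ProxyFix(app.wsgi_app, x_for=1, x_proto=1, x_port=1)
```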

View File

@@ -1,3 +1,4 @@
import os
from typing import Any, Literal, Optional
from urllib.parse import quote_plus
@@ -166,6 +167,11 @@ class DatabaseConfig(BaseSettings):
default=False,
)
RETRIEVAL_SERVICE_EXECUTORS: NonNegativeInt = Field(
description="Number of processes for the retrieval service, default to CPU cores.",
default=os.cpu_count(),
)
@computed_field
def SQLALCHEMY_ENGINE_OPTIONS(self) -> dict[str, Any]:
return {
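`RETRIEVAL_SERVICE_EXECUTORS` defaults to the machine's CPU count. A minimal sketch of how such a setting typically sizes a retrieval worker pool (illustrative only, not Dify's actual retrieval service):

```python
# Sketch: size a worker pool from a CPU-count-defaulted setting.
import os
from concurrent.futures import ThreadPoolExecutor

workers = os.cpu_count() or 1  # mirrors the Field default above
retrieval_pool = ThreadPoolExecutor(max_workers=workers)
```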

View File

@@ -15,7 +15,7 @@ AUDIO_EXTENSIONS.extend([ext.upper() for ext in AUDIO_EXTENSIONS])
if dify_config.ETL_TYPE == "Unstructured":
DOCUMENT_EXTENSIONS = ["txt", "markdown", "md", "mdx", "pdf", "html", "htm", "xlsx", "xls"]
DOCUMENT_EXTENSIONS.extend(("docx", "csv", "eml", "msg", "pptx", "xml", "epub"))
DOCUMENT_EXTENSIONS.extend(("doc", "docx", "csv", "eml", "msg", "pptx", "xml", "epub"))
if dify_config.UNSTRUCTURED_API_URL:
DOCUMENT_EXTENSIONS.append("ppt")
DOCUMENT_EXTENSIONS.extend([ext.upper() for ext in DOCUMENT_EXTENSIONS])

View File

@@ -14,6 +14,7 @@ from controllers.console.wraps import account_initialization_required, enterpris
from core.errors.error import LLMBadRequestError, ProviderTokenNotInitError
from core.indexing_runner import IndexingRunner
from core.model_runtime.entities.model_entities import ModelType
from core.plugin.entities.plugin import ModelProviderID
from core.provider_manager import ProviderManager
from core.rag.datasource.vdb.vector_type import VectorType
from core.rag.extractor.entity.extract_setting import ExtractSetting
@@ -72,7 +73,9 @@ class DatasetListApi(Resource):
data = marshal(datasets, dataset_detail_fields)
for item in data:
# convert embedding_model_provider to plugin standard format
if item["indexing_technique"] == "high_quality":
item["embedding_model_provider"] = str(ModelProviderID(item["embedding_model_provider"]))
item_model = f"{item['embedding_model']}:{item['embedding_model_provider']}"
if item_model in model_names:
item["embedding_available"] = True
@@ -620,7 +623,6 @@ class DatasetRetrievalSettingApi(Resource):
match vector_type:
case (
VectorType.RELYT
| VectorType.PGVECTOR
| VectorType.TIDB_VECTOR
| VectorType.CHROMA
| VectorType.TENCENT

View File

@@ -1,3 +1,5 @@
from urllib.parse import quote
from flask import Response, request
from flask_restful import Resource, reqparse # type: ignore
from werkzeug.exceptions import NotFound
@@ -71,7 +73,8 @@ class FilePreviewApi(Resource):
if upload_file.size > 0:
response.headers["Content-Length"] = str(upload_file.size)
if args["as_attachment"]:
response.headers["Content-Disposition"] = f"attachment; filename={upload_file.name}"
encoded_filename = quote(upload_file.name)
response.headers["Content-Disposition"] = f"attachment; filename*=UTF-8''{encoded_filename}"
return response
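Editor's note: the filename*=UTF-8''... form comes from RFC 5987/RFC 6266; urllib.parse.quote percent-encodes the UTF-8 bytes so non-ASCII filenames survive the header, which the bare filename= form did not. For example:

from urllib.parse import quote

filename = "报告 2025.pdf"
header = f"attachment; filename*=UTF-8''{quote(filename)}"
print(header)  # attachment; filename*=UTF-8''%E6%8A%A5%E5%91%8A%202025.pdf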

View File

@ -50,8 +50,8 @@ class EnterpriseWorkspaceNoOwnerEmail(Resource):
"plan": tenant.plan,
"status": tenant.status,
"custom_config": json.loads(tenant.custom_config) if tenant.custom_config else {},
"created_at": tenant.created_at.isoformat() if tenant.created_at else None,
"updated_at": tenant.updated_at.isoformat() if tenant.updated_at else None,
"created_at": tenant.created_at.isoformat() + "Z" if tenant.created_at else None,
"updated_at": tenant.updated_at.isoformat() + "Z" if tenant.updated_at else None,
}
return {
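Editor's note: appending "Z" assumes the stored datetimes are naive UTC values; isoformat() on a naive datetime emits no offset, so the suffix supplies it. Appending it to an aware datetime would double-stamp the offset:

from datetime import datetime, timezone

naive_utc = datetime(2025, 2, 25, 4, 30)                    # naive, assumed UTC
print(naive_utc.isoformat() + "Z")                          # 2025-02-25T04:30:00Z
aware = datetime(2025, 2, 25, 4, 30, tzinfo=timezone.utc)
print(aware.isoformat())                                    # 2025-02-25T04:30:00+00:00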

View File

@ -10,6 +10,7 @@ from controllers.service_api.app.error import NotChatAppError
from controllers.service_api.wraps import FetchUserArg, WhereisUserArg, validate_app_token
from core.app.entities.app_invoke_entities import InvokeFrom
from fields.conversation_fields import message_file_fields
from fields.message_fields import feedback_fields, retriever_resource_fields
from fields.raws import FilesContainedField
from libs.helper import TimestampField, uuid_value
from models.model import App, AppMode, EndUser
@ -18,26 +19,6 @@ from services.message_service import MessageService
class MessageListApi(Resource):
feedback_fields = {"rating": fields.String}
retriever_resource_fields = {
"id": fields.String,
"message_id": fields.String,
"position": fields.Integer,
"dataset_id": fields.String,
"dataset_name": fields.String,
"document_id": fields.String,
"document_name": fields.String,
"data_source_type": fields.String,
"segment_id": fields.String,
"score": fields.Float,
"hit_count": fields.Integer,
"word_count": fields.Integer,
"segment_position": fields.Integer,
"index_node_hash": fields.String,
"content": fields.String,
"created_at": TimestampField,
}
agent_thought_fields = {
"id": fields.String,
"chain_id": fields.String,
@ -89,7 +70,7 @@ class MessageListApi(Resource):
try:
return MessageService.pagination_by_first_id(
app_model, end_user, args["conversation_id"], args["first_id"], args["limit"]
app_model, end_user, args["conversation_id"], args["first_id"], args["limit"], "desc"
)
except services.errors.conversation.ConversationNotExistsError:
raise NotFound("Conversation Not Exists.")

View File

@ -336,6 +336,10 @@ class DocumentUpdateByFileApi(DatasetApiResource):
if not dataset:
raise ValueError("Dataset is not exist.")
# indexing_technique is already set in dataset since this is an update
args["indexing_technique"] = dataset.indexing_technique
if "file" in request.files:
# save file info
file = request.files["file"]

View File

@ -21,7 +21,7 @@ from core.app.entities.app_invoke_entities import InvokeFrom
from core.errors.error import ModelCurrentlyNotSupportError, ProviderTokenNotInitError, QuotaExceededError
from core.model_runtime.errors.invoke import InvokeError
from fields.conversation_fields import message_file_fields
from fields.message_fields import agent_thought_fields
from fields.message_fields import agent_thought_fields, feedback_fields, retriever_resource_fields
from fields.raws import FilesContainedField
from libs import helper
from libs.helper import TimestampField, uuid_value
@ -34,27 +34,6 @@ from services.message_service import MessageService
class MessageListApi(WebApiResource):
feedback_fields = {"rating": fields.String}
retriever_resource_fields = {
"id": fields.String,
"message_id": fields.String,
"position": fields.Integer,
"dataset_id": fields.String,
"dataset_name": fields.String,
"document_id": fields.String,
"document_name": fields.String,
"data_source_type": fields.String,
"segment_id": fields.String,
"score": fields.Float,
"hit_count": fields.Integer,
"word_count": fields.Integer,
"segment_position": fields.Integer,
"index_node_hash": fields.String,
"content": fields.String,
"created_at": TimestampField,
}
message_fields = {
"id": fields.String,
"conversation_id": fields.String,

View File

@ -329,6 +329,7 @@ class BaseAgentRunner(AppRunner):
)
if not updated_agent_thought:
raise ValueError("agent thought not found")
agent_thought = updated_agent_thought
if thought:
agent_thought.thought = thought

View File

@ -140,9 +140,7 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
app_config=app_config,
file_upload_config=file_extra_config,
conversation_id=conversation.id if conversation else None,
inputs=conversation.inputs
if conversation
else self._prepare_user_inputs(
inputs=self._prepare_user_inputs(
user_inputs=inputs, variables=app_config.variables, tenant_id=app_model.tenant_id
),
query=query,

View File

@ -384,6 +384,7 @@ class AdvancedChatAppGenerateTaskPipeline:
task_id=self._application_generate_entity.task_id,
workflow_node_execution=workflow_node_execution,
)
session.commit()
if node_finish_resp:
yield node_finish_resp

View File

@ -149,9 +149,7 @@ class AgentChatAppGenerator(MessageBasedAppGenerator):
model_conf=ModelConfigConverter.convert(app_config),
file_upload_config=file_extra_config,
conversation_id=conversation.id if conversation else None,
inputs=conversation.inputs
if conversation
else self._prepare_user_inputs(
inputs=self._prepare_user_inputs(
user_inputs=inputs, variables=app_config.variables, tenant_id=app_model.tenant_id
),
query=query,

View File

@ -141,9 +141,7 @@ class ChatAppGenerator(MessageBasedAppGenerator):
model_conf=ModelConfigConverter.convert(app_config),
file_upload_config=file_extra_config,
conversation_id=conversation.id if conversation else None,
inputs=conversation.inputs
if conversation
else self._prepare_user_inputs(
inputs=self._prepare_user_inputs(
user_inputs=inputs, variables=app_config.variables, tenant_id=app_model.tenant_id
),
query=query,

View File

@ -42,7 +42,6 @@ class MessageBasedAppGenerator(BaseAppGenerator):
ChatAppGenerateEntity,
CompletionAppGenerateEntity,
AgentChatAppGenerateEntity,
AgentChatAppGenerateEntity,
],
queue_manager: AppQueueManager,
conversation: Conversation,

View File

@ -387,7 +387,6 @@ class WorkflowBasedAppRunner(AppRunner):
status=event.status,
data=event.data,
metadata=event.metadata,
node_id=event.node_id,
)
)
elif isinstance(event, ParallelBranchRunStartedEvent):

View File

@ -331,7 +331,6 @@ class QueueAgentLogEvent(AppQueueEvent):
status: str
data: Mapping[str, Any]
metadata: Optional[Mapping[str, Any]] = None
node_id: str
class QueueNodeRetryEvent(QueueNodeStartedEvent):

View File

@ -719,7 +719,6 @@ class AgentLogStreamResponse(StreamResponse):
status: str
data: Mapping[str, Any]
metadata: Optional[Mapping[str, Any]] = None
node_id: str
event: StreamEvent = StreamEvent.AGENT_LOG
data: Data

View File

@ -844,7 +844,7 @@ class WorkflowCycleManage:
if node_execution_id not in self._workflow_node_executions:
raise ValueError(f"Workflow node execution not found: {node_execution_id}")
cached_workflow_node_execution = self._workflow_node_executions[node_execution_id]
return cached_workflow_node_execution
return session.merge(cached_workflow_node_execution)
def _handle_agent_log(self, task_id: str, event: QueueAgentLogEvent) -> AgentLogStreamResponse:
"""
@ -864,6 +864,5 @@ class WorkflowCycleManage:
status=event.status,
data=event.data,
metadata=event.metadata,
node_id=event.node_id,
),
)
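Editor's note: session.merge() matters here because the cached execution object may belong to an earlier, now-closed session. A hedged sketch of the idea:

from sqlalchemy.orm import Session

def reattach(session: Session, cached_instance):
    # merge() returns a copy tracked by *this* session (loading the row if
    # needed), so later attribute changes are flushed; returning the detached
    # cached instance directly would silently drop subsequent updates.
    return session.merge(cached_instance)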

View File

@ -7,6 +7,7 @@ from json import JSONDecodeError
from typing import Optional
from pydantic import BaseModel, ConfigDict
from sqlalchemy import or_
from constants import HIDDEN_VALUE
from core.entities import DEFAULT_PLUGIN_ID
@ -28,6 +29,7 @@ from core.model_runtime.entities.provider_entities import (
)
from core.model_runtime.model_providers.__base.ai_model import AIModel
from core.model_runtime.model_providers.model_provider_factory import ModelProviderFactory
from core.plugin.entities.plugin import ModelProviderID
from extensions.ext_database import db
from models.provider import (
LoadBalancingModelConfig,
@ -190,8 +192,11 @@ class ProviderConfiguration(BaseModel):
db.session.query(Provider)
.filter(
Provider.tenant_id == self.tenant_id,
Provider.provider_name == self.provider.provider,
Provider.provider_type == ProviderType.CUSTOM.value,
or_(
Provider.provider_name == ModelProviderID(self.provider.provider).plugin_name,
Provider.provider_name == self.provider.provider,
),
)
.first()
)
@ -279,7 +284,10 @@ class ProviderConfiguration(BaseModel):
db.session.query(Provider)
.filter(
Provider.tenant_id == self.tenant_id,
Provider.provider_name == self.provider.provider,
or_(
Provider.provider_name == ModelProviderID(self.provider.provider).plugin_name,
Provider.provider_name == self.provider.provider,
),
Provider.provider_type == ProviderType.CUSTOM.value,
)
.first()

View File

@ -65,8 +65,7 @@ def make_request(method, url, max_retries=SSRF_DEFAULT_MAX_RETRIES, **kwargs):
retries += 1
if retries <= max_retries:
time.sleep(BACKOFF_FACTOR * (2 ** (retries - 1)))
raise MaxRetriesExceededError(
f"Reached maximum retries ({max_retries}) for URL {url}")
raise MaxRetriesExceededError(f"Reached maximum retries ({max_retries}) for URL {url}")
def get(url, max_retries=SSRF_DEFAULT_MAX_RETRIES, **kwargs):
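Editor's note: the retry delay grows exponentially. Worked numbers, assuming BACKOFF_FACTOR = 0.5 for illustration (its real value lives elsewhere in the module):

BACKOFF_FACTOR = 0.5  # assumed for illustration only
delays = [BACKOFF_FACTOR * (2 ** (r - 1)) for r in (1, 2, 3)]
print(delays)  # [0.5, 1.0, 2.0] seconds before retries 1, 2 and 3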

View File

@ -228,7 +228,7 @@ class LargeLanguageModel(AIModel):
:return: result generator
"""
callbacks = callbacks or []
prompt_message = AssistantPromptMessage(content="")
assistant_message = AssistantPromptMessage(content="")
usage = None
system_fingerprint = None
real_model = model
@ -250,7 +250,7 @@ class LargeLanguageModel(AIModel):
callbacks=callbacks,
)
prompt_message.content += chunk.delta.message.content
assistant_message.content += chunk.delta.message.content
real_model = chunk.model
if chunk.delta.usage:
usage = chunk.delta.usage
@ -265,7 +265,7 @@ class LargeLanguageModel(AIModel):
result=LLMResult(
model=real_model,
prompt_messages=prompt_messages,
message=prompt_message,
message=assistant_message,
usage=usage or LLMUsage.empty_usage(),
system_fingerprint=system_fingerprint,
),

View File

@ -20,6 +20,7 @@ from core.model_runtime.model_providers.__base.text_embedding_model import TextE
from core.model_runtime.model_providers.__base.tts_model import TTSModel
from core.model_runtime.schema_validators.model_credential_schema_validator import ModelCredentialSchemaValidator
from core.model_runtime.schema_validators.provider_credential_schema_validator import ProviderCredentialSchemaValidator
from core.plugin.entities.plugin import ModelProviderID
from core.plugin.entities.plugin_daemon import PluginModelProviderEntity
from core.plugin.manager.asset import PluginAssetManager
from core.plugin.manager.model import PluginModelManager
@ -112,6 +113,9 @@ class ModelProviderFactory:
:param provider: provider name
:return: provider schema
"""
if "/" not in provider:
provider = str(ModelProviderID(provider))
# fetch plugin model providers
plugin_model_provider_entities = self.get_plugin_model_providers()
@ -363,4 +367,4 @@ class ModelProviderFactory:
plugin_id = "/".join(provider.split("/")[:-1])
provider_name = provider.split("/")[-1]
return plugin_id, provider_name
return str(plugin_id), provider_name

View File

@ -0,0 +1,22 @@
- claude-3-haiku@20240307
- claude-3-opus@20240229
- claude-3-sonnet@20240229
- claude-3-5-sonnet-v2@20241022
- claude-3-5-sonnet@20240620
- gemini-1.0-pro-vision-001
- gemini-1.0-pro-002
- gemini-1.5-flash-001
- gemini-1.5-flash-002
- gemini-1.5-pro-001
- gemini-1.5-pro-002
- gemini-2.0-flash-001
- gemini-2.0-flash-exp
- gemini-2.0-flash-lite-preview-02-05
- gemini-2.0-flash-thinking-exp-01-21
- gemini-2.0-flash-thinking-exp-1219
- gemini-2.0-pro-exp-02-05
- gemini-exp-1114
- gemini-exp-1121
- gemini-exp-1206
- gemini-flash-experimental
- gemini-pro-experimental

View File

@ -159,7 +159,7 @@ class GenericProviderID:
if re.match(r"^[a-z0-9_-]+$", value):
value = f"langgenius/{value}/{value}"
else:
raise ValueError("Invalid plugin id")
raise ValueError(f"Invalid plugin id {value}")
self.organization, self.plugin_name, self.provider_name = value.split("/")
self.is_hardcoded = is_hardcoded
@ -169,6 +169,21 @@ class GenericProviderID:
return f"{self.organization}/{self.plugin_name}"
class ModelProviderID(GenericProviderID):
def __init__(self, value: str, is_hardcoded: bool = False) -> None:
super().__init__(value, is_hardcoded)
if self.organization == "langgenius" and self.provider_name == "google":
self.plugin_name = "gemini"
class ToolProviderID(GenericProviderID):
def __init__(self, value: str, is_hardcoded: bool = False) -> None:
super().__init__(value, is_hardcoded)
if self.organization == "langgenius":
if self.provider_name in ["jina", "siliconflow", "stepfun"]:
self.plugin_name = f"{self.provider_name}_tool"
class PluginDependency(BaseModel):
class Type(enum.StrEnum):
Github = PluginInstallationSource.Github.value
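Editor's note: taken together, the normalization appears to work as below, assuming str() renders the full organization/plugin_name/provider_name triple, as the surrounding diffs suggest:

from core.plugin.entities.plugin import ModelProviderID, ToolProviderID

pid = ModelProviderID("google")  # bare name matches ^[a-z0-9_-]+$
# expands to "langgenius/google/google", then the subclass hook remaps the
# plugin to the Gemini plugin:
assert (pid.organization, pid.plugin_name, pid.provider_name) == ("langgenius", "gemini", "google")

tid = ToolProviderID("jina")
assert tid.plugin_name == "jina_tool"  # jina/siliconflow/stepfun gain a _tool suffix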

View File

@ -3,7 +3,7 @@ from typing import Any, Optional
from pydantic import BaseModel
from core.plugin.entities.plugin import GenericProviderID
from core.plugin.entities.plugin import GenericProviderID, ToolProviderID
from core.plugin.entities.plugin_daemon import PluginBasicBooleanResponse, PluginToolProviderEntity
from core.plugin.manager.base import BasePluginManager
from core.tools.entities.tool_entities import ToolInvokeMessage, ToolParameter
@ -45,7 +45,7 @@ class PluginToolManager(BasePluginManager):
"""
Fetch tool provider for the given tenant and plugin.
"""
tool_provider_id = GenericProviderID(provider)
tool_provider_id = ToolProviderID(provider)
def transformer(json_response: dict[str, Any]) -> dict:
data = json_response.get("data")

View File

@ -30,6 +30,7 @@ from core.model_runtime.entities.provider_entities import (
ProviderEntity,
)
from core.model_runtime.model_providers.model_provider_factory import ModelProviderFactory
from core.plugin.entities.plugin import ModelProviderID
from extensions import ext_hosting_provider
from extensions.ext_database import db
from extensions.ext_redis import redis_client
@ -99,6 +100,15 @@ class ProviderManager:
tenant_id, provider_name_to_provider_records_dict
)
# append providers with langgenius/openai/openai
provider_name_list = list(provider_name_to_provider_records_dict.keys())
for provider_name in provider_name_list:
provider_id = ModelProviderID(provider_name)
if str(provider_id) not in provider_name_list:
provider_name_to_provider_records_dict[str(provider_id)] = provider_name_to_provider_records_dict[
provider_name
]
# Get all provider model records of the workspace
provider_name_to_provider_model_records_dict = self._get_all_provider_models(tenant_id)
@ -191,7 +201,7 @@ class ProviderManager:
model_settings=model_settings,
)
provider_configurations[provider_name] = provider_configuration
provider_configurations[str(ModelProviderID(provider_name))] = provider_configuration
# Return the encapsulated object
return provider_configurations
@ -453,11 +463,9 @@ class ProviderManager:
provider_name_to_provider_load_balancing_model_configs_dict = defaultdict(list)
for provider_load_balancing_config in provider_load_balancing_configs:
(
provider_name_to_provider_load_balancing_model_configs_dict[
provider_load_balancing_config.provider_name
].append(provider_load_balancing_config)
)
provider_name_to_provider_load_balancing_model_configs_dict[
provider_load_balancing_config.provider_name
].append(provider_load_balancing_config)
return provider_name_to_provider_load_balancing_model_configs_dict
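Editor's note: the first hunk registers each workspace provider record under both its legacy short name and the plugin-qualified ID, so lookups by either key resolve to the same record. A sketch of the effect, reusing the diff's own "langgenius/openai/openai" example:

records = {"openai": ["<provider record>"]}
for name in list(records):
    qualified = str(ModelProviderID(name))  # e.g. "langgenius/openai/openai"
    if qualified not in records:
        records[qualified] = records[name]  # alias pointing at the same record
print(sorted(records))  # ['langgenius/openai/openai', 'openai']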

View File

@ -1,8 +1,12 @@
import threading
import concurrent.futures
import json
from concurrent.futures import ThreadPoolExecutor
from typing import Optional
from flask import Flask, current_app
from sqlalchemy.orm import load_only
from configs import dify_config
from core.rag.data_post_processor.data_post_processor import DataPostProcessor
from core.rag.datasource.keyword.keyword_factory import Keyword
from core.rag.datasource.vdb.vector_factory import Vector
@ -26,6 +30,7 @@ default_retrieval_model = {
class RetrievalService:
# Cache precompiled regular expressions to avoid repeated compilation
@classmethod
def retrieve(
cls,
@ -40,74 +45,62 @@ class RetrievalService:
):
if not query:
return []
dataset = db.session.query(Dataset).filter(Dataset.id == dataset_id).first()
if not dataset:
return []
dataset = cls._get_dataset(dataset_id)
if not dataset or dataset.available_document_count == 0 or dataset.available_segment_count == 0:
return []
all_documents: list[Document] = []
threads: list[threading.Thread] = []
exceptions: list[str] = []
# retrieval_model source with keyword
if retrieval_method == "keyword_search":
keyword_thread = threading.Thread(
target=RetrievalService.keyword_search,
kwargs={
"flask_app": current_app._get_current_object(), # type: ignore
"dataset_id": dataset_id,
"query": query,
"top_k": top_k,
"all_documents": all_documents,
"exceptions": exceptions,
},
)
threads.append(keyword_thread)
keyword_thread.start()
# retrieval_model source with semantic
if RetrievalMethod.is_support_semantic_search(retrieval_method):
embedding_thread = threading.Thread(
target=RetrievalService.embedding_search,
kwargs={
"flask_app": current_app._get_current_object(), # type: ignore
"dataset_id": dataset_id,
"query": query,
"top_k": top_k,
"score_threshold": score_threshold,
"reranking_model": reranking_model,
"all_documents": all_documents,
"retrieval_method": retrieval_method,
"exceptions": exceptions,
},
)
threads.append(embedding_thread)
embedding_thread.start()
# retrieval source with full text
if RetrievalMethod.is_support_fulltext_search(retrieval_method):
full_text_index_thread = threading.Thread(
target=RetrievalService.full_text_index_search,
kwargs={
"flask_app": current_app._get_current_object(), # type: ignore
"dataset_id": dataset_id,
"query": query,
"retrieval_method": retrieval_method,
"score_threshold": score_threshold,
"top_k": top_k,
"reranking_model": reranking_model,
"all_documents": all_documents,
"exceptions": exceptions,
},
)
threads.append(full_text_index_thread)
full_text_index_thread.start()
for thread in threads:
thread.join()
# Optimize multithreading with thread pools
with ThreadPoolExecutor(max_workers=dify_config.RETRIEVAL_SERVICE_EXECUTORS) as executor: # type: ignore
futures = []
if retrieval_method == "keyword_search":
futures.append(
executor.submit(
cls.keyword_search,
flask_app=current_app._get_current_object(), # type: ignore
dataset_id=dataset_id,
query=query,
top_k=top_k,
all_documents=all_documents,
exceptions=exceptions,
)
)
if RetrievalMethod.is_support_semantic_search(retrieval_method):
futures.append(
executor.submit(
cls.embedding_search,
flask_app=current_app._get_current_object(), # type: ignore
dataset_id=dataset_id,
query=query,
top_k=top_k,
score_threshold=score_threshold,
reranking_model=reranking_model,
all_documents=all_documents,
retrieval_method=retrieval_method,
exceptions=exceptions,
)
)
if RetrievalMethod.is_support_fulltext_search(retrieval_method):
futures.append(
executor.submit(
cls.full_text_index_search,
flask_app=current_app._get_current_object(), # type: ignore
dataset_id=dataset_id,
query=query,
top_k=top_k,
score_threshold=score_threshold,
reranking_model=reranking_model,
all_documents=all_documents,
retrieval_method=retrieval_method,
exceptions=exceptions,
)
)
concurrent.futures.wait(futures, timeout=30, return_when=concurrent.futures.ALL_COMPLETED)
if exceptions:
exception_message = ";\n".join(exceptions)
raise ValueError(exception_message)
raise ValueError(";\n".join(exceptions))
if retrieval_method == RetrievalMethod.HYBRID_SEARCH.value:
data_post_processor = DataPostProcessor(
@ -132,18 +125,21 @@ class RetrievalService:
)
return all_documents
@classmethod
def _get_dataset(cls, dataset_id: str) -> Optional[Dataset]:
return db.session.query(Dataset).filter(Dataset.id == dataset_id).first()
@classmethod
def keyword_search(
cls, flask_app: Flask, dataset_id: str, query: str, top_k: int, all_documents: list, exceptions: list
):
with flask_app.app_context():
try:
dataset = db.session.query(Dataset).filter(Dataset.id == dataset_id).first()
dataset = cls._get_dataset(dataset_id)
if not dataset:
raise ValueError("dataset not found")
keyword = Keyword(dataset=dataset)
documents = keyword.search(cls.escape_query_for_search(query), top_k=top_k)
all_documents.extend(documents)
except Exception as e:
@ -164,14 +160,13 @@ class RetrievalService:
):
with flask_app.app_context():
try:
dataset = db.session.query(Dataset).filter(Dataset.id == dataset_id).first()
dataset = cls._get_dataset(dataset_id)
if not dataset:
raise ValueError("dataset not found")
vector = Vector(dataset=dataset)
documents = vector.search_by_vector(
cls.escape_query_for_search(query),
query,
search_type="similarity_score_threshold",
top_k=top_k,
score_threshold=score_threshold,
@ -186,7 +181,7 @@ class RetrievalService:
and retrieval_method == RetrievalMethod.SEMANTIC_SEARCH.value
):
data_post_processor = DataPostProcessor(
str(dataset.tenant_id), RerankMode.RERANKING_MODEL.value, reranking_model, None, False
str(dataset.tenant_id), str(RerankMode.RERANKING_MODEL.value), reranking_model, None, False
)
all_documents.extend(
data_post_processor.invoke(
@ -216,13 +211,11 @@ class RetrievalService:
):
with flask_app.app_context():
try:
dataset = db.session.query(Dataset).filter(Dataset.id == dataset_id).first()
dataset = cls._get_dataset(dataset_id)
if not dataset:
raise ValueError("dataset not found")
vector_processor = Vector(
dataset=dataset,
)
vector_processor = Vector(dataset=dataset)
documents = vector_processor.search_by_full_text(cls.escape_query_for_search(query), top_k=top_k)
if documents:
@ -233,7 +226,7 @@ class RetrievalService:
and retrieval_method == RetrievalMethod.FULL_TEXT_SEARCH.value
):
data_post_processor = DataPostProcessor(
str(dataset.tenant_id), RerankMode.RERANKING_MODEL.value, reranking_model, None, False
str(dataset.tenant_id), str(RerankMode.RERANKING_MODEL.value), reranking_model, None, False
)
all_documents.extend(
data_post_processor.invoke(
@ -250,66 +243,106 @@ class RetrievalService:
@staticmethod
def escape_query_for_search(query: str) -> str:
return query.replace('"', '\\"')
return json.dumps(query).strip('"')
@classmethod
def format_retrieval_documents(cls, documents: list[Document]) -> list[RetrievalSegments]:
"""Format retrieval documents with optimized batch processing"""
if not documents:
return []
try:
# Collect document IDs
document_ids = {doc.metadata.get("document_id") for doc in documents if "document_id" in doc.metadata}
if not document_ids:
return []
# Batch query dataset documents
dataset_documents = {
doc.id: doc
for doc in db.session.query(DatasetDocument)
.filter(DatasetDocument.id.in_(document_ids))
.options(load_only(DatasetDocument.id, DatasetDocument.doc_form, DatasetDocument.dataset_id))
.all()
}
records = []
include_segment_ids = set()
segment_child_map = {}
# Process documents
for document in documents:
document_id = document.metadata.get("document_id")
if document_id not in dataset_documents:
continue
dataset_document = dataset_documents[document_id]
@staticmethod
def format_retrieval_documents(documents: list[Document]) -> list[RetrievalSegments]:
records = []
include_segment_ids = []
segment_child_map = {}
for document in documents:
document_id = document.metadata.get("document_id")
dataset_document = db.session.query(DatasetDocument).filter(DatasetDocument.id == document_id).first()
if dataset_document:
if dataset_document.doc_form == IndexType.PARENT_CHILD_INDEX:
# Handle parent-child documents
child_index_node_id = document.metadata.get("doc_id")
result = (
db.session.query(ChildChunk, DocumentSegment)
.join(DocumentSegment, ChildChunk.segment_id == DocumentSegment.id)
child_chunk = (
db.session.query(ChildChunk).filter(ChildChunk.index_node_id == child_index_node_id).first()
)
if not child_chunk:
continue
segment = (
db.session.query(DocumentSegment)
.filter(
ChildChunk.index_node_id == child_index_node_id,
DocumentSegment.dataset_id == dataset_document.dataset_id,
DocumentSegment.enabled == True,
DocumentSegment.status == "completed",
DocumentSegment.id == child_chunk.segment_id,
)
.options(
load_only(
DocumentSegment.id,
DocumentSegment.content,
DocumentSegment.answer,
)
)
.first()
)
if result:
child_chunk, segment = result
if not segment:
continue
if segment.id not in include_segment_ids:
include_segment_ids.append(segment.id)
child_chunk_detail = {
"id": child_chunk.id,
"content": child_chunk.content,
"position": child_chunk.position,
"score": document.metadata.get("score", 0.0),
}
map_detail = {
"max_score": document.metadata.get("score", 0.0),
"child_chunks": [child_chunk_detail],
}
segment_child_map[segment.id] = map_detail
record = {
"segment": segment,
}
records.append(record)
else:
child_chunk_detail = {
"id": child_chunk.id,
"content": child_chunk.content,
"position": child_chunk.position,
"score": document.metadata.get("score", 0.0),
}
segment_child_map[segment.id]["child_chunks"].append(child_chunk_detail)
segment_child_map[segment.id]["max_score"] = max(
segment_child_map[segment.id]["max_score"], document.metadata.get("score", 0.0)
)
else:
if not segment:
continue
if segment.id not in include_segment_ids:
include_segment_ids.add(segment.id)
child_chunk_detail = {
"id": child_chunk.id,
"content": child_chunk.content,
"position": child_chunk.position,
"score": document.metadata.get("score", 0.0),
}
map_detail = {
"max_score": document.metadata.get("score", 0.0),
"child_chunks": [child_chunk_detail],
}
segment_child_map[segment.id] = map_detail
record = {
"segment": segment,
}
records.append(record)
else:
child_chunk_detail = {
"id": child_chunk.id,
"content": child_chunk.content,
"position": child_chunk.position,
"score": document.metadata.get("score", 0.0),
}
segment_child_map[segment.id]["child_chunks"].append(child_chunk_detail)
segment_child_map[segment.id]["max_score"] = max(
segment_child_map[segment.id]["max_score"], document.metadata.get("score", 0.0)
)
else:
index_node_id = document.metadata["doc_id"]
# Handle normal documents
index_node_id = document.metadata.get("doc_id")
if not index_node_id:
continue
segment = (
db.session.query(DocumentSegment)
@ -324,16 +357,21 @@ class RetrievalService:
if not segment:
continue
include_segment_ids.append(segment.id)
include_segment_ids.add(segment.id)
record = {
"segment": segment,
"score": document.metadata.get("score", None),
"score": document.metadata.get("score"), # type: ignore
}
records.append(record)
# Add child chunks information to records
for record in records:
if record["segment"].id in segment_child_map:
record["child_chunks"] = segment_child_map[record["segment"].id].get("child_chunks", None)
record["child_chunks"] = segment_child_map[record["segment"].id].get("child_chunks") # type: ignore
record["score"] = segment_child_map[record["segment"].id]["max_score"]
return [RetrievalSegments(**record) for record in records]
return [RetrievalSegments(**record) for record in records]
except Exception as e:
db.session.rollback()
raise e
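Editor's note on escape_query_for_search: json.dumps escapes backslashes and control characters as well as quotes, which the old replace() missed; strip('"') then drops the outer quotes it adds:

import json

query = 'say "hi" \\ bye'
print(json.dumps(query).strip('"'))  # say \"hi\" \\ bye  (quotes AND backslashes escaped)
print(query.replace('"', '\\"'))     # say \"hi\" \ bye   (backslash left unescaped)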

View File

@ -111,8 +111,9 @@ class ChromaVector(BaseVector):
for index in range(len(ids)):
distance = distances[index]
metadata = dict(metadatas[index])
if distance >= score_threshold:
metadata["score"] = distance
score = 1 - distance
if score > score_threshold:
metadata["score"] = score
doc = Document(
page_content=documents[index],
metadata=metadata,
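Editor's note: Chroma returns distances, where smaller means closer, so the old comparison was inverted. Worked numbers:

distance = 0.2          # a close match
score = 1 - distance    # similarity 0.8
print(score > 0.5)      # True -> kept
# The old check compared the raw distance (0.2 >= 0.5 -> False) and dropped it.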

View File

@ -9,6 +9,7 @@ from sqlalchemy import text as sql_text
from sqlalchemy.orm import Session, declarative_base
from configs import dify_config
from core.rag.datasource.vdb.field import Field
from core.rag.datasource.vdb.vector_base import BaseVector
from core.rag.datasource.vdb.vector_factory import AbstractVectorFactory
from core.rag.datasource.vdb.vector_type import VectorType
@ -54,14 +55,13 @@ class TiDBVector(BaseVector):
return Table(
self._collection_name,
self._orm_base.metadata,
Column("id", String(36), primary_key=True, nullable=False),
Column(Field.PRIMARY_KEY.value, String(36), primary_key=True, nullable=False),
Column(
"vector",
Field.VECTOR.value,
VectorType(dim),
nullable=False,
comment="" if self._distance_func is None else f"hnsw(distance={self._distance_func})",
),
Column("text", TEXT, nullable=False),
Column(Field.TEXT_KEY.value, TEXT, nullable=False),
Column("meta", JSON, nullable=False),
Column("create_time", DateTime, server_default=sqlalchemy.text("CURRENT_TIMESTAMP")),
Column(
@ -96,6 +96,7 @@ class TiDBVector(BaseVector):
collection_exist_cache_key = "vector_indexing_{}".format(self._collection_name)
if redis_client.get(collection_exist_cache_key):
return
tidb_dist_func = self._get_distance_func()
with Session(self._engine) as session:
session.begin()
create_statement = sql_text(f"""
@ -104,14 +105,14 @@ class TiDBVector(BaseVector):
text TEXT NOT NULL,
meta JSON NOT NULL,
doc_id VARCHAR(64) AS (JSON_UNQUOTE(JSON_EXTRACT(meta, '$.doc_id'))) STORED,
KEY (doc_id),
vector VECTOR<FLOAT>({dimension}) NOT NULL COMMENT "hnsw(distance={self._distance_func})",
vector VECTOR<FLOAT>({dimension}) NOT NULL,
create_time DATETIME DEFAULT CURRENT_TIMESTAMP,
update_time DATETIME DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP
update_time DATETIME DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
KEY (doc_id),
VECTOR INDEX idx_vector (({tidb_dist_func}(vector))) USING HNSW
);
""")
session.execute(create_statement)
# tidb vector not support 'CREATE/ADD INDEX' now
session.commit()
redis_client.set(collection_exist_cache_key, 1, ex=3600)
@ -194,23 +195,30 @@ class TiDBVector(BaseVector):
)
docs = []
if self._distance_func == "l2":
tidb_func = "Vec_l2_distance"
elif self._distance_func == "cosine":
tidb_func = "Vec_Cosine_distance"
else:
tidb_func = "Vec_Cosine_distance"
tidb_dist_func = self._get_distance_func()
with Session(self._engine) as session:
select_statement = sql_text(
f"""SELECT meta, text, distance FROM (
SELECT meta, text, {tidb_func}(vector, "{query_vector_str}") as distance
FROM {self._collection_name}
ORDER BY distance
LIMIT {top_k}
) t WHERE distance < {distance};"""
select_statement = sql_text(f"""
SELECT meta, text, distance
FROM (
SELECT
meta,
text,
{tidb_dist_func}(vector, :query_vector_str) AS distance
FROM {self._collection_name}
ORDER BY distance ASC
LIMIT :top_k
) t
WHERE distance <= :distance
""")
res = session.execute(
select_statement,
params={
"query_vector_str": query_vector_str,
"distance": distance,
"top_k": top_k,
},
)
res = session.execute(select_statement)
results = [(row[0], row[1], row[2]) for row in res]
for meta, text, distance in results:
metadata = json.loads(meta)
@ -227,6 +235,16 @@ class TiDBVector(BaseVector):
session.execute(sql_text(f"""DROP TABLE IF EXISTS {self._collection_name};"""))
session.commit()
def _get_distance_func(self) -> str:
match self._distance_func:
case "l2":
tidb_dist_func = "VEC_L2_DISTANCE"
case "cosine":
tidb_dist_func = "VEC_COSINE_DISTANCE"
case _:
tidb_dist_func = "VEC_COSINE_DISTANCE"
return tidb_dist_func
class TiDBVectorFactory(AbstractVectorFactory):
def init_vector(self, dataset: Dataset, attributes: list, embeddings: Embeddings) -> TiDBVector:
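Editor's note: besides naming the distance function consistently, the rewritten query moves user-influenced values into bound parameters (:query_vector_str, :top_k, :distance) instead of f-string interpolation. A standalone restatement of the mapping:

def get_distance_func(distance_func):
    match distance_func:
        case "l2":
            return "VEC_L2_DISTANCE"
        case _:
            return "VEC_COSINE_DISTANCE"  # "cosine" and any unset value

print(get_distance_func("l2"), get_distance_func(None))
# VEC_L2_DISTANCE VEC_COSINE_DISTANCE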

View File

@ -105,10 +105,10 @@ class ApiTool(Tool):
needed_parameters = [parameter for parameter in (self.api_bundle.parameters or []) if parameter.required]
for parameter in needed_parameters:
if parameter.required and parameter.name not in parameters:
raise ToolParameterValidationError(f"Missing required parameter {parameter.name}")
if parameter.default is not None and parameter.name not in parameters:
parameters[parameter.name] = parameter.default
if parameter.default is not None:
parameters[parameter.name] = parameter.default
else:
raise ToolParameterValidationError(f"Missing required parameter {parameter.name}")
return headers
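Editor's note: the fix reorders the checks so a missing-but-defaulted parameter is filled in rather than rejected. A standalone restatement with a stub Param class (ToolParameterValidationError swapped for ValueError here):

class Param:
    def __init__(self, name, default=None):
        self.name = name
        self.default = default

def fill_required(parameters, needed_parameters):
    for p in needed_parameters:
        if p.name not in parameters:
            if p.default is not None:
                parameters[p.name] = p.default  # fall back to the default
            else:
                raise ValueError(f"Missing required parameter {p.name}")
    return parameters

print(fill_required({}, [Param("limit", default=10)]))  # {'limit': 10}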

View File

@ -246,10 +246,11 @@ class ToolEngine:
+ "you do not need to create it, just tell the user to check it now."
)
elif response.type == ToolInvokeMessage.MessageType.JSON:
text = json.dumps(cast(ToolInvokeMessage.JsonMessage, response.message).json_object, ensure_ascii=False)
result += f"tool response: {text}."
result = json.dumps(
cast(ToolInvokeMessage.JsonMessage, response.message).json_object, ensure_ascii=False
)
else:
result += f"tool response: {response.message!r}."
result += str(response.message)
return result

View File

@ -160,8 +160,8 @@ class ToolManager:
"""
get the tool runtime
:param provider_type: the type of the provider
:param provider_name: the name of the provider
:param provider_type: the type of the provider
:param provider_name: the name of the provider
:param tool_name: the name of the tool
:return: the tool

View File

@ -3,11 +3,13 @@ from typing import Any
from pydantic import BaseModel, Field
from core.rag.datasource.retrieval_service import RetrievalService
from core.rag.entities.context_entities import DocumentContext
from core.rag.models.document import Document as RetrievalDocument
from core.rag.retrieval.retrieval_methods import RetrievalMethod
from core.tools.utils.dataset_retriever.dataset_retriever_base_tool import DatasetRetrieverBaseTool
from extensions.ext_database import db
from models.dataset import Dataset, Document, DocumentSegment
from models.dataset import Dataset
from models.dataset import Document as DatasetDocument
from services.external_knowledge_service import ExternalDatasetService
default_retrieval_model = {
@ -54,7 +56,6 @@ class DatasetRetrieverTool(DatasetRetrieverBaseTool):
if not dataset:
return ""
for hit_callback in self.hit_callbacks:
hit_callback.on_query(query, dataset.id)
if dataset.provider == "external":
@ -125,7 +126,6 @@ class DatasetRetrieverTool(DatasetRetrieverBaseTool):
)
else:
documents = []
for hit_callback in self.hit_callbacks:
hit_callback.on_tool_end(documents)
document_score_list = {}
@ -134,50 +134,46 @@ class DatasetRetrieverTool(DatasetRetrieverBaseTool):
if item.metadata is not None and item.metadata.get("score"):
document_score_list[item.metadata["doc_id"]] = item.metadata["score"]
document_context_list = []
index_node_ids = [document.metadata["doc_id"] for document in documents]
segments = DocumentSegment.query.filter(
DocumentSegment.dataset_id == self.dataset_id,
DocumentSegment.completed_at.isnot(None),
DocumentSegment.status == "completed",
DocumentSegment.enabled == True,
DocumentSegment.index_node_id.in_(index_node_ids),
).all()
if segments:
index_node_id_to_position = {id: position for position, id in enumerate(index_node_ids)}
sorted_segments = sorted(
segments, key=lambda segment: index_node_id_to_position.get(segment.index_node_id, float("inf"))
)
for segment in sorted_segments:
records = RetrievalService.format_retrieval_documents(documents)
if records:
for record in records:
segment = record.segment
if segment.answer:
document_context_list.append(
f"question:{segment.get_sign_content()} answer:{segment.answer}"
DocumentContext(
content=f"question:{segment.get_sign_content()} answer:{segment.answer}",
score=record.score,
)
)
else:
document_context_list.append(segment.get_sign_content())
document_context_list.append(
DocumentContext(
content=segment.get_sign_content(),
score=record.score,
)
)
retrieval_resource_list = []
if self.return_resource:
context_list = []
resource_number = 1
for segment in sorted_segments:
document_segment = Document.query.filter(
Document.id == segment.document_id,
Document.enabled == True,
Document.archived == False,
for record in records:
segment = record.segment
dataset = Dataset.query.filter_by(id=segment.dataset_id).first()
document = DatasetDocument.query.filter(
DatasetDocument.id == segment.document_id,
DatasetDocument.enabled == True,
DatasetDocument.archived == False,
).first()
if not document_segment:
continue
if dataset and document_segment:
if dataset and document:
source = {
"position": resource_number,
"dataset_id": dataset.id,
"dataset_name": dataset.name,
"document_id": document_segment.id,
"document_name": document_segment.name,
"data_source_type": document_segment.data_source_type,
"document_id": document.id, # type: ignore
"document_name": document.name, # type: ignore
"data_source_type": document.data_source_type, # type: ignore
"segment_id": segment.id,
"retriever_from": self.retriever_from,
"score": document_score_list.get(segment.index_node_id, None),
"score": record.score or 0.0,
}
if self.retriever_from == "dev":
source["hit_count"] = segment.hit_count
source["word_count"] = segment.word_count
@ -187,10 +183,19 @@ class DatasetRetrieverTool(DatasetRetrieverBaseTool):
source["content"] = f"question:{segment.content} \nanswer:{segment.answer}"
else:
source["content"] = segment.content
context_list.append(source)
resource_number += 1
retrieval_resource_list.append(source)
for hit_callback in self.hit_callbacks:
hit_callback.return_retriever_resource_info(context_list)
return str("\n".join(document_context_list))
if self.return_resource and retrieval_resource_list:
retrieval_resource_list = sorted(
retrieval_resource_list,
key=lambda x: x.get("score") or 0.0,
reverse=True,
)
for position, item in enumerate(retrieval_resource_list, start=1): # type: ignore
item["position"] = position # type: ignore
for hit_callback in self.hit_callbacks:
hit_callback.return_retriever_resource_info(retrieval_resource_list)
if document_context_list:
document_context_list = sorted(document_context_list, key=lambda x: x.score or 0.0, reverse=True)
return str("\n".join([document_context.content for document_context in document_context_list]))
return ""

View File

@ -207,7 +207,6 @@ class AgentLogEvent(BaseAgentEvent):
status: str = Field(..., description="status")
data: Mapping[str, Any] = Field(..., description="data")
metadata: Optional[Mapping[str, Any]] = Field(default=None, description="metadata")
node_id: str = Field(..., description="agent node id")
InNodeEvent = BaseNodeEvent | BaseParallelBranchEvent | BaseIterationEvent | BaseAgentEvent

View File

@ -18,7 +18,6 @@ from core.workflow.entities.node_entities import AgentNodeStrategyInit, NodeRunM
from core.workflow.entities.variable_pool import VariablePool, VariableValue
from core.workflow.graph_engine.condition_handlers.condition_manager import ConditionManager
from core.workflow.graph_engine.entities.event import (
BaseAgentEvent,
BaseIterationEvent,
GraphEngineEvent,
GraphRunFailedEvent,
@ -502,7 +501,7 @@ class GraphEngine:
break
yield event
if not isinstance(event, BaseAgentEvent) and event.parallel_id == parallel_id:
if event.parallel_id == parallel_id:
if isinstance(event, ParallelBranchRunSucceededEvent):
succeeded_count += 1
if succeeded_count == len(futures):
@ -666,7 +665,7 @@ class GraphEngine:
retries += 1
route_node_state.node_run_result = run_result
yield NodeRunRetryEvent(
id=node_instance.id,
id=str(uuid.uuid4()),
node_id=node_instance.node_id,
node_type=node_instance.node_type,
node_data=node_instance.node_data,
@ -681,7 +680,7 @@ class GraphEngine:
start_at=retry_start_at,
)
time.sleep(retry_interval)
continue
break
route_node_state.set_finished(run_result=run_result)
if run_result.status == WorkflowNodeExecutionStatus.FAILED:

View File

@ -8,12 +8,12 @@ from core.model_manager import ModelManager
from core.model_runtime.entities.model_entities import ModelType
from core.plugin.manager.exc import PluginDaemonClientSideError
from core.plugin.manager.plugin import PluginInstallationManager
from core.tools.entities.tool_entities import ToolParameter, ToolProviderType
from core.tools.entities.tool_entities import ToolProviderType
from core.tools.tool_manager import ToolManager
from core.workflow.entities.node_entities import NodeRunResult
from core.workflow.entities.variable_pool import VariablePool
from core.workflow.enums import SystemVariableKey
from core.workflow.nodes.agent.entities import AgentNodeData, ParamsAutoGenerated
from core.workflow.nodes.agent.entities import AgentNodeData
from core.workflow.nodes.base.entities import BaseNodeData
from core.workflow.nodes.enums import NodeType
from core.workflow.nodes.event.event import RunCompletedEvent
@ -161,29 +161,11 @@ class AgentNode(ToolNode):
value = cast(list[dict[str, Any]], value)
tool_value = []
for tool in value:
provider_type = ToolProviderType(tool.get("type", ToolProviderType.BUILT_IN.value))
# handle the original settings
original_parameters = tool.get("parameters", {})
setting_params = tool.get("settings", {})
manual_input_params = []
# handle legacy data compatibility
if not all(isinstance(v, dict) for _, v in original_parameters.items()):
parameters = original_parameters
else:
params = {}
for key, param in original_parameters.items():
if param.get("auto", ParamsAutoGenerated.OPEN.value) == ParamsAutoGenerated.CLOSE.value:
params[key] = param.get("value", "")
manual_input_params.append(key)
else:
params[key] = None
settings = {k: v.get("value", None) for k, v in setting_params.items()}
parameters = {**params, **settings}
entity = AgentToolEntity(
provider_id=tool.get("provider_name", ""),
provider_type=provider_type,
provider_type=ToolProviderType.BUILT_IN,
tool_name=tool.get("tool_name", ""),
tool_parameters=parameters,
tool_parameters=tool.get("parameters", {}),
plugin_unique_identifier=tool.get("plugin_unique_identifier", None),
)
@ -196,27 +178,13 @@ class AgentNode(ToolNode):
tool_runtime.entity.description.llm = (
extra.get("descrption", "") or tool_runtime.entity.description.llm
)
for params in tool_runtime.entity.parameters:
params.form = (
ToolParameter.ToolParameterForm.FORM
if params.name in manual_input_params
else params.form
)
if tool_runtime.entity.parameters:
manual_input_value = {
key: value for key, value in parameters.items() if key in manual_input_params
tool_value.append(
{
**tool_runtime.entity.model_dump(mode="json"),
"runtime_parameters": tool_runtime.runtime.runtime_parameters,
}
runtime_parameters = {
**tool_runtime.runtime.runtime_parameters,
**manual_input_value,
}
tool_value.append(
{
**tool_runtime.entity.model_dump(mode="json"),
"runtime_parameters": runtime_parameters,
"provider_type": provider_type.value,
}
)
)
value = tool_value
if parameter.type == "model-selector":
value = cast(dict[str, Any], value)

View File

@ -1,4 +1,3 @@
from enum import Enum
from typing import Any, Literal, Union
from pydantic import BaseModel
@ -17,8 +16,3 @@ class AgentNodeData(BaseNodeData):
type: Literal["mixed", "variable", "constant"]
agent_parameters: dict[str, AgentInput]
class ParamsAutoGenerated(Enum):
CLOSE = 0
OPEN = 1

View File

@ -107,8 +107,10 @@ def _extract_text_by_mime_type(*, file_content: bytes, mime_type: str) -> str:
return _extract_text_from_plain_text(file_content)
case "application/pdf":
return _extract_text_from_pdf(file_content)
case "application/vnd.openxmlformats-officedocument.wordprocessingml.document" | "application/msword":
case "application/msword":
return _extract_text_from_doc(file_content)
case "application/vnd.openxmlformats-officedocument.wordprocessingml.document":
return _extract_text_from_docx(file_content)
case "text/csv":
return _extract_text_from_csv(file_content)
case "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet" | "application/vnd.ms-excel":
@ -142,8 +144,10 @@ def _extract_text_by_file_extension(*, file_content: bytes, file_extension: str)
return _extract_text_from_yaml(file_content)
case ".pdf":
return _extract_text_from_pdf(file_content)
case ".doc" | ".docx":
case ".doc":
return _extract_text_from_doc(file_content)
case ".docx":
return _extract_text_from_docx(file_content)
case ".csv":
return _extract_text_from_csv(file_content)
case ".xls" | ".xlsx":
@ -203,7 +207,33 @@ def _extract_text_from_pdf(file_content: bytes) -> str:
def _extract_text_from_doc(file_content: bytes) -> str:
"""
Extract text from a DOC/DOCX file.
Extract text from a DOC file.
"""
from unstructured.partition.api import partition_via_api
if not (dify_config.UNSTRUCTURED_API_URL and dify_config.UNSTRUCTURED_API_KEY):
raise TextExtractionError("UNSTRUCTURED_API_URL and UNSTRUCTURED_API_KEY must be set")
try:
with tempfile.NamedTemporaryFile(suffix=".doc", delete=False) as temp_file:
temp_file.write(file_content)
temp_file.flush()
with open(temp_file.name, "rb") as file:
elements = partition_via_api(
file=file,
metadata_filename=temp_file.name,
api_url=dify_config.UNSTRUCTURED_API_URL,
api_key=dify_config.UNSTRUCTURED_API_KEY,
)
os.unlink(temp_file.name)
return "\n".join([getattr(element, "text", "") for element in elements])
except Exception as e:
raise TextExtractionError(f"Failed to extract text from DOC: {str(e)}") from e
def _extract_text_from_docx(file_content: bytes) -> str:
"""
Extract text from a DOCX file.
For now, only paragraphs and tables are supported; add more if needed.
"""
try:
@ -255,13 +285,13 @@ def _extract_text_from_doc(file_content: bytes) -> str:
text.append(markdown_table)
except Exception as e:
logger.warning(f"Failed to extract table from DOC/DOCX: {e}")
logger.warning(f"Failed to extract table from DOC: {e}")
continue
return "\n".join(text)
except Exception as e:
raise TextExtractionError(f"Failed to extract text from DOC/DOCX: {str(e)}") from e
raise TextExtractionError(f"Failed to extract text from DOCX: {str(e)}") from e
def _download_file_content(file: File) -> bytes:
@ -329,14 +359,29 @@ def _extract_text_from_excel(file_content: bytes) -> str:
def _extract_text_from_ppt(file_content: bytes) -> str:
from unstructured.partition.api import partition_via_api
from unstructured.partition.ppt import partition_ppt
try:
with io.BytesIO(file_content) as file:
elements = partition_ppt(file=file)
if dify_config.UNSTRUCTURED_API_URL and dify_config.UNSTRUCTURED_API_KEY:
with tempfile.NamedTemporaryFile(suffix=".ppt", delete=False) as temp_file:
temp_file.write(file_content)
temp_file.flush()
with open(temp_file.name, "rb") as file:
elements = partition_via_api(
file=file,
metadata_filename=temp_file.name,
api_url=dify_config.UNSTRUCTURED_API_URL,
api_key=dify_config.UNSTRUCTURED_API_KEY,
)
os.unlink(temp_file.name)
else:
with io.BytesIO(file_content) as file:
elements = partition_ppt(file=file)
return "\n".join([getattr(element, "text", "") for element in elements])
except Exception as e:
raise TextExtractionError(f"Failed to extract text from PPT: {str(e)}") from e
raise TextExtractionError(f"Failed to extract text from PPTX: {str(e)}") from e
def _extract_text_from_pptx(file_content: bytes) -> str:

View File

@ -64,7 +64,7 @@ class EndStreamGeneratorRouter:
node_type = node.get("data", {}).get("type")
if (
variable_selector.value_selector not in value_selectors
and (node_type in (NodeType.LLM.value, NodeType.AGENT.value))
and node_type == NodeType.LLM.value
and variable_selector.value_selector[1] == "text"
):
value_selectors.append(list(variable_selector.value_selector))

View File

@ -590,6 +590,7 @@ class IterationNode(BaseNode[IterationNodeData]):
with flask_app.app_context():
parallel_mode_run_id = uuid.uuid4().hex
graph_engine_copy = graph_engine.create_copy()
graph_engine_copy.graph_runtime_state.total_tokens = 0
variable_pool_copy = graph_engine_copy.graph_runtime_state.variable_pool
variable_pool_copy.add([self.node_id, "index"], index)
variable_pool_copy.add([self.node_id, "item"], item)

View File

@ -247,6 +247,24 @@ class LLMNode(BaseNode[LLMNodeData]):
def _handle_invoke_result(self, invoke_result: LLMResult | Generator) -> Generator[NodeEvent, None, None]:
if isinstance(invoke_result, LLMResult):
content = invoke_result.message.content
if content is None:
message_text = ""
elif isinstance(content, str):
message_text = content
elif isinstance(content, list):
# Assuming the list contains PromptMessageContent objects with a "data" attribute
message_text = "".join(
item.data if hasattr(item, "data") and isinstance(item.data, str) else str(item) for item in content
)
else:
message_text = str(content)
yield ModelInvokeCompletedEvent(
text=message_text,
usage=invoke_result.usage,
finish_reason=None,
)
return
model = None
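Editor's note: the added branch normalizes message content that may be None, a plain string, or a list of content parts that usually expose a string `data` attribute. The same rule as a standalone helper:

def flatten(content) -> str:
    if content is None:
        return ""
    if isinstance(content, str):
        return content
    if isinstance(content, list):
        # join part.data when present, otherwise fall back to str(part)
        return "".join(
            part.data if hasattr(part, "data") and isinstance(part.data, str) else str(part)
            for part in content
        )
    return str(content)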

View File

@ -44,13 +44,11 @@ class QuestionClassifierNode(LLMNode):
variable_pool = self.graph_runtime_state.variable_pool
# extract variables
variable = variable_pool.get(
node_data.query_variable_selector) if node_data.query_variable_selector else None
variable = variable_pool.get(node_data.query_variable_selector) if node_data.query_variable_selector else None
query = variable.value if variable else None
variables = {"query": query}
# fetch model config
model_instance, model_config = self._fetch_model_config(
node_data.model)
model_instance, model_config = self._fetch_model_config(node_data.model)
# fetch memory
memory = self._fetch_memory(
node_data_memory=node_data.memory,
@ -58,8 +56,7 @@ class QuestionClassifierNode(LLMNode):
)
# fetch instruction
node_data.instruction = node_data.instruction or ""
node_data.instruction = variable_pool.convert_template(
node_data.instruction).text
node_data.instruction = variable_pool.convert_template(node_data.instruction).text
files = (
self._fetch_files(
@ -181,15 +178,12 @@ class QuestionClassifierNode(LLMNode):
variable_mapping = {"query": node_data.query_variable_selector}
variable_selectors = []
if node_data.instruction:
variable_template_parser = VariableTemplateParser(
template=node_data.instruction)
variable_selectors.extend(
variable_template_parser.extract_variable_selectors())
variable_template_parser = VariableTemplateParser(template=node_data.instruction)
variable_selectors.extend(variable_template_parser.extract_variable_selectors())
for variable_selector in variable_selectors:
variable_mapping[variable_selector.variable] = variable_selector.value_selector
variable_mapping = {node_id + "." + key: value for key,
value in variable_mapping.items()}
variable_mapping = {node_id + "." + key: value for key, value in variable_mapping.items()}
return variable_mapping
@ -210,8 +204,7 @@ class QuestionClassifierNode(LLMNode):
context: Optional[str],
) -> int:
prompt_transform = AdvancedPromptTransform(with_variable_tmpl=True)
prompt_template = self._get_prompt_template(
node_data, query, None, 2000)
prompt_template = self._get_prompt_template(node_data, query, None, 2000)
prompt_messages = prompt_transform.get_prompt(
prompt_template=prompt_template,
inputs={},
@ -224,15 +217,13 @@ class QuestionClassifierNode(LLMNode):
)
rest_tokens = 2000
model_context_tokens = model_config.model_schema.model_properties.get(
ModelPropertyKey.CONTEXT_SIZE)
model_context_tokens = model_config.model_schema.model_properties.get(ModelPropertyKey.CONTEXT_SIZE)
if model_context_tokens:
model_instance = ModelInstance(
provider_model_bundle=model_config.provider_model_bundle, model=model_config.model
)
curr_message_tokens = model_instance.get_llm_num_tokens(
prompt_messages)
curr_message_tokens = model_instance.get_llm_num_tokens(prompt_messages)
max_tokens = 0
for parameter_rule in model_config.model_schema.parameter_rules:
@ -273,8 +264,7 @@ class QuestionClassifierNode(LLMNode):
prompt_messages: list[LLMNodeChatModelMessage] = []
if model_mode == ModelMode.CHAT:
system_prompt_messages = LLMNodeChatModelMessage(
role=PromptMessageRole.SYSTEM, text=QUESTION_CLASSIFIER_SYSTEM_PROMPT.format(
histories=memory_str)
role=PromptMessageRole.SYSTEM, text=QUESTION_CLASSIFIER_SYSTEM_PROMPT.format(histories=memory_str)
)
prompt_messages.append(system_prompt_messages)
user_prompt_message_1 = LLMNodeChatModelMessage(
@ -315,5 +305,4 @@ class QuestionClassifierNode(LLMNode):
)
else:
raise InvalidModelTypeError(
f"Model mode {model_mode} not support.")
raise InvalidModelTypeError(f"Model mode {model_mode} not support.")

View File

@ -338,7 +338,6 @@ class ToolNode(BaseNode[ToolNodeData]):
data=message.message.data,
label=message.message.label,
metadata=message.message.metadata,
node_id=self.node_id,
)
# check if the agent log is already in the list

View File

@ -1,6 +1,3 @@
from collections.abc import Mapping, Sequence
from typing import Any
from core.workflow.entities.node_entities import NodeRunResult
from core.workflow.nodes.base import BaseNode
from core.workflow.nodes.enums import NodeType
@ -36,16 +33,3 @@ class VariableAggregatorNode(BaseNode[VariableAssignerNodeData]):
break
return NodeRunResult(status=WorkflowNodeExecutionStatus.SUCCEEDED, outputs=outputs, inputs=inputs)
@classmethod
def _extract_variable_selector_to_variable_mapping(
cls, *, graph_config: Mapping[str, Any], node_id: str, node_data: VariableAssignerNodeData
) -> Mapping[str, Sequence[str]]:
"""
Extract variable selector to variable mapping
:param graph_config: graph config
:param node_id: node id
:param node_data: node data
:return:
"""
return {}

View File

@ -64,6 +64,10 @@ class ConditionProcessor:
expected=expected_value,
)
group_results.append(result)
# Implemented short-circuit evaluation for logical conditions
if (operator == "and" and not result) or (operator == "or" and result):
final_result = result
return input_conditions, group_results, final_result
final_result = all(group_results) if operator == "and" else any(group_results)
return input_conditions, group_results, final_result
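Editor's note: a minimal illustration of the short-circuit rule added above — under "and" the first False decides the group, under "or" the first True does, and remaining conditions never run:

def evaluate(operator, condition_results):
    for result in condition_results:
        if (operator == "and" and not result) or (operator == "or" and result):
            return result  # short-circuit, as in the diff
    return all(condition_results) if operator == "and" else any(condition_results)

print(evaluate("and", [True, False, True]))   # False (stops at the second item)
print(evaluate("or", [False, True, False]))   # True  (stops at the second item)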

View File

@ -6,4 +6,4 @@ def init_app(app: DifyApp):
if dify_config.RESPECT_XFORWARD_HEADERS_ENABLED:
from werkzeug.middleware.proxy_fix import ProxyFix
app.wsgi_app = ProxyFix(app.wsgi_app) # type: ignore
app.wsgi_app = ProxyFix(app.wsgi_app, x_port=1) # type: ignore
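Editor's note: Werkzeug's ProxyFix trusts X-Forwarded-For and X-Forwarded-Proto from one proxy hop by default (x_for=1, x_proto=1) but ignores X-Forwarded-Port (x_port=0); passing x_port=1 trusts that header from one hop as well. Spelled out, assuming a Flask app:

from werkzeug.middleware.proxy_fix import ProxyFix

app.wsgi_app = ProxyFix(app.wsgi_app, x_for=1, x_proto=1, x_port=1)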

View File

@ -95,10 +95,6 @@ model_config_fields = {
"agent_mode": fields.Raw,
}
simple_configs_fields = {
"prompt_template": fields.String,
}
simple_model_config_fields = {
"model": fields.Raw(attribute="model_dict"),
"pre_prompt": fields.String,
@ -212,14 +208,3 @@ conversation_infinite_scroll_pagination_fields = {
"has_more": fields.Boolean,
"data": fields.List(fields.Nested(simple_conversation_fields)),
}
conversation_with_model_config_fields = {
**simple_conversation_fields,
"model_config": fields.Raw,
}
conversation_with_model_config_infinite_scroll_pagination_fields = {
"limit": fields.Integer,
"has_more": fields.Boolean,
"data": fields.List(fields.Nested(conversation_with_model_config_fields)),
}

View File

@ -7,27 +7,6 @@ from .raws import FilesContainedField
feedback_fields = {"rating": fields.String}
retriever_resource_fields = {
"id": fields.String,
"message_id": fields.String,
"position": fields.Integer,
"dataset_id": fields.String,
"dataset_name": fields.String,
"document_id": fields.String,
"document_name": fields.String,
"data_source_type": fields.String,
"segment_id": fields.String,
"score": fields.Float,
"hit_count": fields.Integer,
"word_count": fields.Integer,
"segment_position": fields.Integer,
"index_node_hash": fields.String,
"content": fields.String,
"created_at": TimestampField,
}
feedback_fields = {"rating": fields.String}
agent_thought_fields = {
"id": fields.String,
"chain_id": fields.String,

View File

@ -748,7 +748,7 @@ class DatasetKeywordTable(db.Model): # type: ignore[name-defined]
if keyword_table_text:
return json.loads(keyword_table_text.decode("utf-8"), cls=SetDecoder)
return None
except Exception:
except Exception as e:
logging.exception(f"Failed to load keyword table from file: {file_key}")
return None

View File

@ -1289,7 +1289,7 @@ class MessageAnnotation(Base):
return account
class AppAnnotationHitHistory(Base):
class AppAnnotationHitHistory(db.Model): # type: ignore[name-defined]
__tablename__ = "app_annotation_hit_histories"
__table_args__ = (
db.PrimaryKeyConstraint("id", name="app_annotation_hit_histories_pkey"),

View File

@ -1,27 +0,0 @@
from datetime import datetime
from sqlalchemy.orm import Mapped
from extensions.ext_database import db
from models.base import Base
from .types import StringUUID
class StagingAccountWhitelist(Base):
__tablename__ = "staging_account_whitelists"
__table_args__ = (
db.PrimaryKeyConstraint("id", name="staging_account_whitelist_pkey"),
db.Index("account_email_idx", "email"),
)
id: Mapped[str] = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()"))
email: Mapped[str] = db.Column(db.String(255), nullable=False)
disabled: Mapped[bool] = db.Column(db.Boolean, nullable=False, server_default=db.text("false"))
created_at: Mapped[datetime] = db.Column(
db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)")
)
updated_at: Mapped[datetime] = db.Column(
db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)")
)

api/poetry.lock (generated; 530 changed lines)
View File

@ -644,15 +644,15 @@ files = [
[[package]]
name = "bce-python-sdk"
version = "0.9.25"
version = "0.9.29"
description = "BCE SDK for python"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,<4,>=2.7"
groups = ["storage"]
markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "bce_python_sdk-0.9.25-py3-none-any.whl", hash = "sha256:cd1ab4c887e163adba6bfb3cd40465a365e5f4255705a015b0cdbe768e649877"},
{file = "bce_python_sdk-0.9.25.tar.gz", hash = "sha256:93a0623fbb1bf3a58b4f2d7bdbd799a3b342a538f0c72950c77168e431470e86"},
{file = "bce_python_sdk-0.9.29-py3-none-any.whl", hash = "sha256:6518dc0ada422acd1841eeabcb7f89cfc07e3bb1a4be3c75945cab953907b555"},
{file = "bce_python_sdk-0.9.29.tar.gz", hash = "sha256:326fbd50d57bf6d2fc21d58f589b069e0e84fc0a8733be9575c109293ab08cc4"},
]
[package.dependencies]
@@ -769,15 +769,15 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"]
[[package]]
name = "botocore"
version = "1.36.16"
version = "1.36.21"
description = "Low-level, data-driven core of boto 3."
optional = false
python-versions = ">=3.8"
groups = ["main"]
markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "botocore-1.36.16-py3-none-any.whl", hash = "sha256:aca0348ccd730332082489b6817fdf89e1526049adcf6e9c8c11c96dd9f42c03"},
{file = "botocore-1.36.16.tar.gz", hash = "sha256:10c6aa386ba1a9a0faef6bb5dbfc58fc2563a3c6b95352e86a583cd5f14b11f3"},
{file = "botocore-1.36.21-py3-none-any.whl", hash = "sha256:24a7052e792639dc2726001bd474cd0aaa959c1e18ddd92c17f3adc6efa1b132"},
{file = "botocore-1.36.21.tar.gz", hash = "sha256:da746240e2ad64fd4997f7f3664a0a8e303d18075fc1d473727cb6375080ea16"},
]
[package.dependencies]
@@ -860,10 +860,6 @@ files = [
{file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a37b8f0391212d29b3a91a799c8e4a2855e0576911cdfb2515487e30e322253d"},
{file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e84799f09591700a4154154cab9787452925578841a94321d5ee8fb9a9a328f0"},
{file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f66b5337fa213f1da0d9000bc8dc0cb5b896b726eefd9c6046f699b169c41b9e"},
{file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5dab0844f2cf82be357a0eb11a9087f70c5430b2c241493fc122bb6f2bb0917c"},
{file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e4fe605b917c70283db7dfe5ada75e04561479075761a0b3866c081d035b01c1"},
{file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:1e9a65b5736232e7a7f91ff3d02277f11d339bf34099a56cdab6a8b3410a02b2"},
{file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:58d4b711689366d4a03ac7957ab8c28890415e267f9b6589969e74b6e42225ec"},
{file = "Brotli-1.1.0-cp310-cp310-win32.whl", hash = "sha256:be36e3d172dc816333f33520154d708a2657ea63762ec16b62ece02ab5e4daf2"},
{file = "Brotli-1.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:0c6244521dda65ea562d5a69b9a26120769b7a9fb3db2fe9545935ed6735b128"},
{file = "Brotli-1.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a3daabb76a78f829cafc365531c972016e4aa8d5b4bf60660ad8ecee19df7ccc"},
@@ -876,14 +872,8 @@ files = [
{file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:19c116e796420b0cee3da1ccec3b764ed2952ccfcc298b55a10e5610ad7885f9"},
{file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:510b5b1bfbe20e1a7b3baf5fed9e9451873559a976c1a78eebaa3b86c57b4265"},
{file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a1fd8a29719ccce974d523580987b7f8229aeace506952fa9ce1d53a033873c8"},
{file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c247dd99d39e0338a604f8c2b3bc7061d5c2e9e2ac7ba9cc1be5a69cb6cd832f"},
{file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1b2c248cd517c222d89e74669a4adfa5577e06ab68771a529060cf5a156e9757"},
{file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:2a24c50840d89ded6c9a8fdc7b6ed3692ed4e86f1c4a4a938e1e92def92933e0"},
{file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f31859074d57b4639318523d6ffdca586ace54271a73ad23ad021acd807eb14b"},
{file = "Brotli-1.1.0-cp311-cp311-win32.whl", hash = "sha256:39da8adedf6942d76dc3e46653e52df937a3c4d6d18fdc94a7c29d263b1f5b50"},
{file = "Brotli-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:aac0411d20e345dc0920bdec5548e438e999ff68d77564d5e9463a7ca9d3e7b1"},
{file = "Brotli-1.1.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:32d95b80260d79926f5fab3c41701dbb818fde1c9da590e77e571eefd14abe28"},
{file = "Brotli-1.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b760c65308ff1e462f65d69c12e4ae085cff3b332d894637f6273a12a482d09f"},
{file = "Brotli-1.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:316cc9b17edf613ac76b1f1f305d2a748f1b976b033b049a6ecdfd5612c70409"},
{file = "Brotli-1.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:caf9ee9a5775f3111642d33b86237b05808dafcd6268faa492250e9b78046eb2"},
{file = "Brotli-1.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70051525001750221daa10907c77830bc889cb6d865cc0b813d9db7fefc21451"},
@@ -894,24 +884,8 @@ files = [
{file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:4093c631e96fdd49e0377a9c167bfd75b6d0bad2ace734c6eb20b348bc3ea180"},
{file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:7e4c4629ddad63006efa0ef968c8e4751c5868ff0b1c5c40f76524e894c50248"},
{file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:861bf317735688269936f755fa136a99d1ed526883859f86e41a5d43c61d8966"},
{file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:87a3044c3a35055527ac75e419dfa9f4f3667a1e887ee80360589eb8c90aabb9"},
{file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c5529b34c1c9d937168297f2c1fde7ebe9ebdd5e121297ff9c043bdb2ae3d6fb"},
{file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:ca63e1890ede90b2e4454f9a65135a4d387a4585ff8282bb72964fab893f2111"},
{file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e79e6520141d792237c70bcd7a3b122d00f2613769ae0cb61c52e89fd3443839"},
{file = "Brotli-1.1.0-cp312-cp312-win32.whl", hash = "sha256:5f4d5ea15c9382135076d2fb28dde923352fe02951e66935a9efaac8f10e81b0"},
{file = "Brotli-1.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:906bc3a79de8c4ae5b86d3d75a8b77e44404b0f4261714306e3ad248d8ab0951"},
{file = "Brotli-1.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8bf32b98b75c13ec7cf774164172683d6e7891088f6316e54425fde1efc276d5"},
{file = "Brotli-1.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7bc37c4d6b87fb1017ea28c9508b36bbcb0c3d18b4260fcdf08b200c74a6aee8"},
{file = "Brotli-1.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c0ef38c7a7014ffac184db9e04debe495d317cc9c6fb10071f7fefd93100a4f"},
{file = "Brotli-1.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91d7cc2a76b5567591d12c01f019dd7afce6ba8cba6571187e21e2fc418ae648"},
{file = "Brotli-1.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a93dde851926f4f2678e704fadeb39e16c35d8baebd5252c9fd94ce8ce68c4a0"},
{file = "Brotli-1.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f0db75f47be8b8abc8d9e31bc7aad0547ca26f24a54e6fd10231d623f183d089"},
{file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6967ced6730aed543b8673008b5a391c3b1076d834ca438bbd70635c73775368"},
{file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7eedaa5d036d9336c95915035fb57422054014ebdeb6f3b42eac809928e40d0c"},
{file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:d487f5432bf35b60ed625d7e1b448e2dc855422e87469e3f450aa5552b0eb284"},
{file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:832436e59afb93e1836081a20f324cb185836c617659b07b129141a8426973c7"},
{file = "Brotli-1.1.0-cp313-cp313-win32.whl", hash = "sha256:43395e90523f9c23a3d5bdf004733246fba087f2948f87ab28015f12359ca6a0"},
{file = "Brotli-1.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:9011560a466d2eb3f5a6e4929cf4a09be405c64154e12df0dd72713f6500e32b"},
{file = "Brotli-1.1.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:a090ca607cbb6a34b0391776f0cb48062081f5f60ddcce5d11838e67a01928d1"},
{file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2de9d02f5bda03d27ede52e8cfe7b865b066fa49258cbab568720aa5be80a47d"},
{file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2333e30a5e00fe0fe55903c8832e08ee9c3b1382aacf4db26664a16528d51b4b"},
@@ -921,10 +895,6 @@ files = [
{file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:fd5f17ff8f14003595ab414e45fce13d073e0762394f957182e69035c9f3d7c2"},
{file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:069a121ac97412d1fe506da790b3e69f52254b9df4eb665cd42460c837193354"},
{file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:e93dfc1a1165e385cc8239fab7c036fb2cd8093728cbd85097b284d7b99249a2"},
{file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_aarch64.whl", hash = "sha256:aea440a510e14e818e67bfc4027880e2fb500c2ccb20ab21c7a7c8b5b4703d75"},
{file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_i686.whl", hash = "sha256:6974f52a02321b36847cd19d1b8e381bf39939c21efd6ee2fc13a28b0d99348c"},
{file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_ppc64le.whl", hash = "sha256:a7e53012d2853a07a4a79c00643832161a910674a893d296c9f1259859a289d2"},
{file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:d7702622a8b40c49bffb46e1e3ba2e81268d5c04a34f460978c6b5517a34dd52"},
{file = "Brotli-1.1.0-cp36-cp36m-win32.whl", hash = "sha256:a599669fd7c47233438a56936988a2478685e74854088ef5293802123b5b2460"},
{file = "Brotli-1.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:d143fd47fad1db3d7c27a1b1d66162e855b5d50a89666af46e1679c496e8e579"},
{file = "Brotli-1.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:11d00ed0a83fa22d29bc6b64ef636c4552ebafcef57154b4ddd132f5638fbd1c"},
@@ -936,10 +906,6 @@ files = [
{file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:919e32f147ae93a09fe064d77d5ebf4e35502a8df75c29fb05788528e330fe74"},
{file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:23032ae55523cc7bccb4f6a0bf368cd25ad9bcdcc1990b64a647e7bbcce9cb5b"},
{file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:224e57f6eac61cc449f498cc5f0e1725ba2071a3d4f48d5d9dffba42db196438"},
{file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:cb1dac1770878ade83f2ccdf7d25e494f05c9165f5246b46a621cc849341dc01"},
{file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:3ee8a80d67a4334482d9712b8e83ca6b1d9bc7e351931252ebef5d8f7335a547"},
{file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:5e55da2c8724191e5b557f8e18943b1b4839b8efc3ef60d65985bcf6f587dd38"},
{file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:d342778ef319e1026af243ed0a07c97acf3bad33b9f29e7ae6a1f68fd083e90c"},
{file = "Brotli-1.1.0-cp37-cp37m-win32.whl", hash = "sha256:587ca6d3cef6e4e868102672d3bd9dc9698c309ba56d41c2b9c85bbb903cdb95"},
{file = "Brotli-1.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:2954c1c23f81c2eaf0b0717d9380bd348578a94161a65b3a2afc62c86467dd68"},
{file = "Brotli-1.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:efa8b278894b14d6da122a72fefcebc28445f2d3f880ac59d46c90f4c13be9a3"},
@@ -952,10 +918,6 @@ files = [
{file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ab4fbee0b2d9098c74f3057b2bc055a8bd92ccf02f65944a241b4349229185a"},
{file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:141bd4d93984070e097521ed07e2575b46f817d08f9fa42b16b9b5f27b5ac088"},
{file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fce1473f3ccc4187f75b4690cfc922628aed4d3dd013d047f95a9b3919a86596"},
{file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d2b35ca2c7f81d173d2fadc2f4f31e88cc5f7a39ae5b6db5513cf3383b0e0ec7"},
{file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:af6fa6817889314555aede9a919612b23739395ce767fe7fcbea9a80bf140fe5"},
{file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:2feb1d960f760a575dbc5ab3b1c00504b24caaf6986e2dc2b01c09c87866a943"},
{file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:4410f84b33374409552ac9b6903507cdb31cd30d2501fc5ca13d18f73548444a"},
{file = "Brotli-1.1.0-cp38-cp38-win32.whl", hash = "sha256:db85ecf4e609a48f4b29055f1e144231b90edc90af7481aa731ba2d059226b1b"},
{file = "Brotli-1.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3d7954194c36e304e1523f55d7042c59dc53ec20dd4e9ea9d151f1b62b4415c0"},
{file = "Brotli-1.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5fb2ce4b8045c78ebbc7b8f3c15062e435d47e7393cc57c25115cfd49883747a"},
@@ -968,10 +930,6 @@ files = [
{file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:949f3b7c29912693cee0afcf09acd6ebc04c57af949d9bf77d6101ebb61e388c"},
{file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:89f4988c7203739d48c6f806f1e87a1d96e0806d44f0fba61dba81392c9e474d"},
{file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:de6551e370ef19f8de1807d0a9aa2cdfdce2e85ce88b122fe9f6b2b076837e59"},
{file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0737ddb3068957cf1b054899b0883830bb1fec522ec76b1098f9b6e0f02d9419"},
{file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:4f3607b129417e111e30637af1b56f24f7a49e64763253bbc275c75fa887d4b2"},
{file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:6c6e0c425f22c1c719c42670d561ad682f7bfeeef918edea971a79ac5252437f"},
{file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:494994f807ba0b92092a163a0a283961369a65f6cbe01e8891132b7a320e61eb"},
{file = "Brotli-1.1.0-cp39-cp39-win32.whl", hash = "sha256:f0d8a7a6b5983c2496e364b969f0e526647a06b075d034f3297dc66f3b360c64"},
{file = "Brotli-1.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:cdad5b9014d83ca68c25d2e9444e28e967ef16e80f6b436918c700c117a85467"},
{file = "Brotli-1.1.0.tar.gz", hash = "sha256:81de08ac11bcb85841e440c13611c00b67d3bf82698314928d0b676362546724"},
@@ -1820,40 +1778,44 @@ files = [
[[package]]
name = "cryptography"
version = "44.0.0"
version = "44.0.1"
description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
optional = false
python-versions = "!=3.9.0,!=3.9.1,>=3.7"
groups = ["main", "storage", "vdb"]
markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "cryptography-44.0.0-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:84111ad4ff3f6253820e6d3e58be2cc2a00adb29335d4cacb5ab4d4d34f2a123"},
{file = "cryptography-44.0.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b15492a11f9e1b62ba9d73c210e2416724633167de94607ec6069ef724fad092"},
{file = "cryptography-44.0.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:831c3c4d0774e488fdc83a1923b49b9957d33287de923d58ebd3cec47a0ae43f"},
{file = "cryptography-44.0.0-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:761817a3377ef15ac23cd7834715081791d4ec77f9297ee694ca1ee9c2c7e5eb"},
{file = "cryptography-44.0.0-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3c672a53c0fb4725a29c303be906d3c1fa99c32f58abe008a82705f9ee96f40b"},
{file = "cryptography-44.0.0-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:4ac4c9f37eba52cb6fbeaf5b59c152ea976726b865bd4cf87883a7e7006cc543"},
{file = "cryptography-44.0.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ed3534eb1090483c96178fcb0f8893719d96d5274dfde98aa6add34614e97c8e"},
{file = "cryptography-44.0.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:f3f6fdfa89ee2d9d496e2c087cebef9d4fcbb0ad63c40e821b39f74bf48d9c5e"},
{file = "cryptography-44.0.0-cp37-abi3-win32.whl", hash = "sha256:eb33480f1bad5b78233b0ad3e1b0be21e8ef1da745d8d2aecbb20671658b9053"},
{file = "cryptography-44.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:abc998e0c0eee3c8a1904221d3f67dcfa76422b23620173e28c11d3e626c21bd"},
{file = "cryptography-44.0.0-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:660cb7312a08bc38be15b696462fa7cc7cd85c3ed9c576e81f4dc4d8b2b31591"},
{file = "cryptography-44.0.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1923cb251c04be85eec9fda837661c67c1049063305d6be5721643c22dd4e2b7"},
{file = "cryptography-44.0.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:404fdc66ee5f83a1388be54300ae978b2efd538018de18556dde92575e05defc"},
{file = "cryptography-44.0.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:c5eb858beed7835e5ad1faba59e865109f3e52b3783b9ac21e7e47dc5554e289"},
{file = "cryptography-44.0.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f53c2c87e0fb4b0c00fa9571082a057e37690a8f12233306161c8f4b819960b7"},
{file = "cryptography-44.0.0-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:9e6fc8a08e116fb7c7dd1f040074c9d7b51d74a8ea40d4df2fc7aa08b76b9e6c"},
{file = "cryptography-44.0.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:d2436114e46b36d00f8b72ff57e598978b37399d2786fd39793c36c6d5cb1c64"},
{file = "cryptography-44.0.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a01956ddfa0a6790d594f5b34fc1bfa6098aca434696a03cfdbe469b8ed79285"},
{file = "cryptography-44.0.0-cp39-abi3-win32.whl", hash = "sha256:eca27345e1214d1b9f9490d200f9db5a874479be914199194e746c893788d417"},
{file = "cryptography-44.0.0-cp39-abi3-win_amd64.whl", hash = "sha256:708ee5f1bafe76d041b53a4f95eb28cdeb8d18da17e597d46d7833ee59b97ede"},
{file = "cryptography-44.0.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:37d76e6863da3774cd9db5b409a9ecfd2c71c981c38788d3fcfaf177f447b731"},
{file = "cryptography-44.0.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:f677e1268c4e23420c3acade68fac427fffcb8d19d7df95ed7ad17cdef8404f4"},
{file = "cryptography-44.0.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f5e7cb1e5e56ca0933b4873c0220a78b773b24d40d186b6738080b73d3d0a756"},
{file = "cryptography-44.0.0-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:8b3e6eae66cf54701ee7d9c83c30ac0a1e3fa17be486033000f2a73a12ab507c"},
{file = "cryptography-44.0.0-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:be4ce505894d15d5c5037167ffb7f0ae90b7be6f2a98f9a5c3442395501c32fa"},
{file = "cryptography-44.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:62901fb618f74d7d81bf408c8719e9ec14d863086efe4185afd07c352aee1d2c"},
{file = "cryptography-44.0.0.tar.gz", hash = "sha256:cd4e834f340b4293430701e772ec543b0fbe6c2dea510a5286fe0acabe153a02"},
{file = "cryptography-44.0.1-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:bf688f615c29bfe9dfc44312ca470989279f0e94bb9f631f85e3459af8efc009"},
{file = "cryptography-44.0.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd7c7e2d71d908dc0f8d2027e1604102140d84b155e658c20e8ad1304317691f"},
{file = "cryptography-44.0.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:887143b9ff6bad2b7570da75a7fe8bbf5f65276365ac259a5d2d5147a73775f2"},
{file = "cryptography-44.0.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:322eb03ecc62784536bc173f1483e76747aafeb69c8728df48537eb431cd1911"},
{file = "cryptography-44.0.1-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:21377472ca4ada2906bc313168c9dc7b1d7ca417b63c1c3011d0c74b7de9ae69"},
{file = "cryptography-44.0.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:df978682c1504fc93b3209de21aeabf2375cb1571d4e61907b3e7a2540e83026"},
{file = "cryptography-44.0.1-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:eb3889330f2a4a148abead555399ec9a32b13b7c8ba969b72d8e500eb7ef84cd"},
{file = "cryptography-44.0.1-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:8e6a85a93d0642bd774460a86513c5d9d80b5c002ca9693e63f6e540f1815ed0"},
{file = "cryptography-44.0.1-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:6f76fdd6fd048576a04c5210d53aa04ca34d2ed63336d4abd306d0cbe298fddf"},
{file = "cryptography-44.0.1-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6c8acf6f3d1f47acb2248ec3ea261171a671f3d9428e34ad0357148d492c7864"},
{file = "cryptography-44.0.1-cp37-abi3-win32.whl", hash = "sha256:24979e9f2040c953a94bf3c6782e67795a4c260734e5264dceea65c8f4bae64a"},
{file = "cryptography-44.0.1-cp37-abi3-win_amd64.whl", hash = "sha256:fd0ee90072861e276b0ff08bd627abec29e32a53b2be44e41dbcdf87cbee2b00"},
{file = "cryptography-44.0.1-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:a2d8a7045e1ab9b9f803f0d9531ead85f90c5f2859e653b61497228b18452008"},
{file = "cryptography-44.0.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b8272f257cf1cbd3f2e120f14c68bff2b6bdfcc157fafdee84a1b795efd72862"},
{file = "cryptography-44.0.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e8d181e90a777b63f3f0caa836844a1182f1f265687fac2115fcf245f5fbec3"},
{file = "cryptography-44.0.1-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:436df4f203482f41aad60ed1813811ac4ab102765ecae7a2bbb1dbb66dcff5a7"},
{file = "cryptography-44.0.1-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:4f422e8c6a28cf8b7f883eb790695d6d45b0c385a2583073f3cec434cc705e1a"},
{file = "cryptography-44.0.1-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:72198e2b5925155497a5a3e8c216c7fb3e64c16ccee11f0e7da272fa93b35c4c"},
{file = "cryptography-44.0.1-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:2a46a89ad3e6176223b632056f321bc7de36b9f9b93b2cc1cccf935a3849dc62"},
{file = "cryptography-44.0.1-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:53f23339864b617a3dfc2b0ac8d5c432625c80014c25caac9082314e9de56f41"},
{file = "cryptography-44.0.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:888fcc3fce0c888785a4876ca55f9f43787f4c5c1cc1e2e0da71ad481ff82c5b"},
{file = "cryptography-44.0.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:00918d859aa4e57db8299607086f793fa7813ae2ff5a4637e318a25ef82730f7"},
{file = "cryptography-44.0.1-cp39-abi3-win32.whl", hash = "sha256:9b336599e2cb77b1008cb2ac264b290803ec5e8e89d618a5e978ff5eb6f715d9"},
{file = "cryptography-44.0.1-cp39-abi3-win_amd64.whl", hash = "sha256:e403f7f766ded778ecdb790da786b418a9f2394f36e8cc8b796cc056ab05f44f"},
{file = "cryptography-44.0.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:1f9a92144fa0c877117e9748c74501bea842f93d21ee00b0cf922846d9d0b183"},
{file = "cryptography-44.0.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:610a83540765a8d8ce0f351ce42e26e53e1f774a6efb71eb1b41eb01d01c3d12"},
{file = "cryptography-44.0.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:5fed5cd6102bb4eb843e3315d2bf25fede494509bddadb81e03a859c1bc17b83"},
{file = "cryptography-44.0.1-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:f4daefc971c2d1f82f03097dc6f216744a6cd2ac0f04c68fb935ea2ba2a0d420"},
{file = "cryptography-44.0.1-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:94f99f2b943b354a5b6307d7e8d19f5c423a794462bde2bf310c770ba052b1c4"},
{file = "cryptography-44.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d9c5b9f698a83c8bd71e0f4d3f9f839ef244798e5ffe96febfa9714717db7af7"},
{file = "cryptography-44.0.1.tar.gz", hash = "sha256:f51f5705ab27898afda1aaa430f34ad90dc117421057782022edf0600bec5f14"},
]
[package.dependencies]
@@ -1866,7 +1828,7 @@ nox = ["nox (>=2024.4.15)", "nox[uv] (>=2024.3.2)"]
pep8test = ["check-sdist", "click (>=8.0.1)", "mypy (>=1.4)", "ruff (>=0.3.6)"]
sdist = ["build (>=1.0.0)"]
ssh = ["bcrypt (>=3.1.5)"]
test = ["certifi (>=2024)", "cryptography-vectors (==44.0.0)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"]
test = ["certifi (>=2024)", "cryptography-vectors (==44.0.1)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"]
test-randomorder = ["pytest-randomly"]
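Each lock entry above pairs an artifact with a sha256 digest that the installer verifies before installing. A minimal sketch of the same check done by hand, using the cryptography-44.0.1 sdist digest recorded in this diff (the local file path is hypothetical):

import hashlib

def sha256_of(path: str) -> str:
    # Stream the file in 1 MiB chunks so large wheels don't load into memory.
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    return h.hexdigest()

expected = "f51f5705ab27898afda1aaa430f34ad90dc117421057782022edf0600bec5f14"
assert sha256_of("cryptography-44.0.1.tar.gz") == expected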
[[package]]
@@ -2316,15 +2278,15 @@ sqlalchemy = ">=2.0.16"
[[package]]
name = "flatbuffers"
version = "25.1.24"
version = "25.2.10"
description = "The FlatBuffers serialization format for Python"
optional = false
python-versions = "*"
groups = ["vdb"]
markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "flatbuffers-25.1.24-py2.py3-none-any.whl", hash = "sha256:1abfebaf4083117225d0723087ea909896a34e3fec933beedb490d595ba24145"},
{file = "flatbuffers-25.1.24.tar.gz", hash = "sha256:e0f7b7d806c0abdf166275492663130af40c11f89445045fbef0aa3c9a8643ad"},
{file = "flatbuffers-25.2.10-py2.py3-none-any.whl", hash = "sha256:ebba5f4d5ea615af3f7fd70fc310636fbb2bbd1f566ac0a23d98dd412de50051"},
{file = "flatbuffers-25.2.10.tar.gz", hash = "sha256:97e451377a41262f8d9bd4295cc836133415cc03d8cb966410a4af92eb00d26e"},
]
[[package]]
@@ -4166,15 +4128,15 @@ rapidfuzz = ">=3.9.0,<4.0.0"
[[package]]
name = "litellm"
version = "1.60.8"
version = "1.61.6"
description = "Library to easily interface with LLM API providers"
optional = false
python-versions = "!=2.7.*,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,!=3.7.*,>=3.8"
groups = ["main"]
markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "litellm-1.60.8-py3-none-any.whl", hash = "sha256:260bdcc9749c769f1a84dc927abe7c91f6294a97da05abc6b513c5dd2dcf17a1"},
{file = "litellm-1.60.8.tar.gz", hash = "sha256:4a0aca9bd226d727ca4a41aaf8722f825fc10cf33f37a177a3cceb4ee2c442d8"},
{file = "litellm-1.61.6-py3-none-any.whl", hash = "sha256:eef4c4a84a2c93de4c6d5a05a785f9b0cc61f63bafb3b3dc83d977db649e1b13"},
{file = "litellm-1.61.6.tar.gz", hash = "sha256:2c613823f86ce2aa7956e2458857ab6aa62258dc7da9816bfdac90735be270be"},
]
[package.dependencies]
@@ -4228,159 +4190,159 @@ files = [
[[package]]
name = "lxml"
version = "5.3.0"
version = "5.3.1"
description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API."
optional = false
python-versions = ">=3.6"
groups = ["main"]
markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "lxml-5.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:dd36439be765e2dde7660212b5275641edbc813e7b24668831a5c8ac91180656"},
{file = "lxml-5.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ae5fe5c4b525aa82b8076c1a59d642c17b6e8739ecf852522c6321852178119d"},
{file = "lxml-5.3.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:501d0d7e26b4d261fca8132854d845e4988097611ba2531408ec91cf3fd9d20a"},
{file = "lxml-5.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb66442c2546446944437df74379e9cf9e9db353e61301d1a0e26482f43f0dd8"},
{file = "lxml-5.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9e41506fec7a7f9405b14aa2d5c8abbb4dbbd09d88f9496958b6d00cb4d45330"},
{file = "lxml-5.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f7d4a670107d75dfe5ad080bed6c341d18c4442f9378c9f58e5851e86eb79965"},
{file = "lxml-5.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41ce1f1e2c7755abfc7e759dc34d7d05fd221723ff822947132dc934d122fe22"},
{file = "lxml-5.3.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:44264ecae91b30e5633013fb66f6ddd05c006d3e0e884f75ce0b4755b3e3847b"},
{file = "lxml-5.3.0-cp310-cp310-manylinux_2_28_ppc64le.whl", hash = "sha256:3c174dc350d3ec52deb77f2faf05c439331d6ed5e702fc247ccb4e6b62d884b7"},
{file = "lxml-5.3.0-cp310-cp310-manylinux_2_28_s390x.whl", hash = "sha256:2dfab5fa6a28a0b60a20638dc48e6343c02ea9933e3279ccb132f555a62323d8"},
{file = "lxml-5.3.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:b1c8c20847b9f34e98080da785bb2336ea982e7f913eed5809e5a3c872900f32"},
{file = "lxml-5.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:2c86bf781b12ba417f64f3422cfc302523ac9cd1d8ae8c0f92a1c66e56ef2e86"},
{file = "lxml-5.3.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:c162b216070f280fa7da844531169be0baf9ccb17263cf5a8bf876fcd3117fa5"},
{file = "lxml-5.3.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:36aef61a1678cb778097b4a6eeae96a69875d51d1e8f4d4b491ab3cfb54b5a03"},
{file = "lxml-5.3.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f65e5120863c2b266dbcc927b306c5b78e502c71edf3295dfcb9501ec96e5fc7"},
{file = "lxml-5.3.0-cp310-cp310-win32.whl", hash = "sha256:ef0c1fe22171dd7c7c27147f2e9c3e86f8bdf473fed75f16b0c2e84a5030ce80"},
{file = "lxml-5.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:052d99051e77a4f3e8482c65014cf6372e61b0a6f4fe9edb98503bb5364cfee3"},
{file = "lxml-5.3.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:74bcb423462233bc5d6066e4e98b0264e7c1bed7541fff2f4e34fe6b21563c8b"},
{file = "lxml-5.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a3d819eb6f9b8677f57f9664265d0a10dd6551d227afb4af2b9cd7bdc2ccbf18"},
{file = "lxml-5.3.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b8f5db71b28b8c404956ddf79575ea77aa8b1538e8b2ef9ec877945b3f46442"},
{file = "lxml-5.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c3406b63232fc7e9b8783ab0b765d7c59e7c59ff96759d8ef9632fca27c7ee4"},
{file = "lxml-5.3.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ecdd78ab768f844c7a1d4a03595038c166b609f6395e25af9b0f3f26ae1230f"},
{file = "lxml-5.3.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:168f2dfcfdedf611eb285efac1516c8454c8c99caf271dccda8943576b67552e"},
{file = "lxml-5.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa617107a410245b8660028a7483b68e7914304a6d4882b5ff3d2d3eb5948d8c"},
{file = "lxml-5.3.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:69959bd3167b993e6e710b99051265654133a98f20cec1d9b493b931942e9c16"},
{file = "lxml-5.3.0-cp311-cp311-manylinux_2_28_ppc64le.whl", hash = "sha256:bd96517ef76c8654446fc3db9242d019a1bb5fe8b751ba414765d59f99210b79"},
{file = "lxml-5.3.0-cp311-cp311-manylinux_2_28_s390x.whl", hash = "sha256:ab6dd83b970dc97c2d10bc71aa925b84788c7c05de30241b9e96f9b6d9ea3080"},
{file = "lxml-5.3.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:eec1bb8cdbba2925bedc887bc0609a80e599c75b12d87ae42ac23fd199445654"},
{file = "lxml-5.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6a7095eeec6f89111d03dabfe5883a1fd54da319c94e0fb104ee8f23616b572d"},
{file = "lxml-5.3.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:6f651ebd0b21ec65dfca93aa629610a0dbc13dbc13554f19b0113da2e61a4763"},
{file = "lxml-5.3.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:f422a209d2455c56849442ae42f25dbaaba1c6c3f501d58761c619c7836642ec"},
{file = "lxml-5.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:62f7fdb0d1ed2065451f086519865b4c90aa19aed51081979ecd05a21eb4d1be"},
{file = "lxml-5.3.0-cp311-cp311-win32.whl", hash = "sha256:c6379f35350b655fd817cd0d6cbeef7f265f3ae5fedb1caae2eb442bbeae9ab9"},
{file = "lxml-5.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:9c52100e2c2dbb0649b90467935c4b0de5528833c76a35ea1a2691ec9f1ee7a1"},
{file = "lxml-5.3.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:e99f5507401436fdcc85036a2e7dc2e28d962550afe1cbfc07c40e454256a859"},
{file = "lxml-5.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:384aacddf2e5813a36495233b64cb96b1949da72bef933918ba5c84e06af8f0e"},
{file = "lxml-5.3.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:874a216bf6afaf97c263b56371434e47e2c652d215788396f60477540298218f"},
{file = "lxml-5.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65ab5685d56914b9a2a34d67dd5488b83213d680b0c5d10b47f81da5a16b0b0e"},
{file = "lxml-5.3.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aac0bbd3e8dd2d9c45ceb82249e8bdd3ac99131a32b4d35c8af3cc9db1657179"},
{file = "lxml-5.3.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b369d3db3c22ed14c75ccd5af429086f166a19627e84a8fdade3f8f31426e52a"},
{file = "lxml-5.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c24037349665434f375645fa9d1f5304800cec574d0310f618490c871fd902b3"},
{file = "lxml-5.3.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:62d172f358f33a26d6b41b28c170c63886742f5b6772a42b59b4f0fa10526cb1"},
{file = "lxml-5.3.0-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:c1f794c02903c2824fccce5b20c339a1a14b114e83b306ff11b597c5f71a1c8d"},
{file = "lxml-5.3.0-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:5d6a6972b93c426ace71e0be9a6f4b2cfae9b1baed2eed2006076a746692288c"},
{file = "lxml-5.3.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:3879cc6ce938ff4eb4900d901ed63555c778731a96365e53fadb36437a131a99"},
{file = "lxml-5.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:74068c601baff6ff021c70f0935b0c7bc528baa8ea210c202e03757c68c5a4ff"},
{file = "lxml-5.3.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:ecd4ad8453ac17bc7ba3868371bffb46f628161ad0eefbd0a855d2c8c32dd81a"},
{file = "lxml-5.3.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:7e2f58095acc211eb9d8b5771bf04df9ff37d6b87618d1cbf85f92399c98dae8"},
{file = "lxml-5.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e63601ad5cd8f860aa99d109889b5ac34de571c7ee902d6812d5d9ddcc77fa7d"},
{file = "lxml-5.3.0-cp312-cp312-win32.whl", hash = "sha256:17e8d968d04a37c50ad9c456a286b525d78c4a1c15dd53aa46c1d8e06bf6fa30"},
{file = "lxml-5.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:c1a69e58a6bb2de65902051d57fde951febad631a20a64572677a1052690482f"},
{file = "lxml-5.3.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8c72e9563347c7395910de6a3100a4840a75a6f60e05af5e58566868d5eb2d6a"},
{file = "lxml-5.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e92ce66cd919d18d14b3856906a61d3f6b6a8500e0794142338da644260595cd"},
{file = "lxml-5.3.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d04f064bebdfef9240478f7a779e8c5dc32b8b7b0b2fc6a62e39b928d428e51"},
{file = "lxml-5.3.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c2fb570d7823c2bbaf8b419ba6e5662137f8166e364a8b2b91051a1fb40ab8b"},
{file = "lxml-5.3.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0c120f43553ec759f8de1fee2f4794452b0946773299d44c36bfe18e83caf002"},
{file = "lxml-5.3.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:562e7494778a69086f0312ec9689f6b6ac1c6b65670ed7d0267e49f57ffa08c4"},
{file = "lxml-5.3.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:423b121f7e6fa514ba0c7918e56955a1d4470ed35faa03e3d9f0e3baa4c7e492"},
{file = "lxml-5.3.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:c00f323cc00576df6165cc9d21a4c21285fa6b9989c5c39830c3903dc4303ef3"},
{file = "lxml-5.3.0-cp313-cp313-manylinux_2_28_ppc64le.whl", hash = "sha256:1fdc9fae8dd4c763e8a31e7630afef517eab9f5d5d31a278df087f307bf601f4"},
{file = "lxml-5.3.0-cp313-cp313-manylinux_2_28_s390x.whl", hash = "sha256:658f2aa69d31e09699705949b5fc4719cbecbd4a97f9656a232e7d6c7be1a367"},
{file = "lxml-5.3.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:1473427aff3d66a3fa2199004c3e601e6c4500ab86696edffdbc84954c72d832"},
{file = "lxml-5.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a87de7dd873bf9a792bf1e58b1c3887b9264036629a5bf2d2e6579fe8e73edff"},
{file = "lxml-5.3.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:0d7b36afa46c97875303a94e8f3ad932bf78bace9e18e603f2085b652422edcd"},
{file = "lxml-5.3.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:cf120cce539453ae086eacc0130a324e7026113510efa83ab42ef3fcfccac7fb"},
{file = "lxml-5.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:df5c7333167b9674aa8ae1d4008fa4bc17a313cc490b2cca27838bbdcc6bb15b"},
{file = "lxml-5.3.0-cp313-cp313-win32.whl", hash = "sha256:c802e1c2ed9f0c06a65bc4ed0189d000ada8049312cfeab6ca635e39c9608957"},
{file = "lxml-5.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:406246b96d552e0503e17a1006fd27edac678b3fcc9f1be71a2f94b4ff61528d"},
{file = "lxml-5.3.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:8f0de2d390af441fe8b2c12626d103540b5d850d585b18fcada58d972b74a74e"},
{file = "lxml-5.3.0-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1afe0a8c353746e610bd9031a630a95bcfb1a720684c3f2b36c4710a0a96528f"},
{file = "lxml-5.3.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56b9861a71575f5795bde89256e7467ece3d339c9b43141dbdd54544566b3b94"},
{file = "lxml-5.3.0-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:9fb81d2824dff4f2e297a276297e9031f46d2682cafc484f49de182aa5e5df99"},
{file = "lxml-5.3.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:2c226a06ecb8cdef28845ae976da407917542c5e6e75dcac7cc33eb04aaeb237"},
{file = "lxml-5.3.0-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:7d3d1ca42870cdb6d0d29939630dbe48fa511c203724820fc0fd507b2fb46577"},
{file = "lxml-5.3.0-cp36-cp36m-win32.whl", hash = "sha256:094cb601ba9f55296774c2d57ad68730daa0b13dc260e1f941b4d13678239e70"},
{file = "lxml-5.3.0-cp36-cp36m-win_amd64.whl", hash = "sha256:eafa2c8658f4e560b098fe9fc54539f86528651f61849b22111a9b107d18910c"},
{file = "lxml-5.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cb83f8a875b3d9b458cada4f880fa498646874ba4011dc974e071a0a84a1b033"},
{file = "lxml-5.3.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:25f1b69d41656b05885aa185f5fdf822cb01a586d1b32739633679699f220391"},
{file = "lxml-5.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23e0553b8055600b3bf4a00b255ec5c92e1e4aebf8c2c09334f8368e8bd174d6"},
{file = "lxml-5.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ada35dd21dc6c039259596b358caab6b13f4db4d4a7f8665764d616daf9cc1d"},
{file = "lxml-5.3.0-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:81b4e48da4c69313192d8c8d4311e5d818b8be1afe68ee20f6385d0e96fc9512"},
{file = "lxml-5.3.0-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:2bc9fd5ca4729af796f9f59cd8ff160fe06a474da40aca03fcc79655ddee1a8b"},
{file = "lxml-5.3.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:07da23d7ee08577760f0a71d67a861019103e4812c87e2fab26b039054594cc5"},
{file = "lxml-5.3.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:ea2e2f6f801696ad7de8aec061044d6c8c0dd4037608c7cab38a9a4d316bfb11"},
{file = "lxml-5.3.0-cp37-cp37m-win32.whl", hash = "sha256:5c54afdcbb0182d06836cc3d1be921e540be3ebdf8b8a51ee3ef987537455f84"},
{file = "lxml-5.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:f2901429da1e645ce548bf9171784c0f74f0718c3f6150ce166be39e4dd66c3e"},
{file = "lxml-5.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c56a1d43b2f9ee4786e4658c7903f05da35b923fb53c11025712562d5cc02753"},
{file = "lxml-5.3.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ee8c39582d2652dcd516d1b879451500f8db3fe3607ce45d7c5957ab2596040"},
{file = "lxml-5.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fdf3a3059611f7585a78ee10399a15566356116a4288380921a4b598d807a22"},
{file = "lxml-5.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:146173654d79eb1fc97498b4280c1d3e1e5d58c398fa530905c9ea50ea849b22"},
{file = "lxml-5.3.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:0a7056921edbdd7560746f4221dca89bb7a3fe457d3d74267995253f46343f15"},
{file = "lxml-5.3.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:9e4b47ac0f5e749cfc618efdf4726269441014ae1d5583e047b452a32e221920"},
{file = "lxml-5.3.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:f914c03e6a31deb632e2daa881fe198461f4d06e57ac3d0e05bbcab8eae01945"},
{file = "lxml-5.3.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:213261f168c5e1d9b7535a67e68b1f59f92398dd17a56d934550837143f79c42"},
{file = "lxml-5.3.0-cp38-cp38-win32.whl", hash = "sha256:218c1b2e17a710e363855594230f44060e2025b05c80d1f0661258142b2add2e"},
{file = "lxml-5.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:315f9542011b2c4e1d280e4a20ddcca1761993dda3afc7a73b01235f8641e903"},
{file = "lxml-5.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1ffc23010330c2ab67fac02781df60998ca8fe759e8efde6f8b756a20599c5de"},
{file = "lxml-5.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2b3778cb38212f52fac9fe913017deea2fdf4eb1a4f8e4cfc6b009a13a6d3fcc"},
{file = "lxml-5.3.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b0c7a688944891086ba192e21c5229dea54382f4836a209ff8d0a660fac06be"},
{file = "lxml-5.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:747a3d3e98e24597981ca0be0fd922aebd471fa99d0043a3842d00cdcad7ad6a"},
{file = "lxml-5.3.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86a6b24b19eaebc448dc56b87c4865527855145d851f9fc3891673ff97950540"},
{file = "lxml-5.3.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b11a5d918a6216e521c715b02749240fb07ae5a1fefd4b7bf12f833bc8b4fe70"},
{file = "lxml-5.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68b87753c784d6acb8a25b05cb526c3406913c9d988d51f80adecc2b0775d6aa"},
{file = "lxml-5.3.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:109fa6fede314cc50eed29e6e56c540075e63d922455346f11e4d7a036d2b8cf"},
{file = "lxml-5.3.0-cp39-cp39-manylinux_2_28_ppc64le.whl", hash = "sha256:02ced472497b8362c8e902ade23e3300479f4f43e45f4105c85ef43b8db85229"},
{file = "lxml-5.3.0-cp39-cp39-manylinux_2_28_s390x.whl", hash = "sha256:6b038cc86b285e4f9fea2ba5ee76e89f21ed1ea898e287dc277a25884f3a7dfe"},
{file = "lxml-5.3.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:7437237c6a66b7ca341e868cda48be24b8701862757426852c9b3186de1da8a2"},
{file = "lxml-5.3.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7f41026c1d64043a36fda21d64c5026762d53a77043e73e94b71f0521939cc71"},
{file = "lxml-5.3.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:482c2f67761868f0108b1743098640fbb2a28a8e15bf3f47ada9fa59d9fe08c3"},
{file = "lxml-5.3.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:1483fd3358963cc5c1c9b122c80606a3a79ee0875bcac0204149fa09d6ff2727"},
{file = "lxml-5.3.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2dec2d1130a9cda5b904696cec33b2cfb451304ba9081eeda7f90f724097300a"},
{file = "lxml-5.3.0-cp39-cp39-win32.whl", hash = "sha256:a0eabd0a81625049c5df745209dc7fcef6e2aea7793e5f003ba363610aa0a3ff"},
{file = "lxml-5.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:89e043f1d9d341c52bf2af6d02e6adde62e0a46e6755d5eb60dc6e4f0b8aeca2"},
{file = "lxml-5.3.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7b1cd427cb0d5f7393c31b7496419da594fe600e6fdc4b105a54f82405e6626c"},
{file = "lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:51806cfe0279e06ed8500ce19479d757db42a30fd509940b1701be9c86a5ff9a"},
{file = "lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee70d08fd60c9565ba8190f41a46a54096afa0eeb8f76bd66f2c25d3b1b83005"},
{file = "lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:8dc2c0395bea8254d8daebc76dcf8eb3a95ec2a46fa6fae5eaccee366bfe02ce"},
{file = "lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:6ba0d3dcac281aad8a0e5b14c7ed6f9fa89c8612b47939fc94f80b16e2e9bc83"},
{file = "lxml-5.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:6e91cf736959057f7aac7adfc83481e03615a8e8dd5758aa1d95ea69e8931dba"},
{file = "lxml-5.3.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:94d6c3782907b5e40e21cadf94b13b0842ac421192f26b84c45f13f3c9d5dc27"},
{file = "lxml-5.3.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c300306673aa0f3ed5ed9372b21867690a17dba38c68c44b287437c362ce486b"},
{file = "lxml-5.3.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78d9b952e07aed35fe2e1a7ad26e929595412db48535921c5013edc8aa4a35ce"},
{file = "lxml-5.3.0-pp37-pypy37_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:01220dca0d066d1349bd6a1726856a78f7929f3878f7e2ee83c296c69495309e"},
{file = "lxml-5.3.0-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:2d9b8d9177afaef80c53c0a9e30fa252ff3036fb1c6494d427c066a4ce6a282f"},
{file = "lxml-5.3.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:20094fc3f21ea0a8669dc4c61ed7fa8263bd37d97d93b90f28fc613371e7a875"},
{file = "lxml-5.3.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ace2c2326a319a0bb8a8b0e5b570c764962e95818de9f259ce814ee666603f19"},
{file = "lxml-5.3.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:92e67a0be1639c251d21e35fe74df6bcc40cba445c2cda7c4a967656733249e2"},
{file = "lxml-5.3.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd5350b55f9fecddc51385463a4f67a5da829bc741e38cf689f38ec9023f54ab"},
{file = "lxml-5.3.0-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:4c1fefd7e3d00921c44dc9ca80a775af49698bbfd92ea84498e56acffd4c5469"},
{file = "lxml-5.3.0-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:71a8dd38fbd2f2319136d4ae855a7078c69c9a38ae06e0c17c73fd70fc6caad8"},
{file = "lxml-5.3.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:97acf1e1fd66ab53dacd2c35b319d7e548380c2e9e8c54525c6e76d21b1ae3b1"},
{file = "lxml-5.3.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:68934b242c51eb02907c5b81d138cb977b2129a0a75a8f8b60b01cb8586c7b21"},
{file = "lxml-5.3.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b710bc2b8292966b23a6a0121f7a6c51d45d2347edcc75f016ac123b8054d3f2"},
{file = "lxml-5.3.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18feb4b93302091b1541221196a2155aa296c363fd233814fa11e181adebc52f"},
{file = "lxml-5.3.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:3eb44520c4724c2e1a57c0af33a379eee41792595023f367ba3952a2d96c2aab"},
{file = "lxml-5.3.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:609251a0ca4770e5a8768ff902aa02bf636339c5a93f9349b48eb1f606f7f3e9"},
{file = "lxml-5.3.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:516f491c834eb320d6c843156440fe7fc0d50b33e44387fcec5b02f0bc118a4c"},
{file = "lxml-5.3.0.tar.gz", hash = "sha256:4e109ca30d1edec1ac60cdbe341905dc3b8f55b16855e03a54aaf59e51ec8c6f"},
{file = "lxml-5.3.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a4058f16cee694577f7e4dd410263cd0ef75644b43802a689c2b3c2a7e69453b"},
{file = "lxml-5.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:364de8f57d6eda0c16dcfb999af902da31396949efa0e583e12675d09709881b"},
{file = "lxml-5.3.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:528f3a0498a8edc69af0559bdcf8a9f5a8bf7c00051a6ef3141fdcf27017bbf5"},
{file = "lxml-5.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:db4743e30d6f5f92b6d2b7c86b3ad250e0bad8dee4b7ad8a0c44bfb276af89a3"},
{file = "lxml-5.3.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:17b5d7f8acf809465086d498d62a981fa6a56d2718135bb0e4aa48c502055f5c"},
{file = "lxml-5.3.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:928e75a7200a4c09e6efc7482a1337919cc61fe1ba289f297827a5b76d8969c2"},
{file = "lxml-5.3.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a997b784a639e05b9d4053ef3b20c7e447ea80814a762f25b8ed5a89d261eac"},
{file = "lxml-5.3.1-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:7b82e67c5feb682dbb559c3e6b78355f234943053af61606af126df2183b9ef9"},
{file = "lxml-5.3.1-cp310-cp310-manylinux_2_28_ppc64le.whl", hash = "sha256:f1de541a9893cf8a1b1db9bf0bf670a2decab42e3e82233d36a74eda7822b4c9"},
{file = "lxml-5.3.1-cp310-cp310-manylinux_2_28_s390x.whl", hash = "sha256:de1fc314c3ad6bc2f6bd5b5a5b9357b8c6896333d27fdbb7049aea8bd5af2d79"},
{file = "lxml-5.3.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:7c0536bd9178f754b277a3e53f90f9c9454a3bd108b1531ffff720e082d824f2"},
{file = "lxml-5.3.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:68018c4c67d7e89951a91fbd371e2e34cd8cfc71f0bb43b5332db38497025d51"},
{file = "lxml-5.3.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:aa826340a609d0c954ba52fd831f0fba2a4165659ab0ee1a15e4aac21f302406"},
{file = "lxml-5.3.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:796520afa499732191e39fc95b56a3b07f95256f2d22b1c26e217fb69a9db5b5"},
{file = "lxml-5.3.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3effe081b3135237da6e4c4530ff2a868d3f80be0bda027e118a5971285d42d0"},
{file = "lxml-5.3.1-cp310-cp310-win32.whl", hash = "sha256:a22f66270bd6d0804b02cd49dae2b33d4341015545d17f8426f2c4e22f557a23"},
{file = "lxml-5.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:0bcfadea3cdc68e678d2b20cb16a16716887dd00a881e16f7d806c2138b8ff0c"},
{file = "lxml-5.3.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e220f7b3e8656ab063d2eb0cd536fafef396829cafe04cb314e734f87649058f"},
{file = "lxml-5.3.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0f2cfae0688fd01f7056a17367e3b84f37c545fb447d7282cf2c242b16262607"},
{file = "lxml-5.3.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:67d2f8ad9dcc3a9e826bdc7802ed541a44e124c29b7d95a679eeb58c1c14ade8"},
{file = "lxml-5.3.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:db0c742aad702fd5d0c6611a73f9602f20aec2007c102630c06d7633d9c8f09a"},
{file = "lxml-5.3.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:198bb4b4dd888e8390afa4f170d4fa28467a7eaf857f1952589f16cfbb67af27"},
{file = "lxml-5.3.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d2a3e412ce1849be34b45922bfef03df32d1410a06d1cdeb793a343c2f1fd666"},
{file = "lxml-5.3.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2b8969dbc8d09d9cd2ae06362c3bad27d03f433252601ef658a49bd9f2b22d79"},
{file = "lxml-5.3.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:5be8f5e4044146a69c96077c7e08f0709c13a314aa5315981185c1f00235fe65"},
{file = "lxml-5.3.1-cp311-cp311-manylinux_2_28_ppc64le.whl", hash = "sha256:133f3493253a00db2c870d3740bc458ebb7d937bd0a6a4f9328373e0db305709"},
{file = "lxml-5.3.1-cp311-cp311-manylinux_2_28_s390x.whl", hash = "sha256:52d82b0d436edd6a1d22d94a344b9a58abd6c68c357ed44f22d4ba8179b37629"},
{file = "lxml-5.3.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:1b6f92e35e2658a5ed51c6634ceb5ddae32053182851d8cad2a5bc102a359b33"},
{file = "lxml-5.3.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:203b1d3eaebd34277be06a3eb880050f18a4e4d60861efba4fb946e31071a295"},
{file = "lxml-5.3.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:155e1a5693cf4b55af652f5c0f78ef36596c7f680ff3ec6eb4d7d85367259b2c"},
{file = "lxml-5.3.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:22ec2b3c191f43ed21f9545e9df94c37c6b49a5af0a874008ddc9132d49a2d9c"},
{file = "lxml-5.3.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:7eda194dd46e40ec745bf76795a7cccb02a6a41f445ad49d3cf66518b0bd9cff"},
{file = "lxml-5.3.1-cp311-cp311-win32.whl", hash = "sha256:fb7c61d4be18e930f75948705e9718618862e6fc2ed0d7159b2262be73f167a2"},
{file = "lxml-5.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:c809eef167bf4a57af4b03007004896f5c60bd38dc3852fcd97a26eae3d4c9e6"},
{file = "lxml-5.3.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:e69add9b6b7b08c60d7ff0152c7c9a6c45b4a71a919be5abde6f98f1ea16421c"},
{file = "lxml-5.3.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:4e52e1b148867b01c05e21837586ee307a01e793b94072d7c7b91d2c2da02ffe"},
{file = "lxml-5.3.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a4b382e0e636ed54cd278791d93fe2c4f370772743f02bcbe431a160089025c9"},
{file = "lxml-5.3.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c2e49dc23a10a1296b04ca9db200c44d3eb32c8d8ec532e8c1fd24792276522a"},
{file = "lxml-5.3.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4399b4226c4785575fb20998dc571bc48125dc92c367ce2602d0d70e0c455eb0"},
{file = "lxml-5.3.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5412500e0dc5481b1ee9cf6b38bb3b473f6e411eb62b83dc9b62699c3b7b79f7"},
{file = "lxml-5.3.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c93ed3c998ea8472be98fb55aed65b5198740bfceaec07b2eba551e55b7b9ae"},
{file = "lxml-5.3.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:63d57fc94eb0bbb4735e45517afc21ef262991d8758a8f2f05dd6e4174944519"},
{file = "lxml-5.3.1-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:b450d7cabcd49aa7ab46a3c6aa3ac7e1593600a1a0605ba536ec0f1b99a04322"},
{file = "lxml-5.3.1-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:4df0ec814b50275ad6a99bc82a38b59f90e10e47714ac9871e1b223895825468"},
{file = "lxml-5.3.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:d184f85ad2bb1f261eac55cddfcf62a70dee89982c978e92b9a74a1bfef2e367"},
{file = "lxml-5.3.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b725e70d15906d24615201e650d5b0388b08a5187a55f119f25874d0103f90dd"},
{file = "lxml-5.3.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:a31fa7536ec1fb7155a0cd3a4e3d956c835ad0a43e3610ca32384d01f079ea1c"},
{file = "lxml-5.3.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:3c3c8b55c7fc7b7e8877b9366568cc73d68b82da7fe33d8b98527b73857a225f"},
{file = "lxml-5.3.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d61ec60945d694df806a9aec88e8f29a27293c6e424f8ff91c80416e3c617645"},
{file = "lxml-5.3.1-cp312-cp312-win32.whl", hash = "sha256:f4eac0584cdc3285ef2e74eee1513a6001681fd9753b259e8159421ed28a72e5"},
{file = "lxml-5.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:29bfc8d3d88e56ea0a27e7c4897b642706840247f59f4377d81be8f32aa0cfbf"},
{file = "lxml-5.3.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:c093c7088b40d8266f57ed71d93112bd64c6724d31f0794c1e52cc4857c28e0e"},
{file = "lxml-5.3.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b0884e3f22d87c30694e625b1e62e6f30d39782c806287450d9dc2fdf07692fd"},
{file = "lxml-5.3.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1637fa31ec682cd5760092adfabe86d9b718a75d43e65e211d5931809bc111e7"},
{file = "lxml-5.3.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a364e8e944d92dcbf33b6b494d4e0fb3499dcc3bd9485beb701aa4b4201fa414"},
{file = "lxml-5.3.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:779e851fd0e19795ccc8a9bb4d705d6baa0ef475329fe44a13cf1e962f18ff1e"},
{file = "lxml-5.3.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c4393600915c308e546dc7003d74371744234e8444a28622d76fe19b98fa59d1"},
{file = "lxml-5.3.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:673b9d8e780f455091200bba8534d5f4f465944cbdd61f31dc832d70e29064a5"},
{file = "lxml-5.3.1-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:2e4a570f6a99e96c457f7bec5ad459c9c420ee80b99eb04cbfcfe3fc18ec6423"},
{file = "lxml-5.3.1-cp313-cp313-manylinux_2_28_ppc64le.whl", hash = "sha256:71f31eda4e370f46af42fc9f264fafa1b09f46ba07bdbee98f25689a04b81c20"},
{file = "lxml-5.3.1-cp313-cp313-manylinux_2_28_s390x.whl", hash = "sha256:42978a68d3825eaac55399eb37a4d52012a205c0c6262199b8b44fcc6fd686e8"},
{file = "lxml-5.3.1-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:8b1942b3e4ed9ed551ed3083a2e6e0772de1e5e3aca872d955e2e86385fb7ff9"},
{file = "lxml-5.3.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:85c4f11be9cf08917ac2a5a8b6e1ef63b2f8e3799cec194417e76826e5f1de9c"},
{file = "lxml-5.3.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:231cf4d140b22a923b1d0a0a4e0b4f972e5893efcdec188934cc65888fd0227b"},
{file = "lxml-5.3.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:5865b270b420eda7b68928d70bb517ccbe045e53b1a428129bb44372bf3d7dd5"},
{file = "lxml-5.3.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:dbf7bebc2275016cddf3c997bf8a0f7044160714c64a9b83975670a04e6d2252"},
{file = "lxml-5.3.1-cp313-cp313-win32.whl", hash = "sha256:d0751528b97d2b19a388b302be2a0ee05817097bab46ff0ed76feeec24951f78"},
{file = "lxml-5.3.1-cp313-cp313-win_amd64.whl", hash = "sha256:91fb6a43d72b4f8863d21f347a9163eecbf36e76e2f51068d59cd004c506f332"},
{file = "lxml-5.3.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:016b96c58e9a4528219bb563acf1aaaa8bc5452e7651004894a973f03b84ba81"},
{file = "lxml-5.3.1-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82a4bb10b0beef1434fb23a09f001ab5ca87895596b4581fd53f1e5145a8934a"},
{file = "lxml-5.3.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d68eeef7b4d08a25e51897dac29bcb62aba830e9ac6c4e3297ee7c6a0cf6439"},
{file = "lxml-5.3.1-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:f12582b8d3b4c6be1d298c49cb7ae64a3a73efaf4c2ab4e37db182e3545815ac"},
{file = "lxml-5.3.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:2df7ed5edeb6bd5590914cd61df76eb6cce9d590ed04ec7c183cf5509f73530d"},
{file = "lxml-5.3.1-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:585c4dc429deebc4307187d2b71ebe914843185ae16a4d582ee030e6cfbb4d8a"},
{file = "lxml-5.3.1-cp36-cp36m-win32.whl", hash = "sha256:06a20d607a86fccab2fc15a77aa445f2bdef7b49ec0520a842c5c5afd8381576"},
{file = "lxml-5.3.1-cp36-cp36m-win_amd64.whl", hash = "sha256:057e30d0012439bc54ca427a83d458752ccda725c1c161cc283db07bcad43cf9"},
{file = "lxml-5.3.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4867361c049761a56bd21de507cab2c2a608c55102311d142ade7dab67b34f32"},
{file = "lxml-5.3.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3dddf0fb832486cc1ea71d189cb92eb887826e8deebe128884e15020bb6e3f61"},
{file = "lxml-5.3.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bcc211542f7af6f2dfb705f5f8b74e865592778e6cafdfd19c792c244ccce19"},
{file = "lxml-5.3.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aaca5a812f050ab55426c32177091130b1e49329b3f002a32934cd0245571307"},
{file = "lxml-5.3.1-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:236610b77589faf462337b3305a1be91756c8abc5a45ff7ca8f245a71c5dab70"},
{file = "lxml-5.3.1-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:aed57b541b589fa05ac248f4cb1c46cbb432ab82cbd467d1c4f6a2bdc18aecf9"},
{file = "lxml-5.3.1-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:75fa3d6946d317ffc7016a6fcc44f42db6d514b7fdb8b4b28cbe058303cb6e53"},
{file = "lxml-5.3.1-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:96eef5b9f336f623ffc555ab47a775495e7e8846dde88de5f941e2906453a1ce"},
{file = "lxml-5.3.1-cp37-cp37m-win32.whl", hash = "sha256:ef45f31aec9be01379fc6c10f1d9c677f032f2bac9383c827d44f620e8a88407"},
{file = "lxml-5.3.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0611da6b07dd3720f492db1b463a4d1175b096b49438761cc9f35f0d9eaaef5"},
{file = "lxml-5.3.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b2aca14c235c7a08558fe0a4786a1a05873a01e86b474dfa8f6df49101853a4e"},
{file = "lxml-5.3.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae82fce1d964f065c32c9517309f0c7be588772352d2f40b1574a214bd6e6098"},
{file = "lxml-5.3.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7aae7a3d63b935babfdc6864b31196afd5145878ddd22f5200729006366bc4d5"},
{file = "lxml-5.3.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8e0d177b1fe251c3b1b914ab64135475c5273c8cfd2857964b2e3bb0fe196a7"},
{file = "lxml-5.3.1-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:6c4dd3bfd0c82400060896717dd261137398edb7e524527438c54a8c34f736bf"},
{file = "lxml-5.3.1-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:f1208c1c67ec9e151d78aa3435aa9b08a488b53d9cfac9b699f15255a3461ef2"},
{file = "lxml-5.3.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:c6aacf00d05b38a5069826e50ae72751cb5bc27bdc4d5746203988e429b385bb"},
{file = "lxml-5.3.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:5881aaa4bf3a2d086c5f20371d3a5856199a0d8ac72dd8d0dbd7a2ecfc26ab73"},
{file = "lxml-5.3.1-cp38-cp38-win32.whl", hash = "sha256:45fbb70ccbc8683f2fb58bea89498a7274af1d9ec7995e9f4af5604e028233fc"},
{file = "lxml-5.3.1-cp38-cp38-win_amd64.whl", hash = "sha256:7512b4d0fc5339d5abbb14d1843f70499cab90d0b864f790e73f780f041615d7"},
{file = "lxml-5.3.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5885bc586f1edb48e5d68e7a4b4757b5feb2a496b64f462b4d65950f5af3364f"},
{file = "lxml-5.3.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1b92fe86e04f680b848fff594a908edfa72b31bfc3499ef7433790c11d4c8cd8"},
{file = "lxml-5.3.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a091026c3bf7519ab1e64655a3f52a59ad4a4e019a6f830c24d6430695b1cf6a"},
{file = "lxml-5.3.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8ffb141361108e864ab5f1813f66e4e1164181227f9b1f105b042729b6c15125"},
{file = "lxml-5.3.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3715cdf0dd31b836433af9ee9197af10e3df41d273c19bb249230043667a5dfd"},
{file = "lxml-5.3.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88b72eb7222d918c967202024812c2bfb4048deeb69ca328363fb8e15254c549"},
{file = "lxml-5.3.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa59974880ab5ad8ef3afaa26f9bda148c5f39e06b11a8ada4660ecc9fb2feb3"},
{file = "lxml-5.3.1-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:3bb8149840daf2c3f97cebf00e4ed4a65a0baff888bf2605a8d0135ff5cf764e"},
{file = "lxml-5.3.1-cp39-cp39-manylinux_2_28_ppc64le.whl", hash = "sha256:0d6b2fa86becfa81f0a0271ccb9eb127ad45fb597733a77b92e8a35e53414914"},
{file = "lxml-5.3.1-cp39-cp39-manylinux_2_28_s390x.whl", hash = "sha256:136bf638d92848a939fd8f0e06fcf92d9f2e4b57969d94faae27c55f3d85c05b"},
{file = "lxml-5.3.1-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:89934f9f791566e54c1d92cdc8f8fd0009447a5ecdb1ec6b810d5f8c4955f6be"},
{file = "lxml-5.3.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:a8ade0363f776f87f982572c2860cc43c65ace208db49c76df0a21dde4ddd16e"},
{file = "lxml-5.3.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:bfbbab9316330cf81656fed435311386610f78b6c93cc5db4bebbce8dd146675"},
{file = "lxml-5.3.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:172d65f7c72a35a6879217bcdb4bb11bc88d55fb4879e7569f55616062d387c2"},
{file = "lxml-5.3.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:e3c623923967f3e5961d272718655946e5322b8d058e094764180cdee7bab1af"},
{file = "lxml-5.3.1-cp39-cp39-win32.whl", hash = "sha256:ce0930a963ff593e8bb6fda49a503911accc67dee7e5445eec972668e672a0f0"},
{file = "lxml-5.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:f7b64fcd670bca8800bc10ced36620c6bbb321e7bc1214b9c0c0df269c1dddc2"},
{file = "lxml-5.3.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:afa578b6524ff85fb365f454cf61683771d0170470c48ad9d170c48075f86725"},
{file = "lxml-5.3.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67f5e80adf0aafc7b5454f2c1cb0cde920c9b1f2cbd0485f07cc1d0497c35c5d"},
{file = "lxml-5.3.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2dd0b80ac2d8f13ffc906123a6f20b459cb50a99222d0da492360512f3e50f84"},
{file = "lxml-5.3.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:422c179022ecdedbe58b0e242607198580804253da220e9454ffe848daa1cfd2"},
{file = "lxml-5.3.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:524ccfded8989a6595dbdda80d779fb977dbc9a7bc458864fc9a0c2fc15dc877"},
{file = "lxml-5.3.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:48fd46bf7155def2e15287c6f2b133a2f78e2d22cdf55647269977b873c65499"},
{file = "lxml-5.3.1-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:05123fad495a429f123307ac6d8fd6f977b71e9a0b6d9aeeb8f80c017cb17131"},
{file = "lxml-5.3.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a243132767150a44e6a93cd1dde41010036e1cbc63cc3e9fe1712b277d926ce3"},
{file = "lxml-5.3.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c92ea6d9dd84a750b2bae72ff5e8cf5fdd13e58dda79c33e057862c29a8d5b50"},
{file = "lxml-5.3.1-pp37-pypy37_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:2f1be45d4c15f237209bbf123a0e05b5d630c8717c42f59f31ea9eae2ad89394"},
{file = "lxml-5.3.1-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:a83d3adea1e0ee36dac34627f78ddd7f093bb9cfc0a8e97f1572a949b695cb98"},
{file = "lxml-5.3.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:3edbb9c9130bac05d8c3fe150c51c337a471cc7fdb6d2a0a7d3a88e88a829314"},
{file = "lxml-5.3.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2f23cf50eccb3255b6e913188291af0150d89dab44137a69e14e4dcb7be981f1"},
{file = "lxml-5.3.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df7e5edac4778127f2bf452e0721a58a1cfa4d1d9eac63bdd650535eb8543615"},
{file = "lxml-5.3.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:094b28ed8a8a072b9e9e2113a81fda668d2053f2ca9f2d202c2c8c7c2d6516b1"},
{file = "lxml-5.3.1-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:514fe78fc4b87e7a7601c92492210b20a1b0c6ab20e71e81307d9c2e377c64de"},
{file = "lxml-5.3.1-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:8fffc08de02071c37865a155e5ea5fce0282e1546fd5bde7f6149fcaa32558ac"},
{file = "lxml-5.3.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:4b0d5cdba1b655d5b18042ac9c9ff50bda33568eb80feaaca4fc237b9c4fbfde"},
{file = "lxml-5.3.1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:3031e4c16b59424e8d78522c69b062d301d951dc55ad8685736c3335a97fc270"},
{file = "lxml-5.3.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb659702a45136c743bc130760c6f137870d4df3a9e14386478b8a0511abcfca"},
{file = "lxml-5.3.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a11b16a33656ffc43c92a5343a28dc71eefe460bcc2a4923a96f292692709f6"},
{file = "lxml-5.3.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c5ae125276f254b01daa73e2c103363d3e99e3e10505686ac7d9d2442dd4627a"},
{file = "lxml-5.3.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c76722b5ed4a31ba103e0dc77ab869222ec36efe1a614e42e9bcea88a36186fe"},
{file = "lxml-5.3.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:33e06717c00c788ab4e79bc4726ecc50c54b9bfb55355eae21473c145d83c2d2"},
{file = "lxml-5.3.1.tar.gz", hash = "sha256:106b7b5d2977b339f1e97efe2778e2ab20e99994cbb0ec5e55771ed0795920c8"},
]
[package.extras]
cssselect = ["cssselect (>=0.7)"]
html-clean = ["lxml-html-clean"]
html-clean = ["lxml_html_clean"]
html5 = ["html5lib"]
htmlsoup = ["BeautifulSoup4"]
source = ["Cython (>=3.0.11)"]
source = ["Cython (>=3.0.11,<3.1.0)"]
[[package]]
name = "lz4"
@ -6074,15 +6036,15 @@ pydantic = ">=1.9,<3.0"
[[package]]
name = "posthog"
version = "3.11.0"
version = "3.13.0"
description = "Integrate PostHog into any python application."
optional = false
python-versions = "*"
groups = ["vdb"]
markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "posthog-3.11.0-py2.py3-none-any.whl", hash = "sha256:8cbd52c26bcdfbe65c4ea84a8090cfa2e046879d6b6d71da68e279a5b4aedb46"},
{file = "posthog-3.11.0.tar.gz", hash = "sha256:42a1f88cbcddeceaf6e8900a528db62d84fc56f6e5809f3d6dfb40e6f743091e"},
{file = "posthog-3.13.0-py2.py3-none-any.whl", hash = "sha256:0afd0132055a3da9c6b0ecf763e7f2ce2b66659ef16169883394d0835c30d501"},
{file = "posthog-3.13.0.tar.gz", hash = "sha256:54e9de232459846b1686a0cfb58acb02b7ccda379d837e1eb1c3af62c3775915"},
]
[package.dependencies]
@ -6093,10 +6055,10 @@ requests = ">=2.7,<3.0"
six = ">=1.5"
[package.extras]
dev = ["black", "flake8", "flake8-print", "isort", "pre-commit"]
dev = ["black", "django-stubs", "flake8", "flake8-print", "isort", "lxml", "mypy", "mypy-baseline", "pre-commit", "pydantic", "types-mock", "types-python-dateutil", "types-requests", "types-setuptools", "types-six"]
langchain = ["langchain (>=0.2.0)"]
sentry = ["django", "sentry-sdk"]
test = ["anthropic", "coverage", "django", "flake8", "freezegun (==0.3.15)", "langchain-anthropic (>=0.2.0)", "langchain-community (>=0.2.0)", "langchain-openai (>=0.2.0)", "langgraph", "mock (>=2.0.0)", "openai", "pydantic", "pylint", "pytest", "pytest-asyncio", "pytest-timeout"]
test = ["anthropic", "coverage", "django", "flake8", "freezegun (==1.5.1)", "langchain-anthropic (>=0.2.0)", "langchain-community (>=0.2.0)", "langchain-openai (>=0.2.0)", "langgraph", "mock (>=2.0.0)", "openai", "pydantic", "pylint", "pytest", "pytest-asyncio", "pytest-timeout"]
[[package]]
name = "prompt-toolkit"
@ -6250,34 +6212,27 @@ files = [
[[package]]
name = "psutil"
version = "6.1.1"
description = "Cross-platform lib for process and system monitoring in Python."
version = "7.0.0"
description = "Cross-platform lib for process and system monitoring in Python. NOTE: the syntax of this script MUST be kept compatible with Python 2.7."
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7"
python-versions = ">=3.6"
groups = ["main"]
markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "psutil-6.1.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:9ccc4316f24409159897799b83004cb1e24f9819b0dcf9c0b68bdcb6cefee6a8"},
{file = "psutil-6.1.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ca9609c77ea3b8481ab005da74ed894035936223422dc591d6772b147421f777"},
{file = "psutil-6.1.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:8df0178ba8a9e5bc84fed9cfa61d54601b371fbec5c8eebad27575f1e105c0d4"},
{file = "psutil-6.1.1-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:1924e659d6c19c647e763e78670a05dbb7feaf44a0e9c94bf9e14dfc6ba50468"},
{file = "psutil-6.1.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:018aeae2af92d943fdf1da6b58665124897cfc94faa2ca92098838f83e1b1bca"},
{file = "psutil-6.1.1-cp27-none-win32.whl", hash = "sha256:6d4281f5bbca041e2292be3380ec56a9413b790579b8e593b1784499d0005dac"},
{file = "psutil-6.1.1-cp27-none-win_amd64.whl", hash = "sha256:c777eb75bb33c47377c9af68f30e9f11bc78e0f07fbf907be4a5d70b2fe5f030"},
{file = "psutil-6.1.1-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:fc0ed7fe2231a444fc219b9c42d0376e0a9a1a72f16c5cfa0f68d19f1a0663e8"},
{file = "psutil-6.1.1-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:0bdd4eab935276290ad3cb718e9809412895ca6b5b334f5a9111ee6d9aff9377"},
{file = "psutil-6.1.1-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b6e06c20c05fe95a3d7302d74e7097756d4ba1247975ad6905441ae1b5b66003"},
{file = "psutil-6.1.1-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97f7cb9921fbec4904f522d972f0c0e1f4fabbdd4e0287813b21215074a0f160"},
{file = "psutil-6.1.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33431e84fee02bc84ea36d9e2c4a6d395d479c9dd9bba2376c1f6ee8f3a4e0b3"},
{file = "psutil-6.1.1-cp36-cp36m-win32.whl", hash = "sha256:384636b1a64b47814437d1173be1427a7c83681b17a450bfc309a1953e329603"},
{file = "psutil-6.1.1-cp36-cp36m-win_amd64.whl", hash = "sha256:8be07491f6ebe1a693f17d4f11e69d0dc1811fa082736500f649f79df7735303"},
{file = "psutil-6.1.1-cp37-abi3-win32.whl", hash = "sha256:eaa912e0b11848c4d9279a93d7e2783df352b082f40111e078388701fd479e53"},
{file = "psutil-6.1.1-cp37-abi3-win_amd64.whl", hash = "sha256:f35cfccb065fff93529d2afb4a2e89e363fe63ca1e4a5da22b603a85833c2649"},
{file = "psutil-6.1.1.tar.gz", hash = "sha256:cf8496728c18f2d0b45198f06895be52f36611711746b7f30c464b422b50e2f5"},
{file = "psutil-7.0.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:101d71dc322e3cffd7cea0650b09b3d08b8e7c4109dd6809fe452dfd00e58b25"},
{file = "psutil-7.0.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:39db632f6bb862eeccf56660871433e111b6ea58f2caea825571951d4b6aa3da"},
{file = "psutil-7.0.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1fcee592b4c6f146991ca55919ea3d1f8926497a713ed7faaf8225e174581e91"},
{file = "psutil-7.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b1388a4f6875d7e2aff5c4ca1cc16c545ed41dd8bb596cefea80111db353a34"},
{file = "psutil-7.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5f098451abc2828f7dc6b58d44b532b22f2088f4999a937557b603ce72b1993"},
{file = "psutil-7.0.0-cp36-cp36m-win32.whl", hash = "sha256:84df4eb63e16849689f76b1ffcb36db7b8de703d1bc1fe41773db487621b6c17"},
{file = "psutil-7.0.0-cp36-cp36m-win_amd64.whl", hash = "sha256:1e744154a6580bc968a0195fd25e80432d3afec619daf145b9e5ba16cc1d688e"},
{file = "psutil-7.0.0-cp37-abi3-win32.whl", hash = "sha256:ba3fcef7523064a6c9da440fc4d6bd07da93ac726b5733c29027d7dc95b39d99"},
{file = "psutil-7.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:4cf3d4eb1aa9b348dec30105c55cd9b7d4629285735a102beb4441e38db90553"},
{file = "psutil-7.0.0.tar.gz", hash = "sha256:7be9c3eba38beccb6495ea33afd982a44074b78f28c434a1f51cc07fd315c456"},
]
[package.extras]
dev = ["abi3audit", "black", "check-manifest", "coverage", "packaging", "pylint", "pyperf", "pypinfo", "pytest-cov", "requests", "rstcheck", "ruff", "sphinx", "sphinx_rtd_theme", "toml-sort", "twine", "virtualenv", "vulture", "wheel"]
dev = ["abi3audit", "black (==24.10.0)", "check-manifest", "coverage", "packaging", "pylint", "pyperf", "pypinfo", "pytest", "pytest-cov", "pytest-xdist", "requests", "rstcheck", "ruff", "setuptools", "sphinx", "sphinx_rtd_theme", "toml-sort", "twine", "virtualenv", "vulture", "wheel"]
test = ["pytest", "pytest-xdist", "setuptools"]
[[package]]
@ -6822,15 +6777,15 @@ diagrams = ["jinja2", "railroad-diagrams"]
[[package]]
name = "pypdf"
version = "5.2.0"
version = "5.3.0"
description = "A pure-python PDF library capable of splitting, merging, cropping, and transforming PDF files"
optional = false
python-versions = ">=3.8"
groups = ["main"]
markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "pypdf-5.2.0-py3-none-any.whl", hash = "sha256:d107962ec45e65e3bd10c1d9242bdbbedaa38193c9e3a6617bd6d996e5747b19"},
{file = "pypdf-5.2.0.tar.gz", hash = "sha256:7c38e68420f038f2c4998fd9d6717b6db4f6cef1642e9cf384d519c9cf094663"},
{file = "pypdf-5.3.0-py3-none-any.whl", hash = "sha256:d7b6db242f5f8fdb4990ae11815c394b8e1b955feda0befcce862efd8559c181"},
{file = "pypdf-5.3.0.tar.gz", hash = "sha256:08393660dfea25b27ec6fe863fb2f2248e6270da5103fae49e9dea8178741951"},
]
[package.extras]
@ -7153,19 +7108,19 @@ cli = ["click (>=5.0)"]
[[package]]
name = "python-iso639"
version = "2025.1.28"
version = "2025.2.8"
description = "ISO 639 language codes, names, and other associated information"
optional = false
python-versions = ">=3.9"
groups = ["main"]
markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "python_iso639-2025.1.28-py3-none-any.whl", hash = "sha256:66bcd88838727bc8ed1dfc9b5a354a27ed5c4bab8322473da81071fae265228b"},
{file = "python_iso639-2025.1.28.tar.gz", hash = "sha256:42b18bf52ad6ce5882c0e4acec9ae528310c7ef2966b776fc49d154c654580c5"},
{file = "python_iso639-2025.2.8-py3-none-any.whl", hash = "sha256:fc072f1f2007eae4a877778a73d7653c51020973e719b502e0d31e95a92c99d0"},
{file = "python_iso639-2025.2.8.tar.gz", hash = "sha256:94f27c0286fc81ff0e033d7b63d63a5967eef584b97f13289366bf178953f5d7"},
]
[package.extras]
dev = ["black (==24.10.0)", "build (==1.2.2)", "flake8 (==7.1.1)", "mypy (==1.14.1)", "pytest (==8.3.4)", "requests (==2.32.3)", "twine (==6.1.0)"]
dev = ["black (==25.1.0)", "build (==1.2.2)", "flake8 (==7.1.1)", "mypy (==1.15.0)", "pytest (==8.3.4)", "requests (==2.32.3)", "twine (==6.1.0)"]
[[package]]
name = "python-magic"
@ -7916,31 +7871,31 @@ pyasn1 = ">=0.1.3"
[[package]]
name = "ruff"
version = "0.9.5"
version = "0.9.6"
description = "An extremely fast Python linter and code formatter, written in Rust."
optional = false
python-versions = ">=3.7"
groups = ["lint"]
markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "ruff-0.9.5-py3-none-linux_armv6l.whl", hash = "sha256:d466d2abc05f39018d53f681fa1c0ffe9570e6d73cde1b65d23bb557c846f442"},
{file = "ruff-0.9.5-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:38840dbcef63948657fa7605ca363194d2fe8c26ce8f9ae12eee7f098c85ac8a"},
{file = "ruff-0.9.5-py3-none-macosx_11_0_arm64.whl", hash = "sha256:d56ba06da53536b575fbd2b56517f6f95774ff7be0f62c80b9e67430391eeb36"},
{file = "ruff-0.9.5-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f7cb2a01da08244c50b20ccfaeb5972e4228c3c3a1989d3ece2bc4b1f996001"},
{file = "ruff-0.9.5-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:96d5c76358419bc63a671caac70c18732d4fd0341646ecd01641ddda5c39ca0b"},
{file = "ruff-0.9.5-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:deb8304636ed394211f3a6d46c0e7d9535b016f53adaa8340139859b2359a070"},
{file = "ruff-0.9.5-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:df455000bf59e62b3e8c7ba5ed88a4a2bc64896f900f311dc23ff2dc38156440"},
{file = "ruff-0.9.5-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de92170dfa50c32a2b8206a647949590e752aca8100a0f6b8cefa02ae29dce80"},
{file = "ruff-0.9.5-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d28532d73b1f3f627ba88e1456f50748b37f3a345d2be76e4c653bec6c3e393"},
{file = "ruff-0.9.5-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c746d7d1df64f31d90503ece5cc34d7007c06751a7a3bbeee10e5f2463d52d2"},
{file = "ruff-0.9.5-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:11417521d6f2d121fda376f0d2169fb529976c544d653d1d6044f4c5562516ee"},
{file = "ruff-0.9.5-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:5b9d71c3879eb32de700f2f6fac3d46566f644a91d3130119a6378f9312a38e1"},
{file = "ruff-0.9.5-py3-none-musllinux_1_2_i686.whl", hash = "sha256:2e36c61145e70febcb78483903c43444c6b9d40f6d2f800b5552fec6e4a7bb9a"},
{file = "ruff-0.9.5-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:2f71d09aeba026c922aa7aa19a08d7bd27c867aedb2f74285a2639644c1c12f5"},
{file = "ruff-0.9.5-py3-none-win32.whl", hash = "sha256:134f958d52aa6fdec3b294b8ebe2320a950d10c041473c4316d2e7d7c2544723"},
{file = "ruff-0.9.5-py3-none-win_amd64.whl", hash = "sha256:78cc6067f6d80b6745b67498fb84e87d32c6fc34992b52bffefbdae3442967d6"},
{file = "ruff-0.9.5-py3-none-win_arm64.whl", hash = "sha256:18a29f1a005bddb229e580795627d297dfa99f16b30c7039e73278cf6b5f9fa9"},
{file = "ruff-0.9.5.tar.gz", hash = "sha256:11aecd7a633932875ab3cb05a484c99970b9d52606ce9ea912b690b02653d56c"},
{file = "ruff-0.9.6-py3-none-linux_armv6l.whl", hash = "sha256:2f218f356dd2d995839f1941322ff021c72a492c470f0b26a34f844c29cdf5ba"},
{file = "ruff-0.9.6-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b908ff4df65dad7b251c9968a2e4560836d8f5487c2f0cc238321ed951ea0504"},
{file = "ruff-0.9.6-py3-none-macosx_11_0_arm64.whl", hash = "sha256:b109c0ad2ececf42e75fa99dc4043ff72a357436bb171900714a9ea581ddef83"},
{file = "ruff-0.9.6-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1de4367cca3dac99bcbd15c161404e849bb0bfd543664db39232648dc00112dc"},
{file = "ruff-0.9.6-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac3ee4d7c2c92ddfdaedf0bf31b2b176fa7aa8950efc454628d477394d35638b"},
{file = "ruff-0.9.6-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5dc1edd1775270e6aa2386119aea692039781429f0be1e0949ea5884e011aa8e"},
{file = "ruff-0.9.6-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:4a091729086dffa4bd070aa5dab7e39cc6b9d62eb2bef8f3d91172d30d599666"},
{file = "ruff-0.9.6-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d1bbc6808bf7b15796cef0815e1dfb796fbd383e7dbd4334709642649625e7c5"},
{file = "ruff-0.9.6-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:589d1d9f25b5754ff230dce914a174a7c951a85a4e9270613a2b74231fdac2f5"},
{file = "ruff-0.9.6-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc61dd5131742e21103fbbdcad683a8813be0e3c204472d520d9a5021ca8b217"},
{file = "ruff-0.9.6-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:5e2d9126161d0357e5c8f30b0bd6168d2c3872372f14481136d13de9937f79b6"},
{file = "ruff-0.9.6-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:68660eab1a8e65babb5229a1f97b46e3120923757a68b5413d8561f8a85d4897"},
{file = "ruff-0.9.6-py3-none-musllinux_1_2_i686.whl", hash = "sha256:c4cae6c4cc7b9b4017c71114115db0445b00a16de3bcde0946273e8392856f08"},
{file = "ruff-0.9.6-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:19f505b643228b417c1111a2a536424ddde0db4ef9023b9e04a46ed8a1cb4656"},
{file = "ruff-0.9.6-py3-none-win32.whl", hash = "sha256:194d8402bceef1b31164909540a597e0d913c0e4952015a5b40e28c146121b5d"},
{file = "ruff-0.9.6-py3-none-win_amd64.whl", hash = "sha256:03482d5c09d90d4ee3f40d97578423698ad895c87314c4de39ed2af945633caa"},
{file = "ruff-0.9.6-py3-none-win_arm64.whl", hash = "sha256:0e2bb706a2be7ddfea4a4af918562fdc1bcb16df255e5fa595bbd800ce322a5a"},
{file = "ruff-0.9.6.tar.gz", hash = "sha256:81761592f72b620ec8fa1068a6fd00e98a5ebee342a3642efd84454f3031dca9"},
]
[[package]]
@ -9812,6 +9767,27 @@ files = [
{file = "wrapt-1.17.2.tar.gz", hash = "sha256:41388e9d4d1522446fe79d3213196bd9e3b301a336965b9e27ca2788ebd122f3"},
]
[[package]]
name = "xinference-client"
version = "1.2.2"
description = "Client for Xinference"
optional = false
python-versions = "*"
groups = ["vdb"]
markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "xinference-client-1.2.2.tar.gz", hash = "sha256:85d2ba0fcbaae616b06719c422364123cbac97f3e3c82e614095fe6d0e630ed0"},
{file = "xinference_client-1.2.2-py3-none-any.whl", hash = "sha256:6941d87cf61283a9d6e81cee6cb2609a183d34c6b7d808c6ba0c33437520518f"},
]
[package.dependencies]
pydantic = "*"
requests = "*"
typing-extensions = "*"
[package.extras]
dev = ["black", "cython (>=0.29)", "flake8 (>=3.8.0)", "ipython (>=6.5.0)", "pytest (>=3.5.0)", "pytest-asyncio (>=0.14.0)", "pytest-cov (>=2.5.0)", "pytest-forked (>=1.0)", "pytest-mock (>=3.11.1)", "pytest-timeout (>=1.2.0)"]
[[package]]
name = "xlrd"
version = "2.0.1"
@ -10168,4 +10144,4 @@ cffi = ["cffi (>=1.11)"]
[metadata]
lock-version = "2.1"
python-versions = ">=3.11,<3.13"
content-hash = "7bb979b6667b099ad0ba75a9cad287409c6d7739b595c05197581ad1c0d702ce"
content-hash = "a61e69fbd8c0306ed676d791b3783f8ba9914f871855543b265c4e7b5d2af0f9"


@ -134,6 +134,7 @@ tidb-vector = "0.0.9"
upstash-vector = "0.6.0"
volcengine-compat = "~1.0.156"
weaviate-client = "~3.21.0"
xinference-client = "~1.2.2"
############################################################
# [ Dev ] dependency group


@ -1,4 +1,5 @@
[pytest]
continue-on-collection-errors = true
env =
ANTHROPIC_API_KEY = sk-ant-api11-IamNotARealKeyJustForMockTestKawaiiiiiiiiii-NotBaka-ASkksz
AZURE_OPENAI_API_BASE = https://difyai-openai.openai.azure.com


@ -33,7 +33,6 @@ from models.account import (
TenantStatus,
)
from models.model import DifySetup
from models.staging import StagingAccountWhitelist
from services.billing_service import BillingService
from services.errors.account import (
AccountAlreadyInTenantError,
@ -349,9 +348,6 @@ class AccountService:
@staticmethod
def login(account: Account, *, ip_address: Optional[str] = None) -> TokenPair:
if not AccountService.verify_account_whitelist(account.email):
raise ValueError("Account is not whitelisted")
if ip_address:
AccountService.update_login_info(account=account, ip_address=ip_address)
@ -383,9 +379,6 @@ class AccountService:
if not account:
raise ValueError("Invalid account")
if not AccountService.verify_account_whitelist(account.email):
raise ValueError("Account is not whitelisted")
# Generate new access token and refresh token
new_access_token = AccountService.get_account_jwt_token(account)
new_refresh_token = _generate_refresh_token()
@ -395,16 +388,6 @@ class AccountService:
return TokenPair(access_token=new_access_token, refresh_token=new_refresh_token)
@staticmethod
def verify_account_whitelist(email: str) -> bool:
with Session(db.engine) as session:
return (
session.query(StagingAccountWhitelist)
.filter(StagingAccountWhitelist.email == email, StagingAccountWhitelist.disabled == False)
.first()
is not None
)
@staticmethod
def load_logged_in_account(*, account_id: str):
return AccountService.load_user(account_id)
@ -449,13 +432,9 @@ class AccountService:
def send_email_code_login_email(
cls, account: Optional[Account] = None, email: Optional[str] = None, language: Optional[str] = "en-US"
):
if email:
if not AccountService.verify_account_whitelist(email):
raise ValueError("Account is not whitelisted")
elif account:
if not AccountService.verify_account_whitelist(account.email):
raise ValueError("Account is not whitelisted")
email = account.email if account else email
if email is None:
raise ValueError("Email must be provided.")
if cls.email_code_login_rate_limiter.is_rate_limited(email):
from controllers.console.auth.error import EmailCodeLoginRateLimitExceededError


@ -47,7 +47,7 @@ class HitTestingService:
all_documents = RetrievalService.retrieve(
retrieval_method=retrieval_model.get("search_method", "semantic_search"),
dataset_id=dataset.id,
query=cls.escape_query_for_search(query),
query=query,
top_k=retrieval_model.get("top_k", 2),
score_threshold=retrieval_model.get("score_threshold", 0.0)
if retrieval_model["score_threshold_enabled"]


@ -46,6 +46,8 @@ class MessageService:
app_model=app_model, user=user, conversation_id=conversation_id
)
fetch_limit = limit + 1
if first_id:
first_message = (
db.session.query(Message)
@ -64,7 +66,7 @@ class MessageService:
Message.id != first_message.id,
)
.order_by(Message.created_at.desc())
.limit(limit)
.limit(fetch_limit)
.all()
)
else:
@ -72,25 +74,14 @@ class MessageService:
db.session.query(Message)
.filter(Message.conversation_id == conversation.id)
.order_by(Message.created_at.desc())
.limit(limit)
.limit(fetch_limit)
.all()
)
has_more = False
if len(history_messages) == limit:
current_page_first_message = history_messages[-1]
rest_count = (
db.session.query(Message)
.filter(
Message.conversation_id == conversation.id,
Message.created_at < current_page_first_message.created_at,
Message.id != current_page_first_message.id,
)
.count()
)
if rest_count > 0:
has_more = True
if len(history_messages) > limit:
has_more = True
history_messages = history_messages[:-1]
if order == "asc":
history_messages = list(reversed(history_messages))
@ -112,6 +103,8 @@ class MessageService:
base_query = db.session.query(Message)
fetch_limit = limit + 1
if conversation_id is not None:
conversation = ConversationService.get_conversation(
app_model=app_model, user=user, conversation_id=conversation_id
@ -131,21 +124,16 @@ class MessageService:
history_messages = (
base_query.filter(Message.created_at < last_message.created_at, Message.id != last_message.id)
.order_by(Message.created_at.desc())
.limit(limit)
.limit(fetch_limit)
.all()
)
else:
history_messages = base_query.order_by(Message.created_at.desc()).limit(limit).all()
history_messages = base_query.order_by(Message.created_at.desc()).limit(fetch_limit).all()
has_more = False
if len(history_messages) == limit:
current_page_first_message = history_messages[-1]
rest_count = base_query.filter(
Message.created_at < current_page_first_message.created_at, Message.id != current_page_first_message.id
).count()
if rest_count > 0:
has_more = True
if len(history_messages) > limit:
has_more = True
history_messages = history_messages[:-1]
return InfiniteScrollPagination(data=history_messages, limit=limit, has_more=has_more)
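Note: the two hunks above replace the old `count()`-based has-more check with the standard fetch-one-more pattern — query `limit + 1` rows and treat the presence of an extra row as proof of another page, saving a round trip per request. A minimal self-contained sketch of the idea (`paginate` and its plain-list input are illustrative stand-ins for the SQLAlchemy query, not Dify code):

```python
def paginate(rows: list, limit: int) -> tuple[list, bool]:
    """Fetch-one-more pagination: ask the store for limit + 1 rows.

    If more than `limit` rows come back, a further page exists and the
    sentinel row is dropped before returning.
    """
    fetched = rows[: limit + 1]  # stands in for .limit(limit + 1).all()
    has_more = len(fetched) > limit
    return (fetched[:limit] if has_more else fetched), has_more


# Usage: three rows with a page size of two -> first page has more.
assert paginate([1, 2, 3], 2) == ([1, 2], True)
assert paginate([1, 2], 2) == ([1, 2], False)
```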


@ -1,5 +1,5 @@
from core.helper import marketplace
from core.plugin.entities.plugin import GenericProviderID, PluginDependency, PluginInstallationSource
from core.plugin.entities.plugin import ModelProviderID, PluginDependency, PluginInstallationSource, ToolProviderID
from core.plugin.manager.plugin import PluginInstallationManager
@ -12,10 +12,7 @@ class DependenciesAnalysisService:
Convert the tool id to the plugin_id
"""
try:
tool_provider_id = GenericProviderID(tool_id)
if tool_id in ["jina", "siliconflow"]:
tool_provider_id.plugin_name = tool_provider_id.plugin_name + "_tool"
return tool_provider_id.plugin_id
return ToolProviderID(tool_id).plugin_id
except Exception as e:
raise e
@ -27,11 +24,7 @@ class DependenciesAnalysisService:
Convert the model provider id to the plugin_id
"""
try:
generic_provider_id = GenericProviderID(model_provider_id)
if model_provider_id == "google":
generic_provider_id.plugin_name = "gemini"
return generic_provider_id.plugin_id
return ModelProviderID(model_provider_id).plugin_id
except Exception as e:
raise e
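Note: this hunk (like the PluginMigration changes below) centralizes the provider-name special cases that were previously repeated at every call site. A minimal sketch of what the new ID classes presumably encapsulate — the class body is illustrative, not the actual `core.plugin.entities.plugin` implementation; `ModelProviderID` would analogously map the bare "google" provider to the "gemini" plugin, per the code removed above:

```python
class ToolProviderID:
    """Illustrative sketch: normalize a bare tool provider name into a
    fully qualified plugin id, folding in the legacy aliases that each
    caller used to handle itself."""

    _ALIASES = {"jina": "jina_tool", "siliconflow": "siliconflow_tool", "stepfun": "stepfun_tool"}

    def __init__(self, value: str) -> None:
        if "/" not in value:  # bare name -> assume the default org namespace
            name = self._ALIASES.get(value, value)
            value = f"langgenius/{name}/{name}"
        self._full = value
        organization, plugin_name, _provider = value.split("/")
        self.plugin_id = f"{organization}/{plugin_name}"

    def __str__(self) -> str:
        return self._full


assert ToolProviderID("jina").plugin_id == "langgenius/jina_tool"
assert ToolProviderID("langgenius/google/google").plugin_id == "langgenius/google"
```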


@ -1,7 +1,6 @@
import datetime
import json
import logging
import sys
import time
from collections.abc import Mapping, Sequence
from concurrent.futures import ThreadPoolExecutor
@ -15,9 +14,8 @@ from flask import Flask, current_app
from sqlalchemy.orm import Session
from core.agent.entities import AgentToolEntity
from core.entities import DEFAULT_PLUGIN_ID
from core.helper import marketplace
from core.plugin.entities.plugin import PluginInstallationSource
from core.plugin.entities.plugin import ModelProviderID, PluginInstallationSource, ToolProviderID
from core.plugin.entities.plugin_daemon import PluginInstallTaskStatus
from core.plugin.manager.plugin import PluginInstallationManager
from core.tools.entities.tool_entities import ToolProviderType
@ -38,7 +36,6 @@ class PluginMigration:
"""
Migrate plugin.
"""
import concurrent.futures
from threading import Lock
click.echo(click.style("Migrating models/tools to new plugin Mechanism", fg="white"))
@ -55,7 +52,7 @@ class PluginMigration:
file_lock = Lock()
counter_lock = Lock()
thread_pool = concurrent.futures.ThreadPoolExecutor(max_workers=workers)
thread_pool = ThreadPoolExecutor(max_workers=workers)
def process_tenant(flask_app: Flask, tenant_id: str) -> None:
with flask_app.app_context():
@ -205,13 +202,7 @@ class PluginMigration:
result = []
for row in rs:
provider_name = str(row[0])
if provider_name and "/" not in provider_name:
if provider_name == "google":
provider_name = "gemini"
result.append(DEFAULT_PLUGIN_ID + "/" + provider_name)
elif provider_name:
result.append(provider_name)
result.append(ModelProviderID(provider_name).plugin_id)
return result
@ -224,30 +215,10 @@ class PluginMigration:
rs = session.query(BuiltinToolProvider).filter(BuiltinToolProvider.tenant_id == tenant_id).all()
result = []
for row in rs:
if "/" not in row.provider:
result.append(DEFAULT_PLUGIN_ID + "/" + row.provider)
else:
result.append(row.provider)
result.append(ToolProviderID(row.provider).plugin_id)
return result
@classmethod
def _handle_builtin_tool_provider(cls, provider_name: str) -> str:
"""
Handle builtin tool provider.
"""
if provider_name == "jina":
provider_name = "jina_tool"
elif provider_name == "siliconflow":
provider_name = "siliconflow_tool"
elif provider_name == "stepfun":
provider_name = "stepfun_tool"
if "/" not in provider_name:
return DEFAULT_PLUGIN_ID + "/" + provider_name
else:
return provider_name
@classmethod
def extract_workflow_tables(cls, tenant_id: str) -> Sequence[str]:
"""
@ -268,8 +239,7 @@ class PluginMigration:
provider_name = data.get("provider_name")
provider_type = data.get("provider_type")
if provider_name not in excluded_providers and provider_type == ToolProviderType.BUILT_IN.value:
provider_name = cls._handle_builtin_tool_provider(provider_name)
result.append(provider_name)
result.append(ToolProviderID(provider_name).plugin_id)
return result
@ -300,7 +270,7 @@ class PluginMigration:
tool_entity.provider_type == ToolProviderType.BUILT_IN.value
and tool_entity.provider_id not in excluded_providers
):
result.append(cls._handle_builtin_tool_provider(tool_entity.provider_id))
result.append(ToolProviderID(tool_entity.provider_id).plugin_id)
except Exception:
logger.exception(f"Failed to process tool {tool}")
@ -341,10 +311,14 @@ class PluginMigration:
plugin_ids.append(plugin_id)
def fetch_plugin(plugin_id):
unique_identifier = cls._fetch_plugin_unique_identifier(plugin_id)
if unique_identifier:
plugins[plugin_id] = unique_identifier
else:
try:
unique_identifier = cls._fetch_plugin_unique_identifier(plugin_id)
if unique_identifier:
plugins[plugin_id] = unique_identifier
else:
plugin_not_exist.append(plugin_id)
except Exception:
logger.exception(f"Failed to fetch plugin unique identifier for {plugin_id}")
plugin_not_exist.append(plugin_id)
with ThreadPoolExecutor(max_workers=10) as executor:
@ -353,7 +327,7 @@ class PluginMigration:
return {"plugins": plugins, "plugin_not_exist": plugin_not_exist}
@classmethod
def install_plugins(cls, extracted_plugins: str, output_file: str) -> None:
def install_plugins(cls, extracted_plugins: str, output_file: str, workers: int = 100) -> None:
"""
Install plugins.
"""
@ -367,7 +341,7 @@ class PluginMigration:
fake_tenant_id = uuid4().hex
logger.info(f"Installing {len(plugins['plugins'])} plugin instances for fake tenant {fake_tenant_id}")
thread_pool = ThreadPoolExecutor(max_workers=40)
thread_pool = ThreadPoolExecutor(max_workers=workers)
response = cls.handle_plugin_instance_install(fake_tenant_id, plugins["plugins"])
if response.get("failed"):
@ -375,10 +349,17 @@ class PluginMigration:
def install(tenant_id: str, plugin_ids: list[str]) -> None:
logger.info(f"Installing {len(plugin_ids)} plugins for tenant {tenant_id}")
# fetch plugin already installed
installed_plugins = manager.list_plugins(tenant_id)
installed_plugins_ids = [plugin.plugin_id for plugin in installed_plugins]
# at most 64 plugins one batch
for i in range(0, len(plugin_ids), 64):
batch_plugin_ids = plugin_ids[i : i + 64]
batch_plugin_identifiers = [plugins["plugins"][plugin_id] for plugin_id in batch_plugin_ids]
batch_plugin_identifiers = [
plugins["plugins"][plugin_id]
for plugin_id in batch_plugin_ids
if plugin_id not in installed_plugins_ids and plugin_id in plugins["plugins"]
]
manager.install_from_identifiers(
tenant_id,
batch_plugin_identifiers,
@ -418,8 +399,6 @@ class PluginMigration:
logger.info("Uninstall plugins")
sys.exit(-1)
# get installation
try:
installation = manager.list_plugins(fake_tenant_id)
@ -475,9 +454,9 @@ class PluginMigration:
reverse_map = {v: k for k, v in plugin_identifiers_map.items()}
# at most 64 plugins one batch
for i in range(0, len(plugin_identifiers_map), 64):
batch_plugin_ids = list(plugin_identifiers_map.keys())[i : i + 64]
# at most 8 plugins one batch
for i in range(0, len(plugin_identifiers_map), 8):
batch_plugin_ids = list(plugin_identifiers_map.keys())[i : i + 8]
batch_plugin_identifiers = [plugin_identifiers_map[plugin_id] for plugin_id in batch_plugin_ids]
try:


@ -7,7 +7,7 @@ from sqlalchemy.orm import Session
from configs import dify_config
from core.helper.position_helper import is_filtered
from core.model_runtime.utils.encoders import jsonable_encoder
from core.plugin.entities.plugin import GenericProviderID
from core.plugin.entities.plugin import GenericProviderID, ToolProviderID
from core.plugin.manager.exc import PluginDaemonClientSideError
from core.tools.builtin_tool.providers._positions import BuiltinToolProviderSort
from core.tools.entities.api_entities import ToolApiEntity, ToolProviderApiEntity
@ -240,10 +240,7 @@ class BuiltinToolManageService:
# rewrite db_providers
for db_provider in db_providers:
try:
GenericProviderID(db_provider.provider)
except Exception:
db_provider.provider = f"langgenius/{db_provider.provider}/{db_provider.provider}"
db_provider.provider = str(ToolProviderID(db_provider.provider))
# find provider
def find_provider(provider):
@ -258,7 +255,7 @@ class BuiltinToolManageService:
include_set=dify_config.POSITION_TOOL_INCLUDES_SET, # type: ignore
exclude_set=dify_config.POSITION_TOOL_EXCLUDES_SET, # type: ignore
data=provider_controller,
name_func=lambda x: x.entity.identity.name,
name_func=lambda x: x.identity.name,
):
continue


@ -54,7 +54,7 @@ class ToolTransformService:
@staticmethod
def repack_provider(tenant_id: str, provider: Union[dict, ToolProviderApiEntity]):
"""
repack provider
repack provider
:param provider: the provider dict
"""


@ -0,0 +1,59 @@
import time

import pymysql


def check_tiflash_ready() -> bool:
    connection = None
    try:
        connection = pymysql.connect(
            host="localhost",
            port=4000,
            user="root",
            password="",
        )
        with connection.cursor() as cursor:
            # Doc reference:
            # https://docs.pingcap.com/zh/tidb/stable/information-schema-cluster-hardware
            select_tiflash_query = """
                SELECT * FROM information_schema.cluster_hardware
                WHERE TYPE='tiflash'
                LIMIT 1;
            """
            cursor.execute(select_tiflash_query)
            result = cursor.fetchall()
            return result is not None and len(result) > 0
    except Exception as e:
        print(f"TiFlash is not ready. Exception: {e}")
        return False
    finally:
        # connection stays None if connect() itself failed
        if connection:
            connection.close()


def main():
    max_attempts = 30
    retry_interval_seconds = 2
    is_tiflash_ready = False
    for attempt in range(max_attempts):
        try:
            is_tiflash_ready = check_tiflash_ready()
        except Exception as e:
            print(f"TiFlash is not ready. Exception: {e}")
            is_tiflash_ready = False
        if is_tiflash_ready:
            break
        else:
            print(f"Attempt {attempt + 1} failed, retrying in {retry_interval_seconds} seconds...")
            time.sleep(retry_interval_seconds)

    if is_tiflash_ready:
        print("TiFlash is ready in TiDB.")
    else:
        print(f"TiFlash is not ready in TiDB after {max_attempts} attempts.")
        exit(1)


if __name__ == "__main__":
    main()


@ -68,8 +68,7 @@ def test_executor_with_json_body_and_object_variable():
system_variables={},
user_inputs={},
)
variable_pool.add(["pre_node_id", "object"], {
"name": "John Doe", "age": 30, "email": "john@example.com"})
variable_pool.add(["pre_node_id", "object"], {"name": "John Doe", "age": 30, "email": "john@example.com"})
# Prepare the node data
node_data = HttpRequestNodeData(
@ -124,8 +123,7 @@ def test_executor_with_json_body_and_nested_object_variable():
system_variables={},
user_inputs={},
)
variable_pool.add(["pre_node_id", "object"], {
"name": "John Doe", "age": 30, "email": "john@example.com"})
variable_pool.add(["pre_node_id", "object"], {"name": "John Doe", "age": 30, "email": "john@example.com"})
# Prepare the node data
node_data = HttpRequestNodeData(


@ -18,14 +18,6 @@ from models.enums import UserFrom
from models.workflow import WorkflowNodeExecutionStatus, WorkflowType
def test_plain_text_to_dict():
assert _plain_text_to_dict("aa\n cc:") == {"aa": "", "cc": ""}
assert _plain_text_to_dict("aa:bb\n cc:dd") == {"aa": "bb", "cc": "dd"}
assert _plain_text_to_dict("aa:bb\n cc:dd\n") == {"aa": "bb", "cc": "dd"}
assert _plain_text_to_dict("aa:bb\n\n cc : dd\n\n") == {
"aa": "bb", "cc": "dd"}
def test_http_request_node_binary_file(monkeypatch):
data = HttpRequestNodeData(
title="test",
@ -191,8 +183,7 @@ def test_http_request_node_form_with_file(monkeypatch):
def attr_checker(*args, **kwargs):
assert kwargs["data"] == {"name": "test"}
assert kwargs["files"] == {
"file": (None, b"test", "application/octet-stream")}
assert kwargs["files"] == {"file": (None, b"test", "application/octet-stream")}
return httpx.Response(200, content=b"")
monkeypatch.setattr(


@ -8,7 +8,7 @@ from core.variables.variables import StringVariable
from core.workflow.entities.node_entities import NodeRunResult
from core.workflow.nodes.document_extractor import DocumentExtractorNode, DocumentExtractorNodeData
from core.workflow.nodes.document_extractor.node import (
_extract_text_from_doc,
_extract_text_from_docx,
_extract_text_from_pdf,
_extract_text_from_plain_text,
)
@ -120,7 +120,7 @@ def test_run_extract_text(
monkeypatch.setattr("core.workflow.nodes.document_extractor.node._extract_text_from_pdf", mock_pdf_extract)
elif mime_type.startswith("application/vnd.openxmlformats"):
mock_docx_extract = Mock(return_value=expected_text[0])
monkeypatch.setattr("core.workflow.nodes.document_extractor.node._extract_text_from_doc", mock_docx_extract)
monkeypatch.setattr("core.workflow.nodes.document_extractor.node._extract_text_from_docx", mock_docx_extract)
result = document_extractor_node._run()
@ -163,14 +163,14 @@ def test_extract_text_from_pdf(mock_pdf_document):
@patch("docx.Document")
def test_extract_text_from_doc(mock_document):
def test_extract_text_from_docx(mock_document):
mock_paragraph1 = Mock()
mock_paragraph1.text = "Paragraph 1"
mock_paragraph2 = Mock()
mock_paragraph2.text = "Paragraph 2"
mock_document.return_value.paragraphs = [mock_paragraph1, mock_paragraph2]
text = _extract_text_from_doc(b"PK\x03\x04")
text = _extract_text_from_docx(b"PK\x03\x04")
assert text == "Paragraph 1\nParagraph 2"


@ -887,6 +887,10 @@ SSRF_HTTP_PORT=3128
SSRF_COREDUMP_DIR=/var/spool/squid
SSRF_REVERSE_PROXY_PORT=8194
SSRF_SANDBOX_HOST=sandbox
SSRF_DEFAULT_TIME_OUT=5
SSRF_DEFAULT_CONNECT_TIME_OUT=5
SSRF_DEFAULT_READ_TIME_OUT=5
SSRF_DEFAULT_WRITE_TIME_OUT=5
# ------------------------------
# docker env var for specifying vector db type at startup
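Note: the four `SSRF_DEFAULT_*` variables added above plumb per-phase request timeouts into the SSRF proxy's HTTP client. A hedged sketch of how such values are commonly consumed with httpx — the helper is illustrative, not the actual Dify `ssrf_proxy` module; only the variable names and 5-second defaults come from the diff:

```python
import os

import httpx


def make_ssrf_timeout() -> httpx.Timeout:
    """Build an httpx.Timeout from the SSRF_* env vars shown above.

    Each knob falls back to the 5-second default set in the compose file.
    """
    default = float(os.getenv("SSRF_DEFAULT_TIME_OUT", "5"))
    return httpx.Timeout(
        default,
        connect=float(os.getenv("SSRF_DEFAULT_CONNECT_TIME_OUT", "5")),
        read=float(os.getenv("SSRF_DEFAULT_READ_TIME_OUT", "5")),
        write=float(os.getenv("SSRF_DEFAULT_WRITE_TIME_OUT", "5")),
    )


# e.g. client = httpx.Client(timeout=make_ssrf_timeout())
```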


@ -2,7 +2,7 @@ x-shared-env: &shared-api-worker-env
services:
# API service
api:
image: langgenius/dify-api:1.0.0
image: langgenius/dify-api:0.15.3
restart: always
environment:
# Use the shared environment variables.
@ -27,7 +27,7 @@ services:
# worker service
# The Celery worker for processing the queue.
worker:
image: langgenius/dify-api:1.0.0
image: langgenius/dify-api:0.15.3
restart: always
environment:
# Use the shared environment variables.
@ -51,7 +51,7 @@ services:
# Frontend web application.
web:
image: langgenius/dify-web:1.0.0
image: langgenius/dify-web:0.15.3
restart: always
environment:
CONSOLE_API_URL: ${CONSOLE_API_URL:-}
@ -64,6 +64,7 @@ services:
MARKETPLACE_URL: ${MARKETPLACE_URL:-https://marketplace.dify.ai}
TOP_K_MAX_VALUE: ${TOP_K_MAX_VALUE:-}
INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH: ${INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH:-}
PM2_INSTANCES: ${PM2_INSTANCES:-2}
# The postgres database.
db:
@ -128,7 +129,7 @@ services:
# plugin daemon
plugin_daemon:
image: langgenius/dify-plugin-daemon:0.0.1-local
image: langgenius/dify-plugin-daemon:0.0.2-local
restart: always
environment:
# Use the shared environment variables.
@ -231,16 +232,6 @@ services:
- '${EXPOSE_NGINX_PORT:-80}:${NGINX_PORT:-80}'
- '${EXPOSE_NGINX_SSL_PORT:-443}:${NGINX_SSL_PORT:-443}'
# The TiDB vector store.
# For production use, please refer to https://github.com/pingcap/tidb-docker-compose
tidb:
image: pingcap/tidb:v8.4.0
profiles:
- tidb
command:
- --store=unistore
restart: always
# The Weaviate vector store.
weaviate:
image: semitechnologies/weaviate:1.19.0


@ -66,7 +66,7 @@ services:
# plugin daemon
plugin_daemon:
image: langgenius/dify-plugin-daemon:0.0.1-local
image: langgenius/dify-plugin-daemon:0.0.2-local
restart: always
environment:
# Use the shared environment variables.


@ -377,6 +377,10 @@ x-shared-env: &shared-api-worker-env
SSRF_COREDUMP_DIR: ${SSRF_COREDUMP_DIR:-/var/spool/squid}
SSRF_REVERSE_PROXY_PORT: ${SSRF_REVERSE_PROXY_PORT:-8194}
SSRF_SANDBOX_HOST: ${SSRF_SANDBOX_HOST:-sandbox}
SSRF_DEFAULT_TIME_OUT: ${SSRF_DEFAULT_TIME_OUT:-5}
SSRF_DEFAULT_CONNECT_TIME_OUT: ${SSRF_DEFAULT_CONNECT_TIME_OUT:-5}
SSRF_DEFAULT_READ_TIME_OUT: ${SSRF_DEFAULT_READ_TIME_OUT:-5}
SSRF_DEFAULT_WRITE_TIME_OUT: ${SSRF_DEFAULT_WRITE_TIME_OUT:-5}
EXPOSE_NGINX_PORT: ${EXPOSE_NGINX_PORT:-80}
EXPOSE_NGINX_SSL_PORT: ${EXPOSE_NGINX_SSL_PORT:-443}
POSITION_TOOL_PINS: ${POSITION_TOOL_PINS:-}
@ -410,7 +414,7 @@ x-shared-env: &shared-api-worker-env
services:
# API service
api:
image: langgenius/dify-api:1.0.0
image: langgenius/dify-api:0.15.3
restart: always
environment:
# Use the shared environment variables.
@ -435,7 +439,7 @@ services:
# worker service
# The Celery worker for processing the queue.
worker:
image: langgenius/dify-api:1.0.0
image: langgenius/dify-api:0.15.3
restart: always
environment:
# Use the shared environment variables.
@ -459,13 +463,11 @@ services:
# Frontend web application.
web:
image: langgenius/dify-web:1.0.0
image: langgenius/dify-web:0.15.3
restart: always
environment:
CONSOLE_API_URL: ${CONSOLE_API_URL:-}
APP_API_URL: ${APP_API_URL:-}
MARKETPLACE_API_URL: ${MARKETPLACE_API_URL:-}
MARKETPLACE_URL: ${MARKETPLACE_URL:-}
SENTRY_DSN: ${WEB_SENTRY_DSN:-}
NEXT_TELEMETRY_DISABLED: ${NEXT_TELEMETRY_DISABLED:-0}
TEXT_GENERATION_TIMEOUT_MS: ${TEXT_GENERATION_TIMEOUT_MS:-60000}
@ -474,6 +476,7 @@ services:
MARKETPLACE_URL: ${MARKETPLACE_URL:-https://marketplace.dify.ai}
TOP_K_MAX_VALUE: ${TOP_K_MAX_VALUE:-}
INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH: ${INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH:-}
PM2_INSTANCES: ${PM2_INSTANCES:-2}
# The postgres database.
db:
@ -538,7 +541,7 @@ services:
# plugin daemon
plugin_daemon:
image: langgenius/dify-plugin-daemon:0.0.1-local
image: langgenius/dify-plugin-daemon:0.0.2-local
restart: always
environment:
# Use the shared environment variables.
@ -641,16 +644,6 @@ services:
- '${EXPOSE_NGINX_PORT:-80}:${NGINX_PORT:-80}'
- '${EXPOSE_NGINX_SSL_PORT:-443}:${NGINX_SSL_PORT:-443}'
# The TiDB vector store.
# For production use, please refer to https://github.com/pingcap/tidb-docker-compose
tidb:
image: pingcap/tidb:v8.4.0
profiles:
- tidb
command:
- --store=unistore
restart: always
# The Weaviate vector store.
weaviate:
image: semitechnologies/weaviate:1.19.0


@ -31,6 +31,7 @@ server {
location /e {
proxy_pass http://plugin_daemon:5002;
proxy_set_header Dify-Hook-Url $scheme://$host$request_uri;
include proxy.conf;
}


@ -3,6 +3,7 @@
proxy_set_header Host $host;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header X-Forwarded-Port $server_port;
proxy_http_version 1.1;
proxy_set_header Connection "";
proxy_buffering off;


@ -0,0 +1,4 @@
# PD Configuration File reference:
# https://docs.pingcap.com/tidb/stable/pd-configuration-file#pd-configuration-file
[replication]
max-replicas = 1


@ -0,0 +1,13 @@
# TiFlash tiflash-learner.toml Configuration File reference:
# https://docs.pingcap.com/tidb/stable/tiflash-configuration#configure-the-tiflash-learnertoml-file
log-file = "/logs/tiflash_tikv.log"
[server]
engine-addr = "tiflash:4030"
addr = "0.0.0.0:20280"
advertise-addr = "tiflash:20280"
status-addr = "tiflash:20292"
[storage]
data-dir = "/data/flash"


@ -0,0 +1,19 @@
# TiFlash tiflash.toml Configuration File reference:
# https://docs.pingcap.com/tidb/stable/tiflash-configuration#configure-the-tiflashtoml-file
listen_host = "0.0.0.0"
path = "/data"
[flash]
tidb_status_addr = "tidb:10080"
service_addr = "tiflash:4030"
[flash.proxy]
config = "/tiflash-learner.toml"
[logger]
errorlog = "/logs/tiflash_error.log"
log = "/logs/tiflash.log"
[raft]
pd_addr = "pd0:2379"


@ -0,0 +1,62 @@
services:
pd0:
image: pingcap/pd:v8.5.1
# ports:
# - "2379"
volumes:
- ./config/pd.toml:/pd.toml:ro
- ./volumes/data:/data
- ./volumes/logs:/logs
command:
- --name=pd0
- --client-urls=http://0.0.0.0:2379
- --peer-urls=http://0.0.0.0:2380
- --advertise-client-urls=http://pd0:2379
- --advertise-peer-urls=http://pd0:2380
- --initial-cluster=pd0=http://pd0:2380
- --data-dir=/data/pd
- --config=/pd.toml
- --log-file=/logs/pd.log
restart: on-failure
tikv:
image: pingcap/tikv:v8.5.1
volumes:
- ./volumes/data:/data
- ./volumes/logs:/logs
command:
- --addr=0.0.0.0:20160
- --advertise-addr=tikv:20160
- --status-addr=tikv:20180
- --data-dir=/data/tikv
- --pd=pd0:2379
- --log-file=/logs/tikv.log
depends_on:
- "pd0"
restart: on-failure
tidb:
image: pingcap/tidb:v8.5.1
# ports:
# - "4000:4000"
volumes:
- ./volumes/logs:/logs
command:
- --advertise-address=tidb
- --store=tikv
- --path=pd0:2379
- --log-file=/logs/tidb.log
depends_on:
- "tikv"
restart: on-failure
tiflash:
image: pingcap/tiflash:v8.5.1
volumes:
- ./config/tiflash.toml:/tiflash.toml:ro
- ./config/tiflash-learner.toml:/tiflash-learner.toml:ro
- ./volumes/data:/data
- ./volumes/logs:/logs
command:
- --config=/tiflash.toml
depends_on:
- "tikv"
- "tidb"
restart: on-failure


@ -10,10 +10,10 @@ NEXT_PUBLIC_API_PREFIX=http://localhost:5001/console/api
# console or api domain.
# example: http://udify.app/api
NEXT_PUBLIC_PUBLIC_API_PREFIX=http://localhost:5001/api
# The APIFREX for MARKETPLACE
NEXT_PUBLIC_MARKETPLACE_API_PREFIX=http://localhost:5002/api
# The API PREFIX for MARKETPLACE
NEXT_PUBLIC_MARKETPLACE_API_PREFIX=https://marketplace.dify.ai/api/v1
# The URL for MARKETPLACE
NEXT_PUBLIC_MARKETPLACE_URL_PREFIX=
NEXT_PUBLIC_MARKETPLACE_URL_PREFIX=https://marketplace.dify.ai
# SENTRY
NEXT_PUBLIC_SENTRY_DSN=


@ -46,6 +46,7 @@ ENV MARKETPLACE_API_URL=http://127.0.0.1:5001
ENV MARKETPLACE_URL=http://127.0.0.1:5001
ENV PORT=3000
ENV NEXT_TELEMETRY_DISABLED=1
ENV PM2_INSTANCES=2
# set timezone
ENV TZ=UTC
@ -58,7 +59,6 @@ COPY --from=builder /app/web/public ./public
COPY --from=builder /app/web/.next/standalone ./
COPY --from=builder /app/web/.next/static ./.next/static
COPY docker/pm2.json ./pm2.json
COPY docker/entrypoint.sh ./entrypoint.sh


@ -70,6 +70,8 @@ If you want to customize the host and port:
pnpm run start --port=3001 --host=0.0.0.0
```
If you want to customize the number of instances launched by PM2, you can configure `PM2_INSTANCES` in `docker-compose.yaml` or `Dockerfile`.
## Storybook
This project uses [Storybook](https://storybook.js.org/) for UI component development.


@ -147,7 +147,8 @@ const AppDetailLayout: FC<IAppDetailLayoutProps> = (props) => {
})
}
}
}, [appDetailRes, appId, getNavigations, isCurrentWorkspaceEditor, isLoadingAppDetail, isLoadingCurrentWorkspace, pathname, router, setAppDetail, systemFeatures.enable_web_sso_switch_component])
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [appDetailRes, appId, getNavigations, isCurrentWorkspaceEditor, isLoadingAppDetail, isLoadingCurrentWorkspace, router, setAppDetail, systemFeatures.enable_web_sso_switch_component])
useUnmount(() => {
setAppDetail()


@ -53,7 +53,7 @@ export default function ChartView({ appId }: IChartViewProps) {
className='mt-0 !w-40'
onSelect={(item) => {
const id = item.value
const value = TIME_PERIOD_MAPPING[id]?.value || '-1'
const value = TIME_PERIOD_MAPPING[id]?.value ?? '-1'
const name = item.name || t('appLog.filter.period.allTime')
onSelect({ value, name })
}}

Some files were not shown because too many files have changed in this diff.