Mirror of https://github.com/langgenius/dify.git (synced 2026-01-29 00:06:00 +08:00)

Compare commits (14 commits)
| SHA1 |
|---|
| 24cb992843 |
| 7907c0bf58 |
| ebf4fd9a09 |
| 38b9901274 |
| 642842d61b |
| e161c511af |
| f29e82685e |
| 3a5ae96e7b |
| b63a685386 |
| 877da82b06 |
| 6637629045 |
| e925b6c572 |
| 5412f4aba5 |
| 2d5ad0d208 |
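
Taken together, the compared commits bump the self-hosted version from 0.3.20 to 0.3.22. The changes visible in this comparison include: the GitHub issue templates rewritten as YAML issue forms (bug report, documentation, feature/enhancement, translation) with the old Markdown templates removed; a "retriever resource" (citation) pipeline threaded through the console, explore, service, web and universal-chat APIs, the conversation task, the dataset retriever tool, the models, and two new database migrations; a paragraph-type input variable for completion apps together with an `update_app_model_configs` migration command; an empty-string default for the `query` request argument; and `docx`/`csv` added to the allowed upload extensions.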

.github/ISSUE_TEMPLATE/bug_report.yml (vendored, new file, 49 lines)
@@ -0,0 +1,49 @@
name: "🕷️ Bug report"
description: Report errors or unexpected behavior
labels:
  - bug
body:
  - type: markdown
    attributes:
      value: Please make sure to [search for existing issues](https://github.com/langgenius/dify/issues) before filing a new one!
  - type: input
    attributes:
      label: Dify version
      placeholder: 0.3.21
      description: See about section in Dify console
    validations:
      required: true

  - type: dropdown
    attributes:
      label: Cloud or Self Hosted
      description: How / Where was Dify installed from?
      multiple: true
      options:
        - Cloud
        - Self Hosted
        - Other (please specify in "Steps to Reproduce")
    validations:
      required: true

  - type: textarea
    attributes:
      label: Steps to reproduce
      description: We highly suggest including screenshots and a bug report log.
      placeholder: Having detailed steps helps us reproduce the bug.
    validations:
      required: true

  - type: textarea
    attributes:
      label: ✔️ Expected Behavior
      placeholder: What were you expecting?
    validations:
      required: false

  - type: textarea
    attributes:
      label: ❌ Actual Behavior
      placeholder: What happened instead?
    validations:
      required: false

.github/ISSUE_TEMPLATE/config.yml (vendored, new file, 8 lines)
@@ -0,0 +1,8 @@
blank_issues_enabled: false
contact_links:
  - name: "\U0001F4DA Dify user documentation"
    url: https://docs.dify.ai/getting-started/readme
    about: Documentation for users of Dify
  - name: "\U0001F4DA Dify dev documentation"
    url: https://docs.dify.ai/getting-started/install-self-hosted
    about: Documentation for people interested in developing and contributing for Dify

.github/ISSUE_TEMPLATE/document_issue.yml (vendored, new file, 11 lines)
@@ -0,0 +1,11 @@
name: "📚 Documentation Issue"
description: Report issues in our documentation
labels:
  - ducumentation
body:
  - type: textarea
    attributes:
      label: Provide a description of requested docs changes
      placeholder: Briefly describe which document needs to be corrected and why.
    validations:
      required: true

.github/ISSUE_TEMPLATE/feature_request.yml (vendored, new file, 26 lines)
@@ -0,0 +1,26 @@
name: "⭐ Feature or enhancement request"
description: Propose something new.
labels:
  - enhancement
body:
  - type: textarea
    attributes:
      label: Description of the new feature / enhancement
      placeholder: What is the expected behavior of the proposed feature?
    validations:
      required: true
  - type: textarea
    attributes:
      label: Scenario when this would be used?
      placeholder: What is the scenario this would be used? Why is this important to your workflow as a dify user?
    validations:
      required: true
  - type: textarea
    attributes:
      label: Supporting information
      placeholder: "Having additional evidence, data, tweets, blog posts, research, ... anything is extremely helpful. This information provides context to the scenario that may otherwise be lost."
    validations:
      required: false
  - type: markdown
    attributes:
      value: Please limit one request per issue.

.github/ISSUE_TEMPLATE/translation_issue.yml (vendored, new file, 46 lines)
@@ -0,0 +1,46 @@
name: "🌐 Localization/Translation issue"
description: Report incorrect translations.
labels:
  - translation
body:
  - type: markdown
    attributes:
      value: Please make sure to [search for existing issues](https://github.com/langgenius/dify/issues) before filing a new one!
  - type: input
    attributes:
      label: Dify version
      placeholder: 0.3.21
      description: Hover over system tray icon or look at Settings
    validations:
      required: true
  - type: input
    attributes:
      label: Utility with translation issue
      placeholder: Some area
      description: Please input here the utility with the translation issue
    validations:
      required: true
  - type: input
    attributes:
      label: 🌐 Language affected
      placeholder: "German"
    validations:
      required: true
  - type: textarea
    attributes:
      label: ❌ Actual phrase(s)
      placeholder: What is there? Please include a screenshot as that is extremely helpful.
    validations:
      required: true
  - type: textarea
    attributes:
      label: ✔️ Expected phrase(s)
      placeholder: What was expected?
    validations:
      required: true
  - type: textarea
    attributes:
      label: ℹ Why is the current translation wrong
      placeholder: Why do you feel this is incorrect?
    validations:
      required: true

.github/ISSUE_TEMPLATE/🐛-bug-report.md (vendored, deleted, 32 lines)
@@ -1,32 +0,0 @@
---
name: "\U0001F41B Bug report"
about: Create a report to help us improve
title: ''
labels: bug
assignees: ''

---

<!--
Please provide a clear and concise description of what the bug is. Include
screenshots if needed. Please test using the latest version of the relevant
Dify packages to make sure your issue has not already been fixed.
-->

Dify version: Cloud | Self Host

## Steps To Reproduce
<!--
Your bug will get fixed much faster if we can run your code and it doesn't
have dependencies other than Dify. Issues without reproduction steps or
code examples may be immediately closed as not actionable.
-->

1.
2.

## The current behavior

## The expected behavior

.github/ISSUE_TEMPLATE/🚀-feature-request.md (vendored, deleted, 20 lines)
@@ -1,20 +0,0 @@
---
name: "\U0001F680 Feature request"
about: Suggest an idea for this project
title: ''
labels: enhancement
assignees: ''

---

**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]

**Describe the solution you'd like**
A clear and concise description of what you want to happen.

**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.

**Additional context**
Add any other context or screenshots about the feature request here.

.github/ISSUE_TEMPLATE/🤔-questions-and-help.md (vendored, deleted, 10 lines)
@@ -1,10 +0,0 @@
---
name: "\U0001F914 Questions and Help"
about: Ask a usage or consultation question
title: ''
labels: ''
assignees: ''

---

@@ -3,7 +3,7 @@ FROM python:3.10-slim AS base

LABEL maintainer="takatost@gmail.com"

RUN apt-get update \
RUN apt-get update \
    && apt-get install -y --no-install-recommends gcc g++ python3-dev libc-dev libffi-dev

COPY requirements.txt /requirements.txt

@@ -26,7 +26,7 @@ EXPOSE 5001

WORKDIR /app/api

RUN apt-get update \
RUN apt-get update \
    && apt-get install -y --no-install-recommends bash curl wget vim nodejs \
    && apt-get autoremove \
    && rm -rf /var/lib/apt/lists/*
@@ -34,6 +34,9 @@ RUN apt-get update \
COPY --from=base /pkg /usr/local
COPY . /app/api/

RUN python -c "from transformers import GPT2TokenizerFast; GPT2TokenizerFast.from_pretrained('gpt2')"
ENV TRANSFORMERS_OFFLINE true

COPY docker/entrypoint.sh /entrypoint.sh
RUN chmod +x /entrypoint.sh

api/commands.py (109 lines changed)
@@ -6,6 +6,7 @@ import string
import time

import click
from tqdm import tqdm
from flask import current_app
from langchain.embeddings import OpenAIEmbeddings
from werkzeug.exceptions import NotFound
@@ -21,9 +22,9 @@ from libs.password import password_pattern, valid_password, hash_password
from libs.helper import email as email_validate
from extensions.ext_database import db
from libs.rsa import generate_key_pair
from models.account import InvitationCode, Tenant
from models.account import InvitationCode, Tenant, TenantAccountJoin
from models.dataset import Dataset, DatasetQuery, Document
from models.model import Account
from models.model import Account, AppModelConfig, App
import secrets
import base64

@@ -439,6 +440,107 @@ def update_qdrant_indexes():

    click.echo(click.style('Congratulations! Update {} dataset indexes.'.format(create_count), fg='green'))


@click.command('update_app_model_configs', help='Migrate data to support paragraph variable.')
@click.option("--batch-size", default=500, help="Number of records to migrate in each batch.")
def update_app_model_configs(batch_size):
    pre_prompt_template = '{{default_input}}'
    user_input_form_template = {
        "en-US": [
            {
                "paragraph": {
                    "label": "Query",
                    "variable": "default_input",
                    "required": False,
                    "default": ""
                }
            }
        ],
        "zh-Hans": [
            {
                "paragraph": {
                    "label": "查询内容",
                    "variable": "default_input",
                    "required": False,
                    "default": ""
                }
            }
        ]
    }

    click.secho("Start migrate old data that the text generator can support paragraph variable.", fg='green')

    total_records = db.session.query(AppModelConfig) \
        .join(App, App.app_model_config_id == AppModelConfig.id) \
        .filter(App.mode == 'completion') \
        .count()

    if total_records == 0:
        click.secho("No data to migrate.", fg='green')
        return

    num_batches = (total_records + batch_size - 1) // batch_size

    with tqdm(total=total_records, desc="Migrating Data") as pbar:
        for i in range(num_batches):
            offset = i * batch_size
            limit = min(batch_size, total_records - offset)

            click.secho(f"Fetching batch {i+1}/{num_batches} from source database...", fg='green')

            data_batch = db.session.query(AppModelConfig) \
                .join(App, App.app_model_config_id == AppModelConfig.id) \
                .filter(App.mode == 'completion') \
                .order_by(App.created_at) \
                .offset(offset).limit(limit).all()

            if not data_batch:
                click.secho("No more data to migrate.", fg='green')
                break

            try:
                click.secho(f"Migrating {len(data_batch)} records...", fg='green')
                for data in data_batch:
                    # click.secho(f"Migrating data {data.id}, pre_prompt: {data.pre_prompt}, user_input_form: {data.user_input_form}", fg='green')

                    if data.pre_prompt is None:
                        data.pre_prompt = pre_prompt_template
                    else:
                        if pre_prompt_template in data.pre_prompt:
                            continue
                        data.pre_prompt += pre_prompt_template

                    app_data = db.session.query(App) \
                        .filter(App.id == data.app_id) \
                        .one()

                    account_data = db.session.query(Account) \
                        .join(TenantAccountJoin, Account.id == TenantAccountJoin.account_id) \
                        .filter(TenantAccountJoin.role == 'owner') \
                        .filter(TenantAccountJoin.tenant_id == app_data.tenant_id) \
                        .one_or_none()

                    if not account_data:
                        continue

                    if data.user_input_form is None or data.user_input_form == 'null':
                        data.user_input_form = json.dumps(user_input_form_template[account_data.interface_language])
                    else:
                        raw_json_data = json.loads(data.user_input_form)
                        raw_json_data.append(user_input_form_template[account_data.interface_language][0])
                        data.user_input_form = json.dumps(raw_json_data)

                    # click.secho(f"Updated data {data.id}, pre_prompt: {data.pre_prompt}, user_input_form: {data.user_input_form}", fg='green')

                db.session.commit()

            except Exception as e:
                click.secho(f"Error while migrating data: {e}, app_id: {data.app_id}, app_model_config_id: {data.id}", fg='red')
                continue

            click.secho(f"Successfully migrated batch {i+1}/{num_batches}.", fg='green')

            pbar.update(len(data_batch))


def register_commands(app):
    app.cli.add_command(reset_password)
    app.cli.add_command(reset_email)
@@ -448,4 +550,5 @@ def register_commands(app):
    app.cli.add_command(sync_anthropic_hosted_providers)
    app.cli.add_command(clean_unused_dataset_indexes)
    app.cli.add_command(create_qdrant_indexes)
    app.cli.add_command(update_qdrant_indexes)
    app.cli.add_command(update_qdrant_indexes)
    app.cli.add_command(update_app_model_configs)
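
The new `update_app_model_configs` command is registered on the Flask CLI via `app.cli.add_command`, so, assuming the standard Flask entry point used by the API service, it would typically be invoked from the `api` directory as `flask update_app_model_configs --batch-size 500`.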

@@ -100,7 +100,7 @@ class Config:
        self.CONSOLE_URL = get_env('CONSOLE_URL')
        self.API_URL = get_env('API_URL')
        self.APP_URL = get_env('APP_URL')
        self.CURRENT_VERSION = "0.3.20"
        self.CURRENT_VERSION = "0.3.22"
        self.COMMIT_SHA = get_env('COMMIT_SHA')
        self.EDITION = "SELF_HOSTED"
        self.DEPLOY_ENV = get_env('DEPLOY_ENV')

@@ -38,7 +38,18 @@ model_templates = {
                "presence_penalty": 0,
                "frequency_penalty": 0
            }
        })
        }),
        'user_input_form': json.dumps([
            {
                "paragraph": {
                    "label": "Query",
                    "variable": "query",
                    "required": True,
                    "default": ""
                }
            }
        ]),
        'pre_prompt': '{{query}}'
    }
},
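
As a rough sketch of what this default template means for a completion app: the paragraph variable `query` declared in `user_input_form` is filled from the user's inputs and substituted into the `{{query}}` placeholder in `pre_prompt`. Dify renders this through its own Jinja-style prompt templates; plain string replacement is used below purely for illustration.

```python
import json

# Default completion-app template from the diff (abridged)
user_input_form = json.loads(
    '[{"paragraph": {"label": "Query", "variable": "query", "required": true, "default": ""}}]'
)
pre_prompt = '{{query}}'

# Values a user submits for the declared variables
inputs = {'query': 'Summarize the attached release notes in three bullet points.'}

# Illustrative substitution only; not the engine Dify actually uses
rendered = pre_prompt
for item in user_input_form:
    variable = next(iter(item.values()))['variable']
    rendered = rendered.replace('{{%s}}' % variable, inputs.get(variable, ''))

print(rendered)  # -> Summarize the attached release notes in three bullet points.
```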

@@ -29,6 +29,7 @@ model_config_fields = {
    'suggested_questions': fields.Raw(attribute='suggested_questions_list'),
    'suggested_questions_after_answer': fields.Raw(attribute='suggested_questions_after_answer_dict'),
    'speech_to_text': fields.Raw(attribute='speech_to_text_dict'),
    'retriever_resource': fields.Raw(attribute='retriever_resource_dict'),
    'more_like_this': fields.Raw(attribute='more_like_this_dict'),
    'sensitive_word_avoidance': fields.Raw(attribute='sensitive_word_avoidance_dict'),
    'model': fields.Raw(attribute='model_dict'),

@@ -39,9 +39,10 @@ class CompletionMessageApi(Resource):

        parser = reqparse.RequestParser()
        parser.add_argument('inputs', type=dict, required=True, location='json')
        parser.add_argument('query', type=str, location='json')
        parser.add_argument('query', type=str, location='json', default='')
        parser.add_argument('model_config', type=dict, required=True, location='json')
        parser.add_argument('response_mode', type=str, choices=['blocking', 'streaming'], location='json')
        parser.add_argument('retriever_from', type=str, required=False, default='dev', location='json')
        args = parser.parse_args()

        streaming = args['response_mode'] != 'blocking'
@@ -115,6 +116,7 @@ class ChatMessageApi(Resource):
        parser.add_argument('model_config', type=dict, required=True, location='json')
        parser.add_argument('conversation_id', type=uuid_value, location='json')
        parser.add_argument('response_mode', type=str, choices=['blocking', 'streaming'], location='json')
        parser.add_argument('retriever_from', type=str, required=False, default='dev', location='json')
        args = parser.parse_args()

        streaming = args['response_mode'] != 'blocking'

@@ -26,7 +26,7 @@ from models.model import UploadFile

cache = TTLCache(maxsize=None, ttl=30)

ALLOWED_EXTENSIONS = ['txt', 'markdown', 'md', 'pdf', 'html', 'htm', 'xlsx']
ALLOWED_EXTENSIONS = ['txt', 'markdown', 'md', 'pdf', 'html', 'htm', 'xlsx', 'docx', 'csv']
PREVIEW_WORDS_LIMIT = 3000

@@ -31,8 +31,9 @@ class CompletionApi(InstalledAppResource):

        parser = reqparse.RequestParser()
        parser.add_argument('inputs', type=dict, required=True, location='json')
        parser.add_argument('query', type=str, location='json')
        parser.add_argument('query', type=str, location='json', default='')
        parser.add_argument('response_mode', type=str, choices=['blocking', 'streaming'], location='json')
        parser.add_argument('retriever_from', type=str, required=False, default='explore_app', location='json')
        args = parser.parse_args()

        streaming = args['response_mode'] == 'streaming'
@@ -92,6 +93,7 @@ class ChatApi(InstalledAppResource):
        parser.add_argument('query', type=str, required=True, location='json')
        parser.add_argument('response_mode', type=str, choices=['blocking', 'streaming'], location='json')
        parser.add_argument('conversation_id', type=uuid_value, location='json')
        parser.add_argument('retriever_from', type=str, required=False, default='explore_app', location='json')
        args = parser.parse_args()

        streaming = args['response_mode'] == 'streaming'

@@ -30,6 +30,25 @@ class MessageListApi(InstalledAppResource):
            'rating': fields.String
        }

        retriever_resource_fields = {
            'id': fields.String,
            'message_id': fields.String,
            'position': fields.Integer,
            'dataset_id': fields.String,
            'dataset_name': fields.String,
            'document_id': fields.String,
            'document_name': fields.String,
            'data_source_type': fields.String,
            'segment_id': fields.String,
            'score': fields.Float,
            'hit_count': fields.Integer,
            'word_count': fields.Integer,
            'segment_position': fields.Integer,
            'index_node_hash': fields.String,
            'content': fields.String,
            'created_at': TimestampField
        }

        message_fields = {
            'id': fields.String,
            'conversation_id': fields.String,
@@ -37,6 +56,7 @@ class MessageListApi(InstalledAppResource):
            'query': fields.String,
            'answer': fields.String,
            'feedback': fields.Nested(feedback_fields, attribute='user_feedback', allow_null=True),
            'retriever_resources': fields.List(fields.Nested(retriever_resource_fields)),
            'created_at': TimestampField
        }

@@ -24,6 +24,7 @@ class AppParameterApi(InstalledAppResource):
            'suggested_questions': fields.Raw,
            'suggested_questions_after_answer': fields.Raw,
            'speech_to_text': fields.Raw,
            'retriever_resource': fields.Raw,
            'more_like_this': fields.Raw,
            'user_input_form': fields.Raw,
        }
@@ -39,6 +40,7 @@ class AppParameterApi(InstalledAppResource):
            'suggested_questions': app_model_config.suggested_questions_list,
            'suggested_questions_after_answer': app_model_config.suggested_questions_after_answer_dict,
            'speech_to_text': app_model_config.speech_to_text_dict,
            'retriever_resource': app_model_config.retriever_resource_dict,
            'more_like_this': app_model_config.more_like_this_dict,
            'user_input_form': app_model_config.user_input_form_list
        }

@@ -29,9 +29,11 @@ class UniversalChatApi(UniversalChatResource):
        parser.add_argument('provider', type=str, required=True, location='json')
        parser.add_argument('model', type=str, required=True, location='json')
        parser.add_argument('tools', type=list, required=True, location='json')
        parser.add_argument('retriever_from', type=str, required=False, default='universal_app', location='json')
        args = parser.parse_args()

        app_model_config = app_model.app_model_config
        app_model_config

        # update app model config
        args['model_config'] = app_model_config.to_dict()

@@ -36,6 +36,25 @@ class UniversalChatMessageListApi(UniversalChatResource):
            'created_at': TimestampField
        }

        retriever_resource_fields = {
            'id': fields.String,
            'message_id': fields.String,
            'position': fields.Integer,
            'dataset_id': fields.String,
            'dataset_name': fields.String,
            'document_id': fields.String,
            'document_name': fields.String,
            'data_source_type': fields.String,
            'segment_id': fields.String,
            'score': fields.Float,
            'hit_count': fields.Integer,
            'word_count': fields.Integer,
            'segment_position': fields.Integer,
            'index_node_hash': fields.String,
            'content': fields.String,
            'created_at': TimestampField
        }

        message_fields = {
            'id': fields.String,
            'conversation_id': fields.String,
@@ -43,6 +62,7 @@ class UniversalChatMessageListApi(UniversalChatResource):
            'query': fields.String,
            'answer': fields.String,
            'feedback': fields.Nested(feedback_fields, attribute='user_feedback', allow_null=True),
            'retriever_resources': fields.List(fields.Nested(retriever_resource_fields)),
            'created_at': TimestampField,
            'agent_thoughts': fields.List(fields.Nested(agent_thought_fields))
        }

@@ -1,4 +1,6 @@
# -*- coding:utf-8 -*-
import json

from flask_restful import marshal_with, fields

from controllers.console import api
@@ -14,6 +16,7 @@ class UniversalChatParameterApi(UniversalChatResource):
        'suggested_questions': fields.Raw,
        'suggested_questions_after_answer': fields.Raw,
        'speech_to_text': fields.Raw,
        'retriever_resource': fields.Raw,
    }

    @marshal_with(parameters_fields)
@@ -21,12 +24,14 @@ class UniversalChatParameterApi(UniversalChatResource):
        """Retrieve app parameters."""
        app_model = universal_app
        app_model_config = app_model.app_model_config
        app_model_config.retriever_resource = json.dumps({'enabled': True})

        return {
            'opening_statement': app_model_config.opening_statement,
            'suggested_questions': app_model_config.suggested_questions_list,
            'suggested_questions_after_answer': app_model_config.suggested_questions_after_answer_dict,
            'speech_to_text': app_model_config.speech_to_text_dict,
            'retriever_resource': app_model_config.retriever_resource_dict,
        }

@@ -47,6 +47,7 @@ def universal_chat_app_required(view=None):
            suggested_questions=json.dumps([]),
            suggested_questions_after_answer=json.dumps({'enabled': True}),
            speech_to_text=json.dumps({'enabled': True}),
            retriever_resource=json.dumps({'enabled': True}),
            more_like_this=None,
            sensitive_word_avoidance=None,
            model=json.dumps({

@@ -25,6 +25,7 @@ class AppParameterApi(AppApiResource):
            'suggested_questions': fields.Raw,
            'suggested_questions_after_answer': fields.Raw,
            'speech_to_text': fields.Raw,
            'retriever_resource': fields.Raw,
            'more_like_this': fields.Raw,
            'user_input_form': fields.Raw,
        }
@@ -39,6 +40,7 @@ class AppParameterApi(AppApiResource):
            'suggested_questions': app_model_config.suggested_questions_list,
            'suggested_questions_after_answer': app_model_config.suggested_questions_after_answer_dict,
            'speech_to_text': app_model_config.speech_to_text_dict,
            'retriever_resource': app_model_config.retriever_resource_dict,
            'more_like_this': app_model_config.more_like_this_dict,
            'user_input_form': app_model_config.user_input_form_list
        }

@@ -27,9 +27,11 @@ class CompletionApi(AppApiResource):

        parser = reqparse.RequestParser()
        parser.add_argument('inputs', type=dict, required=True, location='json')
        parser.add_argument('query', type=str, location='json')
        parser.add_argument('query', type=str, location='json', default='')
        parser.add_argument('response_mode', type=str, choices=['blocking', 'streaming'], location='json')
        parser.add_argument('user', type=str, location='json')
        parser.add_argument('retriever_from', type=str, required=False, default='dev', location='json')

        args = parser.parse_args()

        streaming = args['response_mode'] == 'streaming'
@@ -91,6 +93,8 @@ class ChatApi(AppApiResource):
        parser.add_argument('response_mode', type=str, choices=['blocking', 'streaming'], location='json')
        parser.add_argument('conversation_id', type=uuid_value, location='json')
        parser.add_argument('user', type=str, location='json')
        parser.add_argument('retriever_from', type=str, required=False, default='dev', location='json')

        args = parser.parse_args()

        streaming = args['response_mode'] == 'streaming'

@@ -16,6 +16,24 @@ class MessageListApi(AppApiResource):
        feedback_fields = {
            'rating': fields.String
        }
        retriever_resource_fields = {
            'id': fields.String,
            'message_id': fields.String,
            'position': fields.Integer,
            'dataset_id': fields.String,
            'dataset_name': fields.String,
            'document_id': fields.String,
            'document_name': fields.String,
            'data_source_type': fields.String,
            'segment_id': fields.String,
            'score': fields.Float,
            'hit_count': fields.Integer,
            'word_count': fields.Integer,
            'segment_position': fields.Integer,
            'index_node_hash': fields.String,
            'content': fields.String,
            'created_at': TimestampField
        }

        message_fields = {
            'id': fields.String,
@@ -24,6 +42,7 @@ class MessageListApi(AppApiResource):
            'query': fields.String,
            'answer': fields.String,
            'feedback': fields.Nested(feedback_fields, attribute='user_feedback', allow_null=True),
            'retriever_resources': fields.List(fields.Nested(retriever_resource_fields)),
            'created_at': TimestampField
        }

@@ -24,6 +24,7 @@ class AppParameterApi(WebApiResource):
            'suggested_questions': fields.Raw,
            'suggested_questions_after_answer': fields.Raw,
            'speech_to_text': fields.Raw,
            'retriever_resource': fields.Raw,
            'more_like_this': fields.Raw,
            'user_input_form': fields.Raw,
        }
@@ -38,6 +39,7 @@ class AppParameterApi(WebApiResource):
            'suggested_questions': app_model_config.suggested_questions_list,
            'suggested_questions_after_answer': app_model_config.suggested_questions_after_answer_dict,
            'speech_to_text': app_model_config.speech_to_text_dict,
            'retriever_resource': app_model_config.retriever_resource_dict,
            'more_like_this': app_model_config.more_like_this_dict,
            'user_input_form': app_model_config.user_input_form_list
        }

@@ -29,8 +29,10 @@ class CompletionApi(WebApiResource):

        parser = reqparse.RequestParser()
        parser.add_argument('inputs', type=dict, required=True, location='json')
        parser.add_argument('query', type=str, location='json')
        parser.add_argument('query', type=str, location='json', default='')
        parser.add_argument('response_mode', type=str, choices=['blocking', 'streaming'], location='json')
        parser.add_argument('retriever_from', type=str, required=False, default='web_app', location='json')

        args = parser.parse_args()

        streaming = args['response_mode'] == 'streaming'
@@ -88,6 +90,8 @@ class ChatApi(WebApiResource):
        parser.add_argument('query', type=str, required=True, location='json')
        parser.add_argument('response_mode', type=str, choices=['blocking', 'streaming'], location='json')
        parser.add_argument('conversation_id', type=uuid_value, location='json')
        parser.add_argument('retriever_from', type=str, required=False, default='web_app', location='json')

        args = parser.parse_args()

        streaming = args['response_mode'] == 'streaming'

@@ -29,6 +29,25 @@ class MessageListApi(WebApiResource):
            'rating': fields.String
        }

        retriever_resource_fields = {
            'id': fields.String,
            'message_id': fields.String,
            'position': fields.Integer,
            'dataset_id': fields.String,
            'dataset_name': fields.String,
            'document_id': fields.String,
            'document_name': fields.String,
            'data_source_type': fields.String,
            'segment_id': fields.String,
            'score': fields.Float,
            'hit_count': fields.Integer,
            'word_count': fields.Integer,
            'segment_position': fields.Integer,
            'index_node_hash': fields.String,
            'content': fields.String,
            'created_at': TimestampField
        }

        message_fields = {
            'id': fields.String,
            'conversation_id': fields.String,
@@ -36,6 +55,7 @@ class MessageListApi(WebApiResource):
            'query': fields.String,
            'answer': fields.String,
            'feedback': fields.Nested(feedback_fields, attribute='user_feedback', allow_null=True),
            'retriever_resources': fields.List(fields.Nested(retriever_resource_fields)),
            'created_at': TimestampField
        }

@@ -1,3 +1,4 @@
import json
from typing import Tuple, List, Any, Union, Sequence, Optional, cast

from langchain.agents import OpenAIFunctionsAgent, BaseSingleActionAgent
@@ -53,6 +54,10 @@ class MultiDatasetRouterAgent(OpenAIFunctionsAgent):
            tool = next(iter(self.tools))
            tool = cast(DatasetRetrieverTool, tool)
            rst = tool.run(tool_input={'query': kwargs['input']})
            # output = ''
            # rst_json = json.loads(rst)
            # for item in rst_json:
            #     output += f'{item["content"]}\n'
            return AgentFinish(return_values={"output": rst}, log=rst)

        if intermediate_steps:

@@ -64,12 +64,9 @@ class DatasetToolCallbackHandler(BaseCallbackHandler):
        llm_prefix: Optional[str] = None,
        **kwargs: Any,
    ) -> None:
        # kwargs={'name': 'Search'}
        # llm_prefix='Thought:'
        # observation_prefix='Observation: '
        # output='53 years'
        pass

    def on_tool_error(
        self, error: Union[Exception, KeyboardInterrupt], **kwargs: Any
    ) -> None:

@@ -2,6 +2,7 @@ from typing import List

from langchain.schema import Document

from core.conversation_message_task import ConversationMessageTask
from extensions.ext_database import db
from models.dataset import DocumentSegment

@@ -9,8 +10,9 @@ from models.dataset import DocumentSegment
class DatasetIndexToolCallbackHandler:
    """Callback handler for dataset tool."""

    def __init__(self, dataset_id: str) -> None:
    def __init__(self, dataset_id: str, conversation_message_task: ConversationMessageTask) -> None:
        self.dataset_id = dataset_id
        self.conversation_message_task = conversation_message_task

    def on_tool_end(self, documents: List[Document]) -> None:
        """Handle tool end."""
@@ -27,3 +29,7 @@ class DatasetIndexToolCallbackHandler:
        )

        db.session.commit()

    def return_retriever_resource_info(self, resource: List):
        """Handle return_retriever_resource_info."""
        self.conversation_message_task.on_dataset_query_finish(resource)

@@ -1,3 +1,4 @@
import json
import logging
import re
from typing import Optional, List, Union, Tuple
@@ -19,13 +20,15 @@ from core.orchestrator_rule_parser import OrchestratorRuleParser
from core.prompt.prompt_builder import PromptBuilder
from core.prompt.prompt_template import JinjaPromptTemplate
from core.prompt.prompts import MORE_LIKE_THIS_GENERATE_PROMPT
from models.dataset import DocumentSegment, Dataset, Document
from models.model import App, AppModelConfig, Account, Conversation, Message, EndUser


class Completion:
    @classmethod
    def generate(cls, task_id: str, app: App, app_model_config: AppModelConfig, query: str, inputs: dict,
                 user: Union[Account, EndUser], conversation: Optional[Conversation], streaming: bool, is_override: bool = False):
                 user: Union[Account, EndUser], conversation: Optional[Conversation], streaming: bool,
                 is_override: bool = False, retriever_from: str = 'dev'):
        """
        errors: ProviderTokenNotInitError
        """
@@ -96,7 +99,6 @@ class Completion:
            should_use_agent = agent_executor.should_use_agent(query)
            if should_use_agent:
                agent_execute_result = agent_executor.run(query)

        # run the final llm
        try:
            cls.run_final_llm(
@@ -118,7 +120,8 @@ class Completion:
            return

    @classmethod
    def run_final_llm(cls, model_instance: BaseLLM, mode: str, app_model_config: AppModelConfig, query: str, inputs: dict,
    def run_final_llm(cls, model_instance: BaseLLM, mode: str, app_model_config: AppModelConfig, query: str,
                      inputs: dict,
                      agent_execute_result: Optional[AgentExecuteResult],
                      conversation_message_task: ConversationMessageTask,
                      memory: Optional[ReadOnlyConversationTokenDBBufferSharedMemory]):
@@ -150,7 +153,6 @@ class Completion:
            callbacks=[LLMCallbackHandler(model_instance, conversation_message_task)],
            fake_response=fake_response
        )

        return response

    @classmethod

@@ -1,6 +1,6 @@
import decimal
import json
from typing import Optional, Union
from typing import Optional, Union, List

from core.callback_handler.entity.agent_loop import AgentLoop
from core.callback_handler.entity.dataset_query import DatasetQueryObj
@@ -15,7 +15,8 @@ from events.message_event import message_was_created
from extensions.ext_database import db
from extensions.ext_redis import redis_client
from models.dataset import DatasetQuery
from models.model import AppModelConfig, Conversation, Account, Message, EndUser, App, MessageAgentThought, MessageChain
from models.model import AppModelConfig, Conversation, Account, Message, EndUser, App, MessageAgentThought, \
    MessageChain, DatasetRetrieverResource


class ConversationMessageTask:
@@ -41,6 +42,8 @@ class ConversationMessageTask:

        self.message = None

        self.retriever_resource = None

        self.model_dict = self.app_model_config.model_dict
        self.provider_name = self.model_dict.get('provider')
        self.model_name = self.model_dict.get('name')
@@ -157,7 +160,8 @@ class ConversationMessageTask:
        self.message.message_tokens = message_tokens
        self.message.message_unit_price = message_unit_price
        self.message.message_price_unit = message_price_unit
        self.message.answer = PromptBuilder.process_template(llm_message.completion.strip()) if llm_message.completion else ''
        self.message.answer = PromptBuilder.process_template(
            llm_message.completion.strip()) if llm_message.completion else ''
        self.message.answer_tokens = answer_tokens
        self.message.answer_unit_price = answer_unit_price
        self.message.answer_price_unit = answer_price_unit
@@ -256,7 +260,36 @@ class ConversationMessageTask:

        db.session.add(dataset_query)

    def on_dataset_query_finish(self, resource: List):
        if resource and len(resource) > 0:
            for item in resource:
                dataset_retriever_resource = DatasetRetrieverResource(
                    message_id=self.message.id,
                    position=item.get('position'),
                    dataset_id=item.get('dataset_id'),
                    dataset_name=item.get('dataset_name'),
                    document_id=item.get('document_id'),
                    document_name=item.get('document_name'),
                    data_source_type=item.get('data_source_type'),
                    segment_id=item.get('segment_id'),
                    score=item.get('score') if 'score' in item else None,
                    hit_count=item.get('hit_count') if 'hit_count' else None,
                    word_count=item.get('word_count') if 'word_count' in item else None,
                    segment_position=item.get('segment_position') if 'segment_position' in item else None,
                    index_node_hash=item.get('index_node_hash') if 'index_node_hash' in item else None,
                    content=item.get('content'),
                    retriever_from=item.get('retriever_from'),
                    created_by=self.user.id
                )
                db.session.add(dataset_retriever_resource)
                db.session.flush()
            self.retriever_resource = resource

    def message_end(self):
        self._pub_handler.pub_message_end(self.retriever_resource)

    def end(self):
        self._pub_handler.pub_message_end(self.retriever_resource)
        self._pub_handler.pub_end()

@@ -350,6 +383,23 @@ class PubHandler:
            self.pub_end()
            raise ConversationTaskStoppedException()

    def pub_message_end(self, retriever_resource: List):
        content = {
            'event': 'message_end',
            'data': {
                'task_id': self._task_id,
                'message_id': self._message.id,
                'mode': self._conversation.mode,
                'conversation_id': self._conversation.id
            }
        }
        if retriever_resource:
            content['data']['retriever_resources'] = retriever_resource
        redis_client.publish(self._channel, json.dumps(content))

        if self._is_stopped():
            self.pub_end()
            raise ConversationTaskStoppedException()

    def pub_end(self):
        content = {
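
For orientation, here is a minimal sketch of how a consumer of the published event stream might pick up the new citation data. The Redis connection details and channel name are assumptions for illustration only; the diff itself only shows that `PubHandler.pub_message_end` publishes a JSON `message_end` event carrying `retriever_resources` when citations were collected.

```python
import json

import redis

# Hypothetical subscriber; PubHandler publishes JSON events to a per-task channel.
client = redis.Redis()
pubsub = client.pubsub()
pubsub.subscribe('generate_result:example-task-id')  # channel name is illustrative

for message in pubsub.listen():
    if message['type'] != 'message':
        continue
    event = json.loads(message['data'])
    if event['event'] == 'message_end':
        # Present only when on_dataset_query_finish() stored retriever resources
        for source in event['data'].get('retriever_resources', []):
            print(source.get('dataset_name'), source.get('document_name'), source.get('score'))
        break
```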

@@ -74,7 +74,7 @@ class KeywordTableIndex(BaseIndex):
            DocumentSegment.document_id == document_id
        ).all()

        ids = [segment.id for segment in segments]
        ids = [segment.index_node_id for segment in segments]

        keyword_table = self._get_dataset_keyword_table()
        keyword_table = self._delete_ids_from_keyword_table(keyword_table, ids)

@@ -113,6 +113,25 @@ class QdrantVectorIndex(BaseVectorIndex):
                ],
            ))

    def delete_by_ids(self, ids: list[str]) -> None:
        if self._is_origin():
            self.recreate_dataset(self.dataset)
            return

        vector_store = self._get_vector_store()
        vector_store = cast(self._get_vector_store_class(), vector_store)

        from qdrant_client.http import models
        for node_id in ids:
            vector_store.del_texts(models.Filter(
                must=[
                    models.FieldCondition(
                        key="metadata.doc_id",
                        match=models.MatchValue(value=node_id),
                    ),
                ],
            ))

    def _is_origin(self):
        if self.dataset.index_struct_dict:
            class_prefix: str = self.dataset.index_struct_dict['vector_store']['class_prefix']

@@ -8,6 +8,7 @@ class LLMRunResult(BaseModel):
    content: str
    prompt_tokens: int
    completion_tokens: int
    source: list = None


class MessageType(enum.Enum):

@@ -342,7 +342,7 @@ class BaseLLM(BaseProviderModel):
            if order == 'context_prompt':
                prompt += context_prompt_content
            elif order == 'pre_prompt':
                prompt += (pre_prompt_content + '\n\n') if pre_prompt_content else ''
                prompt += pre_prompt_content

        query_prompt = prompt_rules['query_prompt'] if 'query_prompt' in prompt_rules else '{{query}}'

@@ -36,8 +36,8 @@ class OrchestratorRuleParser:
        self.app_model_config = app_model_config

    def to_agent_executor(self, conversation_message_task: ConversationMessageTask, memory: Optional[BaseChatMemory],
                          rest_tokens: int, chain_callback: MainChainGatherCallbackHandler) \
            -> Optional[AgentExecutor]:
                          rest_tokens: int, chain_callback: MainChainGatherCallbackHandler,
                          return_resource: bool = False, retriever_from: str = 'dev') -> Optional[AgentExecutor]:
        if not self.app_model_config.agent_mode_dict:
            return None

@@ -74,7 +74,7 @@ class OrchestratorRuleParser:

            # only OpenAI chat model (include Azure) support function call, use ReACT instead
            if agent_model_instance.model_mode != ModelMode.CHAT \
                    or agent_model_instance.model_provider.provider_name not in ['openai', 'azure_openai']:
                or agent_model_instance.model_provider.provider_name not in ['openai', 'azure_openai']:
                if planning_strategy in [PlanningStrategy.FUNCTION_CALL, PlanningStrategy.MULTI_FUNCTION_CALL]:
                    planning_strategy = PlanningStrategy.REACT
                elif planning_strategy == PlanningStrategy.ROUTER:
@@ -99,7 +99,9 @@ class OrchestratorRuleParser:
                tool_configs=tool_configs,
                conversation_message_task=conversation_message_task,
                rest_tokens=rest_tokens,
                callbacks=[agent_callback, DifyStdOutCallbackHandler()]
                callbacks=[agent_callback, DifyStdOutCallbackHandler()],
                return_resource=return_resource,
                retriever_from=retriever_from
            )

            if len(tools) == 0:
@@ -145,8 +147,10 @@ class OrchestratorRuleParser:

        return None

    def to_tools(self, agent_model_instance: BaseLLM, tool_configs: list, conversation_message_task: ConversationMessageTask,
                 rest_tokens: int, callbacks: Callbacks = None) -> list[BaseTool]:
    def to_tools(self, agent_model_instance: BaseLLM, tool_configs: list,
                 conversation_message_task: ConversationMessageTask,
                 rest_tokens: int, callbacks: Callbacks = None, return_resource: bool = False,
                 retriever_from: str = 'dev') -> list[BaseTool]:
        """
        Convert app agent tool configs to tools

@@ -155,6 +159,8 @@ class OrchestratorRuleParser:
        :param tool_configs: app agent tool configs
        :param conversation_message_task:
        :param callbacks:
        :param return_resource:
        :param retriever_from:
        :return:
        """
        tools = []
@@ -166,7 +172,7 @@ class OrchestratorRuleParser:

            tool = None
            if tool_type == "dataset":
                tool = self.to_dataset_retriever_tool(tool_val, conversation_message_task, rest_tokens)
                tool = self.to_dataset_retriever_tool(tool_val, conversation_message_task, rest_tokens, return_resource, retriever_from)
            elif tool_type == "web_reader":
                tool = self.to_web_reader_tool(agent_model_instance)
            elif tool_type == "google_search":
@@ -183,13 +189,15 @@ class OrchestratorRuleParser:
        return tools

    def to_dataset_retriever_tool(self, tool_config: dict, conversation_message_task: ConversationMessageTask,
                                  rest_tokens: int) \
                                  rest_tokens: int, return_resource: bool = False, retriever_from: str = 'dev') \
            -> Optional[BaseTool]:
        """
        A dataset tool is a tool that can be used to retrieve information from a dataset
        :param rest_tokens:
        :param tool_config:
        :param conversation_message_task:
        :param return_resource:
        :param retriever_from:
        :return:
        """
        # get dataset from dataset id
@@ -208,7 +216,10 @@ class OrchestratorRuleParser:
        tool = DatasetRetrieverTool.from_dataset(
            dataset=dataset,
            k=k,
            callbacks=[DatasetToolCallbackHandler(conversation_message_task)]
            callbacks=[DatasetToolCallbackHandler(conversation_message_task)],
            conversation_message_task=conversation_message_task,
            return_resource=return_resource,
            retriever_from=retriever_from
        )

        return tool

@@ -8,6 +8,6 @@
        "pre_prompt",
        "histories_prompt"
    ],
    "query_prompt": "用户:{{query}}",
    "query_prompt": "\n\n用户:{{query}}",
    "stops": ["用户:"]
}
@@ -8,6 +8,6 @@
        "pre_prompt",
        "histories_prompt"
    ],
    "query_prompt": "Human: {{query}}\n\nAssistant: ",
    "query_prompt": "\n\nHuman: {{query}}\n\nAssistant: ",
    "stops": ["\nHuman:", "</histories>"]
}
}

@@ -105,7 +105,7 @@ GENERATOR_QA_PROMPT = (
    'Step 3: Decompose or combine multiple pieces of information and concepts.\n'
    'Step 4: Generate 20 questions and answers based on these key information and concepts.'
    'The questions should be clear and detailed, and the answers should be detailed and complete.\n'
    "Answer must be the language:{language} and in the following format: Q1:\nA1:\nQ2:\nA2:...\n"
    "Answer according to the the language:{language} and in the following format: Q1:\nA1:\nQ2:\nA2:...\n"
)

RULE_CONFIG_GENERATE_TEMPLATE = """Given MY INTENDED AUDIENCES and HOPING TO SOLVE using a language model, please select \

@@ -1,3 +1,4 @@
import json
from typing import Type

from flask import current_app
@@ -5,13 +6,14 @@ from langchain.tools import BaseTool
from pydantic import Field, BaseModel

from core.callback_handler.index_tool_callback_handler import DatasetIndexToolCallbackHandler
from core.conversation_message_task import ConversationMessageTask
from core.embedding.cached_embedding import CacheEmbedding
from core.index.keyword_table_index.keyword_table_index import KeywordTableIndex, KeywordTableConfig
from core.index.vector_index.vector_index import VectorIndex
from core.model_providers.error import LLMBadRequestError, ProviderTokenNotInitError
from core.model_providers.model_factory import ModelFactory
from extensions.ext_database import db
from models.dataset import Dataset, DocumentSegment
from models.dataset import Dataset, DocumentSegment, Document


class DatasetRetrieverToolInput(BaseModel):
@@ -27,6 +29,10 @@ class DatasetRetrieverTool(BaseTool):
    tenant_id: str
    dataset_id: str
    k: int = 3
    conversation_message_task: ConversationMessageTask
    return_resource: str
    retriever_from: str

    @classmethod
    def from_dataset(cls, dataset: Dataset, **kwargs):
@@ -86,7 +92,7 @@ class DatasetRetrieverTool(BaseTool):
        if self.k > 0:
            documents = vector_index.search(
                query,
                search_type='similarity',
                search_type='similarity_score_threshold',
                search_kwargs={
                    'k': self.k
                }
@@ -94,8 +100,12 @@ class DatasetRetrieverTool(BaseTool):
        else:
            documents = []

        hit_callback = DatasetIndexToolCallbackHandler(dataset.id)
        hit_callback = DatasetIndexToolCallbackHandler(dataset.id, self.conversation_message_task)
        hit_callback.on_tool_end(documents)
        document_score_list = {}
        if dataset.indexing_technique != "economy":
            for item in documents:
                document_score_list[item.metadata['doc_id']] = item.metadata['score']
        document_context_list = []
        index_node_ids = [document.metadata['doc_id'] for document in documents]
        segments = DocumentSegment.query.filter(DocumentSegment.dataset_id == self.dataset_id,
@@ -112,9 +122,43 @@ class DatasetRetrieverTool(BaseTool):
                                                  float('inf')))
            for segment in sorted_segments:
                if segment.answer:
                    document_context_list.append(f'question:{segment.content} \nanswer:{segment.answer}')
                    document_context_list.append(f'question:{segment.content} answer:{segment.answer}')
                else:
                    document_context_list.append(segment.content)
            if self.return_resource:
                context_list = []
                resource_number = 1
                for segment in sorted_segments:
                    context = {}
                    document = Document.query.filter(Document.id == segment.document_id,
                                                     Document.enabled == True,
                                                     Document.archived == False,
                                                     ).first()
                    if dataset and document:
                        source = {
                            'position': resource_number,
                            'dataset_id': dataset.id,
                            'dataset_name': dataset.name,
                            'document_id': document.id,
                            'document_name': document.name,
                            'data_source_type': document.data_source_type,
                            'segment_id': segment.id,
                            'retriever_from': self.retriever_from
                        }
                        if dataset.indexing_technique != "economy":
                            source['score'] = document_score_list.get(segment.index_node_id)
                        if self.retriever_from == 'dev':
                            source['hit_count'] = segment.hit_count
                            source['word_count'] = segment.word_count
                            source['segment_position'] = segment.position
                            source['index_node_hash'] = segment.index_node_hash
                        if segment.answer:
                            source['content'] = f'question:{segment.content} \nanswer:{segment.answer}'
                        else:
                            source['content'] = segment.content
                        context_list.append(source)
                        resource_number += 1
                hit_callback.return_retriever_resource_info(context_list)

        return str("\n".join(document_context_list))
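
For illustration, a single entry in the `context_list` handed to `hit_callback.return_retriever_resource_info()` (later persisted as a `DatasetRetrieverResource` row and exposed under `retriever_resources`) might look like the sketch below. Every value is made up, and the `hit_count`/`word_count`/`segment_position`/`index_node_hash` keys would only be present when `retriever_from` is `'dev'`.

```python
# Hypothetical citation entry built by DatasetRetrieverTool when return_resource is enabled
example_source = {
    'position': 1,
    'dataset_id': 'b9f1c0e2-0000-0000-0000-000000000000',   # illustrative UUID
    'dataset_name': 'Product FAQ',
    'document_id': '2a7d45c1-0000-0000-0000-000000000000',
    'document_name': 'faq.pdf',
    'data_source_type': 'upload_file',
    'segment_id': '91c3f0aa-0000-0000-0000-000000000000',
    'retriever_from': 'web_app',
    'score': 0.82,   # only set for non-"economy" datasets
    'content': 'question:How do I reset my password? \nanswer:Use the account settings page.',
}
```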

@@ -0,0 +1,54 @@
"""add_dataset_retriever_resource

Revision ID: 6dcb43972bdc
Revises: 4bcffcd64aa4
Create Date: 2023-09-06 16:51:27.385844

"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision = '6dcb43972bdc'
down_revision = '4bcffcd64aa4'
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('dataset_retriever_resources',
        sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
        sa.Column('message_id', postgresql.UUID(), nullable=False),
        sa.Column('position', sa.Integer(), nullable=False),
        sa.Column('dataset_id', postgresql.UUID(), nullable=False),
        sa.Column('dataset_name', sa.Text(), nullable=False),
        sa.Column('document_id', postgresql.UUID(), nullable=False),
        sa.Column('document_name', sa.Text(), nullable=False),
        sa.Column('data_source_type', sa.Text(), nullable=False),
        sa.Column('segment_id', postgresql.UUID(), nullable=False),
        sa.Column('score', sa.Float(), nullable=True),
        sa.Column('content', sa.Text(), nullable=False),
        sa.Column('hit_count', sa.Integer(), nullable=True),
        sa.Column('word_count', sa.Integer(), nullable=True),
        sa.Column('segment_position', sa.Integer(), nullable=True),
        sa.Column('index_node_hash', sa.Text(), nullable=True),
        sa.Column('retriever_from', sa.Text(), nullable=False),
        sa.Column('created_by', postgresql.UUID(), nullable=False),
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
        sa.PrimaryKeyConstraint('id', name='dataset_retriever_resource_pkey')
    )
    with op.batch_alter_table('dataset_retriever_resources', schema=None) as batch_op:
        batch_op.create_index('dataset_retriever_resource_message_id_idx', ['message_id'], unique=False)

    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('dataset_retriever_resources', schema=None) as batch_op:
        batch_op.drop_index('dataset_retriever_resource_message_id_idx')

    op.drop_table('dataset_retriever_resources')
    # ### end Alembic commands ###

@@ -0,0 +1,32 @@
"""add_app_config_retriever_resource

Revision ID: 77e83833755c
Revises: 6dcb43972bdc
Create Date: 2023-09-06 17:26:40.311927

"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '77e83833755c'
down_revision = '6dcb43972bdc'
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
        batch_op.add_column(sa.Column('retriever_resource', sa.Text(), nullable=True))

    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('app_model_configs', schema=None) as batch_op:
        batch_op.drop_column('retriever_resource')

    # ### end Alembic commands ###
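
Both new migrations chain off the existing revision `4bcffcd64aa4` (`4bcffcd64aa4` → `6dcb43972bdc` → `77e83833755c`). Assuming the usual Flask-Migrate setup in the API project, they are applied with `flask db upgrade`.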
@ -1,4 +1,5 @@
|
||||
import json
|
||||
from json import JSONDecodeError
|
||||
|
||||
from flask import current_app, request
|
||||
from flask_login import UserMixin
|
||||
@ -90,6 +91,7 @@ class AppModelConfig(db.Model):
|
||||
pre_prompt = db.Column(db.Text)
|
||||
agent_mode = db.Column(db.Text)
|
||||
sensitive_word_avoidance = db.Column(db.Text)
|
||||
retriever_resource = db.Column(db.Text)
|
||||
|
||||
@property
|
||||
def app(self):
|
||||
@ -114,6 +116,11 @@ class AppModelConfig(db.Model):
|
||||
return json.loads(self.speech_to_text) if self.speech_to_text \
|
||||
else {"enabled": False}
|
||||
|
||||
@property
|
||||
def retriever_resource_dict(self) -> dict:
|
||||
return json.loads(self.retriever_resource) if self.retriever_resource \
|
||||
else {"enabled": False}
|
||||
|
||||
@property
|
||||
def more_like_this_dict(self) -> dict:
|
||||
return json.loads(self.more_like_this) if self.more_like_this else {"enabled": False}
|
||||
@ -140,6 +147,7 @@ class AppModelConfig(db.Model):
|
||||
"suggested_questions": self.suggested_questions_list,
|
||||
"suggested_questions_after_answer": self.suggested_questions_after_answer_dict,
|
||||
"speech_to_text": self.speech_to_text_dict,
|
||||
"retriever_resource": self.retriever_resource,
|
||||
"more_like_this": self.more_like_this_dict,
|
||||
"sensitive_word_avoidance": self.sensitive_word_avoidance_dict,
|
||||
"model": self.model_dict,
|
||||
@ -164,7 +172,8 @@ class AppModelConfig(db.Model):
|
||||
self.user_input_form = json.dumps(model_config['user_input_form'])
|
||||
self.pre_prompt = model_config['pre_prompt']
|
||||
self.agent_mode = json.dumps(model_config['agent_mode'])
|
||||
|
||||
self.retriever_resource = json.dumps(model_config['retriever_resource']) \
|
||||
if model_config.get('retriever_resource') else None
|
||||
return self
|
||||
|
||||
def copy(self):
|
||||
@@ -318,6 +327,7 @@ class Conversation(db.Model):
            model_config['suggested_questions'] = app_model_config.suggested_questions_list
            model_config['suggested_questions_after_answer'] = app_model_config.suggested_questions_after_answer_dict
            model_config['speech_to_text'] = app_model_config.speech_to_text_dict
            model_config['retriever_resource'] = app_model_config.retriever_resource_dict
            model_config['more_like_this'] = app_model_config.more_like_this_dict
            model_config['sensitive_word_avoidance'] = app_model_config.sensitive_word_avoidance_dict
            model_config['user_input_form'] = app_model_config.user_input_form_list
@@ -476,6 +486,11 @@ class Message(db.Model):
        return db.session.query(MessageAgentThought).filter(MessageAgentThought.message_id == self.id) \
            .order_by(MessageAgentThought.position.asc()).all()

    @property
    def retriever_resources(self):
        return db.session.query(DatasetRetrieverResource).filter(DatasetRetrieverResource.message_id == self.id) \
            .order_by(DatasetRetrieverResource.position.asc()).all()


class MessageFeedback(db.Model):
    __tablename__ = 'message_feedbacks'
@@ -719,3 +734,31 @@ class MessageAgentThought(db.Model):
    created_by_role = db.Column(db.String, nullable=False)
    created_by = db.Column(UUID, nullable=False)
    created_at = db.Column(db.DateTime, nullable=False, server_default=db.func.current_timestamp())


class DatasetRetrieverResource(db.Model):
    __tablename__ = 'dataset_retriever_resources'
    __table_args__ = (
        db.PrimaryKeyConstraint('id', name='dataset_retriever_resource_pkey'),
        db.Index('dataset_retriever_resource_message_id_idx', 'message_id'),
    )

    id = db.Column(UUID, nullable=False, server_default=db.text('uuid_generate_v4()'))
    message_id = db.Column(UUID, nullable=False)
    position = db.Column(db.Integer, nullable=False)
    dataset_id = db.Column(UUID, nullable=False)
    dataset_name = db.Column(db.Text, nullable=False)
    document_id = db.Column(UUID, nullable=False)
    document_name = db.Column(db.Text, nullable=False)
    data_source_type = db.Column(db.Text, nullable=False)
    segment_id = db.Column(UUID, nullable=False)
    score = db.Column(db.Float, nullable=True)
    content = db.Column(db.Text, nullable=False)
    hit_count = db.Column(db.Integer, nullable=True)
    word_count = db.Column(db.Integer, nullable=True)
    segment_position = db.Column(db.Integer, nullable=True)
    index_node_hash = db.Column(db.Text, nullable=True)
    retriever_from = db.Column(db.Text, nullable=False)
    created_by = db.Column(UUID, nullable=False)
    created_at = db.Column(db.DateTime, nullable=False, server_default=db.func.current_timestamp())
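The new dataset_retriever_resources table stores one row per retrieved chunk, ordered by position within a message, which is what Message.retriever_resources reads back. A hypothetical sketch of persisting one retrieval hit with this model; the field names come from the model above, while the surrounding objects (message, dataset, document, segment, account) and their values are assumptions for illustration:

# Hypothetical example: persisting one retrieval hit for a message.
resource = DatasetRetrieverResource(
    message_id=message.id,
    position=1,
    dataset_id=dataset.id,
    dataset_name=dataset.name,
    document_id=document.id,
    document_name=document.name,
    data_source_type='upload_file',
    segment_id=segment.id,
    score=0.82,
    content=segment.content,
    hit_count=segment.hit_count,
    word_count=segment.word_count,
    segment_position=segment.position,
    index_node_hash=segment.index_node_hash,
    retriever_from='dev',
    created_by=account.id,
)
db.session.add(resource)
db.session.commit()

# Message.retriever_resources then returns these rows ordered by position.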
@@ -130,6 +130,21 @@ class AppModelConfigService:
        if not isinstance(config["speech_to_text"]["enabled"], bool):
            raise ValueError("enabled in speech_to_text must be of boolean type")

        # retriever resource
        if 'retriever_resource' not in config or not config["retriever_resource"]:
            config["retriever_resource"] = {
                "enabled": False
            }

        if not isinstance(config["retriever_resource"], dict):
            raise ValueError("retriever_resource must be of dict type")

        if "enabled" not in config["retriever_resource"] or not config["retriever_resource"]["enabled"]:
            config["retriever_resource"]["enabled"] = False

        if not isinstance(config["retriever_resource"]["enabled"], bool):
            raise ValueError("enabled in retriever_resource must be of boolean type")

        # more_like_this
        if 'more_like_this' not in config or not config["more_like_this"]:
            config["more_like_this"] = {
@@ -216,8 +231,8 @@ class AppModelConfigService:
        variables = []
        for item in config["user_input_form"]:
            key = list(item.keys())[0]
            if key not in ["text-input", "select"]:
                raise ValueError("Keys in user_input_form list can only be 'text-input' or 'select'")
            if key not in ["text-input", "select", "paragraph"]:
                raise ValueError("Keys in user_input_form list can only be 'text-input', 'paragraph' or 'select'")

            form_item = item[key]
            if 'label' not in form_item:
@@ -327,6 +342,7 @@ class AppModelConfigService:
            "suggested_questions": config["suggested_questions"],
            "suggested_questions_after_answer": config["suggested_questions_after_answer"],
            "speech_to_text": config["speech_to_text"],
            "retriever_resource": config["retriever_resource"],
            "more_like_this": config["more_like_this"],
            "sensitive_word_avoidance": config["sensitive_word_avoidance"],
            "model": {
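The validation above effectively guarantees that every saved app config carries a retriever_resource block, defaulting to disabled when the caller omits it. A simplified, illustrative sketch of that rule in isolation; the helper name is made up for this example and it condenses the service's checks:

# Illustrative helper mirroring the default applied by AppModelConfigService.
def normalize_retriever_resource(config: dict) -> dict:
    if 'retriever_resource' not in config or not config['retriever_resource']:
        config['retriever_resource'] = {'enabled': False}
    if not isinstance(config['retriever_resource'], dict):
        raise ValueError('retriever_resource must be of dict type')
    config['retriever_resource'].setdefault('enabled', False)
    return config

assert normalize_retriever_resource({})['retriever_resource'] == {'enabled': False}
assert normalize_retriever_resource({'retriever_resource': {'enabled': True}})['retriever_resource']['enabled'] is True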
@ -11,7 +11,8 @@ from sqlalchemy import and_
|
||||
|
||||
from core.completion import Completion
|
||||
from core.conversation_message_task import PubHandler, ConversationTaskStoppedException
|
||||
from core.model_providers.error import LLMBadRequestError, LLMAPIConnectionError, LLMAPIUnavailableError, LLMRateLimitError, \
|
||||
from core.model_providers.error import LLMBadRequestError, LLMAPIConnectionError, LLMAPIUnavailableError, \
|
||||
LLMRateLimitError, \
|
||||
LLMAuthorizationError, ProviderTokenNotInitError, QuotaExceededError, ModelCurrentlyNotSupportError
|
||||
from extensions.ext_database import db
|
||||
from extensions.ext_redis import redis_client
|
||||
@ -34,7 +35,7 @@ class CompletionService:
|
||||
inputs = args['inputs']
|
||||
query = args['query']
|
||||
|
||||
if not query:
|
||||
if app_model.mode != 'completion' and not query:
|
||||
raise ValueError('query is required')
|
||||
|
||||
query = query.replace('\x00', '')
|
||||
@ -95,6 +96,7 @@ class CompletionService:
|
||||
|
||||
app_model_config_model = app_model_config.model_dict
|
||||
app_model_config_model['completion_params'] = completion_params
|
||||
app_model_config.retriever_resource = json.dumps({'enabled': True})
|
||||
|
||||
app_model_config = app_model_config.copy()
|
||||
app_model_config.model = json.dumps(app_model_config_model)
|
||||
@ -145,7 +147,8 @@ class CompletionService:
|
||||
'user': user,
|
||||
'conversation': conversation,
|
||||
'streaming': streaming,
|
||||
'is_model_config_override': is_model_config_override
|
||||
'is_model_config_override': is_model_config_override,
|
||||
'retriever_from': args['retriever_from'] if 'retriever_from' in args else 'dev'
|
||||
})
|
||||
|
||||
generate_worker_thread.start()
|
||||
@ -169,7 +172,8 @@ class CompletionService:
|
||||
@classmethod
|
||||
def generate_worker(cls, flask_app: Flask, generate_task_id: str, app_model: App, app_model_config: AppModelConfig,
|
||||
query: str, inputs: dict, user: Union[Account, EndUser],
|
||||
conversation: Conversation, streaming: bool, is_model_config_override: bool):
|
||||
conversation: Conversation, streaming: bool, is_model_config_override: bool,
|
||||
retriever_from: str = 'dev'):
|
||||
with flask_app.app_context():
|
||||
try:
|
||||
if conversation:
|
||||
@ -188,6 +192,7 @@ class CompletionService:
|
||||
conversation=conversation,
|
||||
streaming=streaming,
|
||||
is_override=is_model_config_override,
|
||||
retriever_from=retriever_from
|
||||
)
|
||||
except ConversationTaskStoppedException:
|
||||
pass
|
||||
@ -347,8 +352,8 @@ class CompletionService:
|
||||
if value not in options:
|
||||
raise ValueError(f"{variable} in input form must be one of the following: {options}")
|
||||
else:
|
||||
if 'max_length' in variable:
|
||||
max_length = variable['max_length']
|
||||
if 'max_length' in input_config:
|
||||
max_length = input_config['max_length']
|
||||
if len(value) > max_length:
|
||||
raise ValueError(f'{variable} in input form must be less than {max_length} characters')
|
||||
|
||||
@ -400,7 +405,11 @@ class CompletionService:
|
||||
elif event == 'chain':
|
||||
yield "data: " + json.dumps(cls.get_chain_response_data(result.get('data'))) + "\n\n"
|
||||
elif event == 'agent_thought':
|
||||
yield "data: " + json.dumps(cls.get_agent_thought_response_data(result.get('data'))) + "\n\n"
|
||||
yield "data: " + json.dumps(
|
||||
cls.get_agent_thought_response_data(result.get('data'))) + "\n\n"
|
||||
elif event == 'message_end':
|
||||
yield "data: " + json.dumps(
|
||||
cls.get_message_end_data(result.get('data'))) + "\n\n"
|
||||
elif event == 'ping':
|
||||
yield "event: ping\n\n"
|
||||
else:
|
||||
@ -432,6 +441,20 @@ class CompletionService:
|
||||
|
||||
return response_data
|
||||
|
||||
@classmethod
|
||||
def get_message_end_data(cls, data: dict):
|
||||
response_data = {
|
||||
'event': 'message_end',
|
||||
'task_id': data.get('task_id'),
|
||||
'id': data.get('message_id')
|
||||
}
|
||||
if 'retriever_resources' in data:
|
||||
response_data['retriever_resources'] = data.get('retriever_resources')
|
||||
if data.get('mode') == 'chat':
|
||||
response_data['conversation_id'] = data.get('conversation_id')
|
||||
|
||||
return response_data
|
||||
|
||||
@classmethod
|
||||
def get_chain_response_data(cls, data: dict):
|
||||
response_data = {
|
||||
|
||||
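On the streaming API, the visible effect of the CompletionService changes above is a message_end event that can now carry retriever_resources for citation display. A hedged sketch of the resulting SSE frame, modelled on get_message_end_data in the diff; the concrete identifier values and the example resource entry are invented:

# Illustrative only: shape of the message_end SSE frame with citation data.
import json

data = {
    'task_id': 'task-123',        # made-up identifiers
    'message_id': 'msg-456',
    'mode': 'chat',
    'conversation_id': 'conv-789',
    'retriever_resources': [
        {'position': 1, 'dataset_name': 'FAQ', 'document_name': 'faq.md', 'score': 0.82, 'content': '...'},
    ],
}

response_data = {
    'event': 'message_end',
    'task_id': data.get('task_id'),
    'id': data.get('message_id'),
}
if 'retriever_resources' in data:
    response_data['retriever_resources'] = data.get('retriever_resources')
if data.get('mode') == 'chat':
    response_data['conversation_id'] = data.get('conversation_id')

sse_frame = "data: " + json.dumps(response_data) + "\n\n"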
@@ -2,7 +2,7 @@ version: '3.1'
services:
  # API service
  api:
    image: langgenius/dify-api:0.3.20
    image: langgenius/dify-api:0.3.22
    restart: always
    environment:
      # Startup mode, 'api' starts the API server.
@@ -114,12 +114,9 @@ services:
      # The sample rate for Sentry profiles. Default: `1.0`
      SENTRY_PROFILES_SAMPLE_RATE: 1.0
    depends_on:
      db:
        condition: service_healthy
      redis:
        condition: service_healthy
      weaviate:
        condition: service_started
      - db
      - redis
      - weaviate
    volumes:
      # Mount the storage directory to the container, for storing user files.
      - ./volumes/app/storage:/app/api/storage
@@ -127,7 +124,7 @@ services:
  # worker service
  # The Celery worker for processing the queue.
  worker:
    image: langgenius/dify-api:0.3.20
    image: langgenius/dify-api:0.3.22
    restart: always
    environment:
      # Startup mode, 'worker' starts the Celery worker for processing the queue.
@@ -170,19 +167,16 @@ services:
      # the api-key for resend (https://resend.com)
      RESEND_API_KEY: ''
    depends_on:
      db:
        condition: service_healthy
      redis:
        condition: service_healthy
      weaviate:
        condition: service_started
      - db
      - redis
      - weaviate
    volumes:
      # Mount the storage directory to the container, for storing user files.
      - ./volumes/app/storage:/app/api/storage

  # Frontend web application.
  web:
    image: langgenius/dify-web:0.3.20
    image: langgenius/dify-web:0.3.22
    restart: always
    environment:
      EDITION: SELF_HOSTED
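Note that the compose change above replaces the health-checked depends_on blocks with a plain service list, so the api and worker containers may now start before Postgres, Redis, or Weaviate are ready to accept connections. If that matters in a deployment, one option is an application-side wait before startup; a minimal stand-alone sketch using only the Python standard library, where the host names and ports are assumptions based on the default compose services:

# Illustrative sketch: block until TCP ports are reachable before starting.
import socket
import time

def wait_for(host: str, port: int, timeout: float = 60.0) -> None:
    deadline = time.time() + timeout
    while True:
        try:
            with socket.create_connection((host, port), timeout=2):
                return  # port is accepting connections
        except OSError:
            if time.time() > deadline:
                raise TimeoutError(f'{host}:{port} not reachable after {timeout}s')
            time.sleep(1)

for service in (('db', 5432), ('redis', 6379), ('weaviate', 8080)):
    wait_for(*service)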
|
||||
|
||||
@ -2,6 +2,7 @@
|
||||
import type { FC } from 'react'
|
||||
import React, { useEffect, useState } from 'react'
|
||||
import { useTranslation } from 'react-i18next'
|
||||
import { useContext } from 'use-context-selector'
|
||||
import ModalFoot from '../modal-foot'
|
||||
import type { Options } from '../config-select'
|
||||
import ConfigSelect from '../config-select'
|
||||
@ -11,6 +12,7 @@ import s from './style.module.css'
|
||||
import Toast from '@/app/components/base/toast'
|
||||
import type { PromptVariable } from '@/models/debug'
|
||||
import { getNewVar } from '@/utils/var'
|
||||
import ConfigContext from '@/context/debug-configuration'
|
||||
|
||||
import Modal from '@/app/components/base/modal'
|
||||
|
||||
@ -28,6 +30,7 @@ const ConfigModal: FC<IConfigModalProps> = ({
|
||||
onClose,
|
||||
onConfirm,
|
||||
}) => {
|
||||
const { modelConfig } = useContext(ConfigContext)
|
||||
const { t } = useTranslation()
|
||||
const { type, name, key, options, max_length } = payload || getNewVar('')
|
||||
|
||||
@ -41,7 +44,7 @@ const ConfigModal: FC<IConfigModalProps> = ({
|
||||
}
|
||||
}
|
||||
|
||||
const isStringInput = tempType === 'string'
|
||||
const isStringInput = tempType === 'string' || tempType === 'paragraph'
|
||||
const title = isStringInput ? t('appDebug.variableConig.maxLength') : t('appDebug.variableConig.options')
|
||||
|
||||
// string type
|
||||
@ -93,22 +96,24 @@ const ConfigModal: FC<IConfigModalProps> = ({
|
||||
<div className='mb-2'>
|
||||
<div className={s.title}>{t('appDebug.variableConig.fieldType')}</div>
|
||||
<div className='flex space-x-2'>
|
||||
<SelectTypeItem type='string' selected={isStringInput} onClick={handleTypeChange('string')} />
|
||||
<SelectTypeItem type='select' selected={!isStringInput} onClick={handleTypeChange('select')} />
|
||||
<SelectTypeItem type='string' selected={tempType === 'string'} onClick={handleTypeChange('string')} />
|
||||
<SelectTypeItem type='paragraph' selected={tempType === 'paragraph'} onClick={handleTypeChange('paragraph')} />
|
||||
<SelectTypeItem type='select' selected={tempType === 'select'} onClick={handleTypeChange('select')} />
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className='mt-6'>
|
||||
<div className={s.title}>{title}</div>
|
||||
{isStringInput
|
||||
? (
|
||||
<ConfigString value={tempMaxLength} onChange={setTempMaxValue} />
|
||||
)
|
||||
: (
|
||||
<ConfigSelect options={tempOptions} onChange={setTempOptions} />
|
||||
)}
|
||||
</div>
|
||||
|
||||
{tempType !== 'paragraph' && (
|
||||
<div className='mt-6'>
|
||||
<div className={s.title}>{title}</div>
|
||||
{isStringInput
|
||||
? (
|
||||
<ConfigString modelId={modelConfig.model_id} value={tempMaxLength} onChange={setTempMaxValue} />
|
||||
)
|
||||
: (
|
||||
<ConfigSelect options={tempOptions} onChange={setTempOptions} />
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
<ModalFoot
|
||||
onConfirm={handleConfirm}
|
||||
@ -1,9 +1,10 @@
|
||||
'use client'
|
||||
import type { FC } from 'react'
|
||||
import React from 'react'
|
||||
import React, { useEffect } from 'react'
|
||||
|
||||
export type IConfigStringProps = {
|
||||
value: number | undefined
|
||||
modelId: string
|
||||
onChange: (value: number | undefined) => void
|
||||
}
|
||||
|
||||
@ -13,6 +14,11 @@ const ConfigString: FC<IConfigStringProps> = ({
|
||||
value,
|
||||
onChange,
|
||||
}) => {
|
||||
useEffect(() => {
|
||||
if (value && value > MAX_LENGTH)
|
||||
onChange(MAX_LENGTH)
|
||||
}, [value, MAX_LENGTH])
|
||||
|
||||
return (
|
||||
<div>
|
||||
<input
|
||||
@ -21,7 +27,13 @@ const ConfigString: FC<IConfigStringProps> = ({
|
||||
min={1}
|
||||
value={value || ''}
|
||||
onChange={(e) => {
|
||||
const value = Math.max(1, Math.min(MAX_LENGTH, parseInt(e.target.value))) || 1
|
||||
let value = parseInt(e.target.value, 10)
|
||||
if (value > MAX_LENGTH)
|
||||
value = MAX_LENGTH
|
||||
|
||||
else if (value < 1)
|
||||
value = 1
|
||||
|
||||
onChange(value)
|
||||
}}
|
||||
className="w-full px-3 text-sm leading-9 text-gray-900 border-0 rounded-lg grow h-9 bg-gray-50 focus:outline-none focus:ring-1 focus:ring-inset focus:ring-gray-200"
|
||||
|
||||
@ -8,7 +8,7 @@ import type { Timeout } from 'ahooks/lib/useRequest/src/types'
|
||||
import Panel from '../base/feature-panel'
|
||||
import OperationBtn from '../base/operation-btn'
|
||||
import VarIcon from '../base/icons/var-icon'
|
||||
import EditModel from './config-model'
|
||||
import EditModal from './config-modal'
|
||||
import IconTypeIcon from './input-type-icon'
|
||||
import type { IInputTypeIconProps } from './input-type-icon'
|
||||
import s from './style.module.css'
|
||||
@ -52,13 +52,18 @@ const ConfigVar: FC<IConfigVarProps> = ({ promptVariables, readonly, onPromptVar
|
||||
onPromptVariablesChange?.(newPromptVariables)
|
||||
}
|
||||
|
||||
const batchUpdatePromptVariable = (key: string, updateKeys: string[], newValues: any[]) => {
|
||||
const batchUpdatePromptVariable = (key: string, updateKeys: string[], newValues: any[], isParagraph?: boolean) => {
|
||||
const newPromptVariables = promptVariables.map((item) => {
|
||||
if (item.key === key) {
|
||||
const newItem: any = { ...item }
|
||||
updateKeys.forEach((updateKey, i) => {
|
||||
newItem[updateKey] = newValues[i]
|
||||
})
|
||||
if (isParagraph) {
|
||||
delete newItem.max_length
|
||||
delete newItem.options
|
||||
}
|
||||
console.log(newItem)
|
||||
return newItem
|
||||
}
|
||||
|
||||
@ -240,16 +245,15 @@ const ConfigVar: FC<IConfigVarProps> = ({ promptVariables, readonly, onPromptVar
|
||||
)}
|
||||
|
||||
{isShowEditModal && (
|
||||
<EditModel
|
||||
<EditModal
|
||||
payload={currItem as PromptVariable}
|
||||
isShow={isShowEditModal}
|
||||
onClose={hideEditModal}
|
||||
onConfirm={({ type, value }) => {
|
||||
if (type === 'string')
|
||||
batchUpdatePromptVariable(currKey as string, ['type', 'max_length'], [type, value || DEFAULT_VALUE_MAX_LEN])
|
||||
|
||||
else
|
||||
batchUpdatePromptVariable(currKey as string, ['type', 'options'], [type, value || []])
|
||||
batchUpdatePromptVariable(currKey as string, ['type', 'options'], [type, value || []], type === 'paragraph')
|
||||
|
||||
hideEditModal()
|
||||
}}
|
||||
|
||||
@ -13,6 +13,14 @@ const IconMap = (type: IInputTypeIconProps['type']) => {
|
||||
<path fillRule="evenodd" clipRule="evenodd" d="M3.52593 0.166672H8.47411C8.94367 0.166665 9.33123 0.166659 9.64692 0.192452C9.97481 0.219242 10.2762 0.276738 10.5593 0.420991C10.9984 0.644695 11.3553 1.00165 11.579 1.44069C11.7233 1.72381 11.7808 2.02522 11.8076 2.35311C11.8334 2.6688 11.8334 3.05634 11.8334 3.5259V8.47411C11.8334 8.94367 11.8334 9.33121 11.8076 9.6469C11.7808 9.97479 11.7233 10.2762 11.579 10.5593C11.3553 10.9984 10.9984 11.3553 10.5593 11.579C10.2762 11.7233 9.97481 11.7808 9.64692 11.8076C9.33123 11.8334 8.94369 11.8333 8.47413 11.8333H3.52592C3.05636 11.8333 2.66882 11.8334 2.35312 11.8076C2.02523 11.7808 1.72382 11.7233 1.44071 11.579C1.00167 11.3553 0.644711 10.9984 0.421006 10.5593C0.276753 10.2762 0.219257 9.97479 0.192468 9.6469C0.166674 9.33121 0.16668 8.94366 0.166687 8.4741V3.52591C0.16668 3.05635 0.166674 2.6688 0.192468 2.35311C0.219257 2.02522 0.276753 1.72381 0.421006 1.44069C0.644711 1.00165 1.00167 0.644695 1.44071 0.420991C1.72382 0.276738 2.02523 0.219242 2.35312 0.192452C2.66882 0.166659 3.05637 0.166665 3.52593 0.166672ZM3.08335 3.08334C3.08335 2.76117 3.34452 2.50001 3.66669 2.50001H8.33335C8.65552 2.50001 8.91669 2.76117 8.91669 3.08334C8.91669 3.4055 8.65552 3.66667 8.33335 3.66667H6.58335V8.91667C6.58335 9.23884 6.32219 9.5 6.00002 9.5C5.67785 9.5 5.41669 9.23884 5.41669 8.91667V3.66667H3.66669C3.34452 3.66667 3.08335 3.4055 3.08335 3.08334Z" fill="#98A2B3" />
|
||||
</svg>
|
||||
),
|
||||
paragraph: (
|
||||
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path fillRule="evenodd" clipRule="evenodd" d="M1.16669 5.83329C1.16669 5.51113 1.42785 5.24996 1.75002 5.24996H9.33335C9.65552 5.24996 9.91669 5.51113 9.91669 5.83329C9.91669 6.15546 9.65552 6.41663 9.33335 6.41663H1.75002C1.42785 6.41663 1.16669 6.15546 1.16669 5.83329Z" fill="#98A2B3"/>
|
||||
<path fillRule="evenodd" clipRule="evenodd" d="M1.16669 3.49996C1.16669 3.17779 1.42785 2.91663 1.75002 2.91663H11.6667C11.9889 2.91663 12.25 3.17779 12.25 3.49996C12.25 3.82213 11.9889 4.08329 11.6667 4.08329H1.75002C1.42785 4.08329 1.16669 3.82213 1.16669 3.49996Z" fill="#98A2B3"/>
|
||||
<path fillRule="evenodd" clipRule="evenodd" d="M1.16669 8.16663C1.16669 7.84446 1.42785 7.58329 1.75002 7.58329H11.6667C11.9889 7.58329 12.25 7.84446 12.25 8.16663C12.25 8.48879 11.9889 8.74996 11.6667 8.74996H1.75002C1.42785 8.74996 1.16669 8.48879 1.16669 8.16663Z" fill="#98A2B3"/>
|
||||
<path fillRule="evenodd" clipRule="evenodd" d="M1.16669 10.5C1.16669 10.1778 1.42785 9.91663 1.75002 9.91663H9.33335C9.65552 9.91663 9.91669 10.1778 9.91669 10.5C9.91669 10.8221 9.65552 11.0833 9.33335 11.0833H1.75002C1.42785 11.0833 1.16669 10.8221 1.16669 10.5Z" fill="#98A2B3"/>
|
||||
</svg>
|
||||
),
|
||||
select: (
|
||||
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path fillRule="evenodd" clipRule="evenodd" d="M7.48913 4.08334H3.01083C2.70334 4.08333 2.43804 4.08333 2.21955 4.10118C1.98893 4.12002 1.75955 4.16162 1.53883 4.27408C1.20955 4.44186 0.941831 4.70958 0.774053 5.03886C0.66159 5.25958 0.619989 5.48896 0.601147 5.71958C0.583295 5.93807 0.583304 6.20334 0.583313 6.51084V10.9892C0.583304 11.2967 0.583295 11.5619 0.601147 11.7804C0.619989 12.0111 0.66159 12.2404 0.774053 12.4612C0.941831 12.7904 1.20955 13.0582 1.53883 13.2259C1.75955 13.3384 1.98893 13.38 2.21955 13.3988C2.43803 13.4167 2.70329 13.4167 3.01077 13.4167H7.48912C7.7966 13.4167 8.06193 13.4167 8.28041 13.3988C8.51103 13.38 8.74041 13.3384 8.96113 13.2259C9.29041 13.0582 9.55813 12.7904 9.72591 12.4612C9.83837 12.2404 9.87997 12.0111 9.89882 11.7804C9.91667 11.5619 9.91666 11.2967 9.91665 10.9892V6.51087C9.91666 6.20336 9.91667 5.93808 9.89882 5.71958C9.87997 5.48896 9.83837 5.25958 9.72591 5.03886C9.55813 4.70958 9.29041 4.44186 8.96113 4.27408C8.74041 4.16162 8.51103 4.12002 8.28041 4.10118C8.06192 4.08333 7.79663 4.08333 7.48913 4.08334ZM7.70413 7.70416C7.93193 7.47635 7.93193 7.107 7.70413 6.8792C7.47632 6.65139 7.10697 6.65139 6.87917 6.8792L4.66665 9.09172L3.91246 8.33753C3.68465 8.10973 3.31531 8.10973 3.0875 8.33753C2.8597 8.56534 2.8597 8.93468 3.0875 9.16249L4.25417 10.3292C4.48197 10.557 4.85132 10.557 5.07913 10.3292L7.70413 7.70416Z" fill="#98A2B3" />
|
||||
|
||||
@ -1,11 +1,12 @@
|
||||
'use client'
|
||||
import React, { FC } from 'react'
|
||||
import type { FC } from 'react'
|
||||
import React from 'react'
|
||||
import { useTranslation } from 'react-i18next'
|
||||
import cn from 'classnames'
|
||||
|
||||
import s from './style.module.css'
|
||||
|
||||
export interface ISelectTypeItemProps {
|
||||
export type ISelectTypeItemProps = {
|
||||
type: string
|
||||
selected: boolean
|
||||
onClick: () => void
|
||||
@ -14,46 +15,75 @@ export interface ISelectTypeItemProps {
|
||||
const Icon = ({ type, selected }: Partial<ISelectTypeItemProps>) => {
|
||||
switch (type) {
|
||||
case 'select':
|
||||
return selected ? (
|
||||
<svg width="17" height="16" viewBox="0 0 17 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path fillRule="evenodd" clipRule="evenodd" d="M8.89233 4.66669H3.77428C3.42285 4.66668 3.11966 4.66667 2.86995 4.68707C2.60639 4.7086 2.34424 4.75615 2.09199 4.88468C1.71567 5.07642 1.4097 5.38238 1.21796 5.75871C1.08943 6.01096 1.04188 6.27311 1.02035 6.53667C0.999949 6.78638 0.999959 7.08954 0.99997 7.44096V12.5591C0.999959 12.9105 0.999949 13.2137 1.02035 13.4634C1.04188 13.7269 1.08943 13.9891 1.21796 14.2413C1.4097 14.6177 1.71567 14.9236 2.09199 15.1154C2.34424 15.2439 2.60639 15.2914 2.86995 15.313C3.11965 15.3334 3.4228 15.3334 3.77421 15.3334H8.89232C9.24372 15.3334 9.54696 15.3334 9.79666 15.313C10.0602 15.2914 10.3224 15.2439 10.5746 15.1154C10.9509 14.9236 11.2569 14.6177 11.4487 14.2413C11.5772 13.9891 11.6247 13.7269 11.6463 13.4634C11.6667 13.2137 11.6667 12.9105 11.6666 12.559V7.44101C11.6667 7.08957 11.6667 6.78639 11.6463 6.53667C11.6247 6.27311 11.5772 6.01096 11.4487 5.75871C11.2569 5.38238 10.9509 5.07642 10.5746 4.88468C10.3224 4.75615 10.0602 4.7086 9.79666 4.68707C9.54695 4.66667 9.24376 4.66668 8.89233 4.66669ZM9.13804 8.80476C9.39839 8.54441 9.39839 8.1223 9.13804 7.86195C8.87769 7.6016 8.45558 7.6016 8.19523 7.86195L5.66664 10.3905L4.80471 9.52862C4.54436 9.26827 4.12225 9.26827 3.8619 9.52862C3.60155 9.78897 3.60155 10.2111 3.8619 10.4714L5.19523 11.8048C5.45558 12.0651 5.87769 12.0651 6.13804 11.8048L9.13804 8.80476Z" fill="#155EEF" />
|
||||
<path d="M12.8923 0.666688H7.77427C7.42285 0.666676 7.11966 0.666666 6.86995 0.687068C6.60639 0.708602 6.34424 0.756146 6.09199 0.884676C5.71567 1.07642 5.40971 1.38238 5.21796 1.75871C5.08943 2.01096 5.04188 2.27311 5.02035 2.53667C5.00206 2.76053 5.00018 3.02734 4.99999 3.33337L8.92055 3.33336C9.2463 3.33329 9.59951 3.3332 9.90523 3.35818C10.2512 3.38645 10.7084 3.45642 11.1799 3.69668C11.8071 4.01626 12.3171 4.5262 12.6367 5.1534C12.8769 5.62495 12.9469 6.08209 12.9752 6.42811C13.0001 6.73384 13.0001 7.08704 13 7.4128L13 11.3333C13.306 11.3332 13.5728 11.3313 13.7967 11.313C14.0602 11.2914 14.3224 11.2439 14.5746 11.1154C14.9509 10.9236 15.2569 10.6177 15.4487 10.2413C15.5772 9.98908 15.6247 9.72694 15.6463 9.46338C15.6667 9.21368 15.6666 8.91052 15.6666 8.55912V3.44101C15.6666 3.0896 15.6667 2.78637 15.6463 2.53667C15.6247 2.27311 15.5772 2.01096 15.4487 1.75871C15.2569 1.38238 14.9509 1.07642 14.5746 0.884676C14.3224 0.756146 14.0602 0.708602 13.7967 0.687068C13.5469 0.666666 13.2438 0.666676 12.8923 0.666688Z" fill="#155EEF" />
|
||||
</svg>
|
||||
|
||||
) : (
|
||||
<svg width="17" height="16" viewBox="0 0 17 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path fillRule="evenodd" clipRule="evenodd" d="M8.89233 4.66667H3.77428C3.42285 4.66666 3.11966 4.66665 2.86995 4.68705C2.60639 4.70859 2.34424 4.75613 2.09199 4.88466C1.71567 5.07641 1.4097 5.38237 1.21796 5.75869C1.08943 6.01095 1.04188 6.27309 1.02035 6.53665C0.999949 6.78636 0.999959 7.08953 0.99997 7.44095V12.559C0.999959 12.9105 0.999949 13.2137 1.02035 13.4634C1.04188 13.7269 1.08943 13.9891 1.21796 14.2413C1.4097 14.6176 1.71567 14.9236 2.09199 15.1154C2.34424 15.2439 2.60639 15.2914 2.86995 15.313C3.11965 15.3334 3.4228 15.3334 3.77421 15.3333H8.89232C9.24372 15.3334 9.54696 15.3334 9.79666 15.313C10.0602 15.2914 10.3224 15.2439 10.5746 15.1154C10.9509 14.9236 11.2569 14.6176 11.4487 14.2413C11.5772 13.9891 11.6247 13.7269 11.6463 13.4634C11.6667 13.2136 11.6667 12.9105 11.6666 12.559V7.44099C11.6667 7.08955 11.6667 6.78637 11.6463 6.53665C11.6247 6.27309 11.5772 6.01095 11.4487 5.75869C11.2569 5.38237 10.9509 5.07641 10.5746 4.88466C10.3224 4.75613 10.0602 4.70859 9.79666 4.68705C9.54695 4.66665 9.24376 4.66666 8.89233 4.66667ZM9.13804 8.80474C9.39839 8.54439 9.39839 8.12228 9.13804 7.86193C8.87769 7.60159 8.45558 7.60159 8.19523 7.86193L5.66664 10.3905L4.80471 9.5286C4.54436 9.26825 4.12225 9.26825 3.8619 9.5286C3.60155 9.78895 3.60155 10.2111 3.8619 10.4714L5.19523 11.8047C5.45558 12.0651 5.87769 12.0651 6.13804 11.8047L9.13804 8.80474Z" fill="#667085" />
|
||||
<path d="M12.8923 0.666672H7.77427C7.42285 0.666661 7.11966 0.666651 6.86995 0.687053C6.60639 0.708587 6.34424 0.756131 6.09199 0.884661C5.71567 1.07641 5.40971 1.38237 5.21796 1.75869C5.08943 2.01095 5.04188 2.27309 5.02035 2.53665C5.00206 2.76051 5.00018 3.02733 4.99999 3.33336L8.92055 3.33335C9.2463 3.33327 9.59951 3.33319 9.90523 3.35816C10.2512 3.38644 10.7084 3.4564 11.1799 3.69667C11.8071 4.01625 12.3171 4.52618 12.6367 5.15339C12.8769 5.62493 12.9469 6.08208 12.9752 6.42809C13.0001 6.73382 13.0001 7.08702 13 7.41279L13 11.3333C13.306 11.3331 13.5728 11.3313 13.7967 11.313C14.0602 11.2914 14.3224 11.2439 14.5746 11.1154C14.9509 10.9236 15.2569 10.6176 15.4487 10.2413C15.5772 9.98907 15.6247 9.72692 15.6463 9.46336C15.6667 9.21366 15.6666 8.91051 15.6666 8.5591V3.44099C15.6666 3.08959 15.6667 2.78635 15.6463 2.53665C15.6247 2.27309 15.5772 2.01095 15.4487 1.75869C15.2569 1.38237 14.9509 1.07641 14.5746 0.884661C14.3224 0.756131 14.0602 0.708587 13.7967 0.687053C13.5469 0.666651 13.2438 0.666661 12.8923 0.666672Z" fill="#667085" />
|
||||
</svg>
|
||||
|
||||
)
|
||||
return selected
|
||||
? (
|
||||
<svg width="17" height="16" viewBox="0 0 17 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path fillRule="evenodd" clipRule="evenodd" d="M8.89233 4.66669H3.77428C3.42285 4.66668 3.11966 4.66667 2.86995 4.68707C2.60639 4.7086 2.34424 4.75615 2.09199 4.88468C1.71567 5.07642 1.4097 5.38238 1.21796 5.75871C1.08943 6.01096 1.04188 6.27311 1.02035 6.53667C0.999949 6.78638 0.999959 7.08954 0.99997 7.44096V12.5591C0.999959 12.9105 0.999949 13.2137 1.02035 13.4634C1.04188 13.7269 1.08943 13.9891 1.21796 14.2413C1.4097 14.6177 1.71567 14.9236 2.09199 15.1154C2.34424 15.2439 2.60639 15.2914 2.86995 15.313C3.11965 15.3334 3.4228 15.3334 3.77421 15.3334H8.89232C9.24372 15.3334 9.54696 15.3334 9.79666 15.313C10.0602 15.2914 10.3224 15.2439 10.5746 15.1154C10.9509 14.9236 11.2569 14.6177 11.4487 14.2413C11.5772 13.9891 11.6247 13.7269 11.6463 13.4634C11.6667 13.2137 11.6667 12.9105 11.6666 12.559V7.44101C11.6667 7.08957 11.6667 6.78639 11.6463 6.53667C11.6247 6.27311 11.5772 6.01096 11.4487 5.75871C11.2569 5.38238 10.9509 5.07642 10.5746 4.88468C10.3224 4.75615 10.0602 4.7086 9.79666 4.68707C9.54695 4.66667 9.24376 4.66668 8.89233 4.66669ZM9.13804 8.80476C9.39839 8.54441 9.39839 8.1223 9.13804 7.86195C8.87769 7.6016 8.45558 7.6016 8.19523 7.86195L5.66664 10.3905L4.80471 9.52862C4.54436 9.26827 4.12225 9.26827 3.8619 9.52862C3.60155 9.78897 3.60155 10.2111 3.8619 10.4714L5.19523 11.8048C5.45558 12.0651 5.87769 12.0651 6.13804 11.8048L9.13804 8.80476Z" fill="#155EEF" />
|
||||
<path d="M12.8923 0.666688H7.77427C7.42285 0.666676 7.11966 0.666666 6.86995 0.687068C6.60639 0.708602 6.34424 0.756146 6.09199 0.884676C5.71567 1.07642 5.40971 1.38238 5.21796 1.75871C5.08943 2.01096 5.04188 2.27311 5.02035 2.53667C5.00206 2.76053 5.00018 3.02734 4.99999 3.33337L8.92055 3.33336C9.2463 3.33329 9.59951 3.3332 9.90523 3.35818C10.2512 3.38645 10.7084 3.45642 11.1799 3.69668C11.8071 4.01626 12.3171 4.5262 12.6367 5.1534C12.8769 5.62495 12.9469 6.08209 12.9752 6.42811C13.0001 6.73384 13.0001 7.08704 13 7.4128L13 11.3333C13.306 11.3332 13.5728 11.3313 13.7967 11.313C14.0602 11.2914 14.3224 11.2439 14.5746 11.1154C14.9509 10.9236 15.2569 10.6177 15.4487 10.2413C15.5772 9.98908 15.6247 9.72694 15.6463 9.46338C15.6667 9.21368 15.6666 8.91052 15.6666 8.55912V3.44101C15.6666 3.0896 15.6667 2.78637 15.6463 2.53667C15.6247 2.27311 15.5772 2.01096 15.4487 1.75871C15.2569 1.38238 14.9509 1.07642 14.5746 0.884676C14.3224 0.756146 14.0602 0.708602 13.7967 0.687068C13.5469 0.666666 13.2438 0.666676 12.8923 0.666688Z" fill="#155EEF" />
|
||||
</svg>
|
||||
)
|
||||
: (
|
||||
<svg width="17" height="16" viewBox="0 0 17 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path fillRule="evenodd" clipRule="evenodd" d="M8.89233 4.66667H3.77428C3.42285 4.66666 3.11966 4.66665 2.86995 4.68705C2.60639 4.70859 2.34424 4.75613 2.09199 4.88466C1.71567 5.07641 1.4097 5.38237 1.21796 5.75869C1.08943 6.01095 1.04188 6.27309 1.02035 6.53665C0.999949 6.78636 0.999959 7.08953 0.99997 7.44095V12.559C0.999959 12.9105 0.999949 13.2137 1.02035 13.4634C1.04188 13.7269 1.08943 13.9891 1.21796 14.2413C1.4097 14.6176 1.71567 14.9236 2.09199 15.1154C2.34424 15.2439 2.60639 15.2914 2.86995 15.313C3.11965 15.3334 3.4228 15.3334 3.77421 15.3333H8.89232C9.24372 15.3334 9.54696 15.3334 9.79666 15.313C10.0602 15.2914 10.3224 15.2439 10.5746 15.1154C10.9509 14.9236 11.2569 14.6176 11.4487 14.2413C11.5772 13.9891 11.6247 13.7269 11.6463 13.4634C11.6667 13.2136 11.6667 12.9105 11.6666 12.559V7.44099C11.6667 7.08955 11.6667 6.78637 11.6463 6.53665C11.6247 6.27309 11.5772 6.01095 11.4487 5.75869C11.2569 5.38237 10.9509 5.07641 10.5746 4.88466C10.3224 4.75613 10.0602 4.70859 9.79666 4.68705C9.54695 4.66665 9.24376 4.66666 8.89233 4.66667ZM9.13804 8.80474C9.39839 8.54439 9.39839 8.12228 9.13804 7.86193C8.87769 7.60159 8.45558 7.60159 8.19523 7.86193L5.66664 10.3905L4.80471 9.5286C4.54436 9.26825 4.12225 9.26825 3.8619 9.5286C3.60155 9.78895 3.60155 10.2111 3.8619 10.4714L5.19523 11.8047C5.45558 12.0651 5.87769 12.0651 6.13804 11.8047L9.13804 8.80474Z" fill="#667085" />
|
||||
<path d="M12.8923 0.666672H7.77427C7.42285 0.666661 7.11966 0.666651 6.86995 0.687053C6.60639 0.708587 6.34424 0.756131 6.09199 0.884661C5.71567 1.07641 5.40971 1.38237 5.21796 1.75869C5.08943 2.01095 5.04188 2.27309 5.02035 2.53665C5.00206 2.76051 5.00018 3.02733 4.99999 3.33336L8.92055 3.33335C9.2463 3.33327 9.59951 3.33319 9.90523 3.35816C10.2512 3.38644 10.7084 3.4564 11.1799 3.69667C11.8071 4.01625 12.3171 4.52618 12.6367 5.15339C12.8769 5.62493 12.9469 6.08208 12.9752 6.42809C13.0001 6.73382 13.0001 7.08702 13 7.41279L13 11.3333C13.306 11.3331 13.5728 11.3313 13.7967 11.313C14.0602 11.2914 14.3224 11.2439 14.5746 11.1154C14.9509 10.9236 15.2569 10.6176 15.4487 10.2413C15.5772 9.98907 15.6247 9.72692 15.6463 9.46336C15.6667 9.21366 15.6666 8.91051 15.6666 8.5591V3.44099C15.6666 3.08959 15.6667 2.78635 15.6463 2.53665C15.6247 2.27309 15.5772 2.01095 15.4487 1.75869C15.2569 1.38237 14.9509 1.07641 14.5746 0.884661C14.3224 0.756131 14.0602 0.708587 13.7967 0.687053C13.5469 0.666651 13.2438 0.666661 12.8923 0.666672Z" fill="#667085" />
|
||||
</svg>
|
||||
)
|
||||
case 'paragraph':
|
||||
return selected
|
||||
? (
|
||||
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<g id="align-left">
|
||||
<g id="Solid">
|
||||
<path fillRule="evenodd" clipRule="evenodd" d="M1.33334 6.66665C1.33334 6.29846 1.63182 5.99998 2.00001 5.99998H10.6667C11.0349 5.99998 11.3333 6.29846 11.3333 6.66665C11.3333 7.03484 11.0349 7.33331 10.6667 7.33331H2.00001C1.63182 7.33331 1.33334 7.03484 1.33334 6.66665Z" fill="#155EEF"/>
|
||||
<path fillRule="evenodd" clipRule="evenodd" d="M1.33334 3.99998C1.33334 3.63179 1.63182 3.33331 2.00001 3.33331H13.3333C13.7015 3.33331 14 3.63179 14 3.99998C14 4.36817 13.7015 4.66665 13.3333 4.66665H2.00001C1.63182 4.66665 1.33334 4.36817 1.33334 3.99998Z" fill="#155EEF"/>
|
||||
<path fillRule="evenodd" clipRule="evenodd" d="M1.33334 9.33331C1.33334 8.96512 1.63182 8.66665 2.00001 8.66665H13.3333C13.7015 8.66665 14 8.96512 14 9.33331C14 9.7015 13.7015 9.99998 13.3333 9.99998H2.00001C1.63182 9.99998 1.33334 9.7015 1.33334 9.33331Z" fill="#155EEF"/>
|
||||
<path fillRule="evenodd" clipRule="evenodd" d="M1.33334 12C1.33334 11.6318 1.63182 11.3333 2.00001 11.3333H10.6667C11.0349 11.3333 11.3333 11.6318 11.3333 12C11.3333 12.3682 11.0349 12.6666 10.6667 12.6666H2.00001C1.63182 12.6666 1.33334 12.3682 1.33334 12Z" fill="#155EEF"/>
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
||||
)
|
||||
: (
|
||||
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<g id="align-left">
|
||||
<g id="Solid">
|
||||
<path fillRule="evenodd" clipRule="evenodd" d="M1.33334 6.66666C1.33334 6.29847 1.63182 5.99999 2.00001 5.99999H10.6667C11.0349 5.99999 11.3333 6.29847 11.3333 6.66666C11.3333 7.03485 11.0349 7.33333 10.6667 7.33333H2.00001C1.63182 7.33333 1.33334 7.03485 1.33334 6.66666Z" fill="#667085"/>
|
||||
<path fillRule="evenodd" clipRule="evenodd" d="M1.33334 3.99999C1.33334 3.63181 1.63182 3.33333 2.00001 3.33333H13.3333C13.7015 3.33333 14 3.63181 14 3.99999C14 4.36818 13.7015 4.66666 13.3333 4.66666H2.00001C1.63182 4.66666 1.33334 4.36818 1.33334 3.99999Z" fill="#667085"/>
|
||||
<path fillRule="evenodd" clipRule="evenodd" d="M1.33334 9.33333C1.33334 8.96514 1.63182 8.66666 2.00001 8.66666H13.3333C13.7015 8.66666 14 8.96514 14 9.33333C14 9.70152 13.7015 10 13.3333 10H2.00001C1.63182 10 1.33334 9.70152 1.33334 9.33333Z" fill="#667085"/>
|
||||
<path fillRule="evenodd" clipRule="evenodd" d="M1.33334 12C1.33334 11.6318 1.63182 11.3333 2.00001 11.3333H10.6667C11.0349 11.3333 11.3333 11.6318 11.3333 12C11.3333 12.3682 11.0349 12.6667 10.6667 12.6667H2.00001C1.63182 12.6667 1.33334 12.3682 1.33334 12Z" fill="#667085"/>
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
||||
)
|
||||
case 'string':
|
||||
default:
|
||||
return selected ? (
|
||||
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path fillRule="evenodd" clipRule="evenodd" d="M5.17246 1.33333H10.8275C11.3642 1.33332 11.8071 1.33331 12.1679 1.36279C12.5426 1.39341 12.8871 1.45912 13.2106 1.62398C13.7124 1.87964 14.1203 2.28759 14.376 2.78935C14.5409 3.11291 14.6066 3.45738 14.6372 3.83211C14.6667 4.19291 14.6667 4.63581 14.6667 5.17245V10.8275C14.6667 11.3642 14.6667 11.8071 14.6372 12.1679C14.6066 12.5426 14.5409 12.8871 14.376 13.2106C14.1203 13.7124 13.7124 14.1203 13.2106 14.376C12.8871 14.5409 12.5426 14.6066 12.1679 14.6372C11.8071 14.6667 11.3642 14.6667 10.8275 14.6667H5.17245C4.63581 14.6667 4.1929 14.6667 3.83211 14.6372C3.45738 14.6066 3.11291 14.5409 2.78935 14.376C2.28759 14.1203 1.87964 13.7124 1.62398 13.2106C1.45912 12.8871 1.39341 12.5426 1.36279 12.1679C1.33331 11.8071 1.33332 11.3642 1.33333 10.8275V5.17245C1.33332 4.63581 1.33331 4.1929 1.36279 3.83211C1.39341 3.45738 1.45912 3.11291 1.62398 2.78935C1.87964 2.28759 2.28759 1.87964 2.78935 1.62398C3.11291 1.45912 3.45738 1.39341 3.83211 1.36279C4.1929 1.33331 4.63583 1.33332 5.17246 1.33333ZM4.66666 4.66666C4.66666 4.29847 4.96514 3.99999 5.33333 3.99999H10.6667C11.0349 3.99999 11.3333 4.29847 11.3333 4.66666C11.3333 5.03485 11.0349 5.33333 10.6667 5.33333H8.66666V11.3333C8.66666 11.7015 8.36818 12 7.99999 12C7.6318 12 7.33333 11.7015 7.33333 11.3333V5.33333H5.33333C4.96514 5.33333 4.66666 5.03485 4.66666 4.66666Z" fill="#155EEF" />
|
||||
</svg>
|
||||
|
||||
) : (<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path fillRule="evenodd" clipRule="evenodd" d="M5.17246 1.33331H10.8275C11.3642 1.33331 11.8071 1.3333 12.1679 1.36278C12.5426 1.39339 12.8871 1.4591 13.2106 1.62396C13.7124 1.87963 14.1203 2.28757 14.376 2.78934C14.5409 3.1129 14.6066 3.45737 14.6372 3.8321C14.6667 4.19289 14.6667 4.63579 14.6667 5.17243V10.8275C14.6667 11.3642 14.6667 11.8071 14.6372 12.1679C14.6066 12.5426 14.5409 12.8871 14.376 13.2106C14.1203 13.7124 13.7124 14.1203 13.2106 14.376C12.8871 14.5409 12.5426 14.6066 12.1679 14.6372C11.8071 14.6667 11.3642 14.6667 10.8275 14.6666H5.17245C4.63581 14.6667 4.1929 14.6667 3.83211 14.6372C3.45738 14.6066 3.11291 14.5409 2.78935 14.376C2.28759 14.1203 1.87964 13.7124 1.62398 13.2106C1.45912 12.8871 1.39341 12.5426 1.36279 12.1679C1.33331 11.8071 1.33332 11.3642 1.33333 10.8275V5.17244C1.33332 4.6358 1.33331 4.19289 1.36279 3.8321C1.39341 3.45737 1.45912 3.1129 1.62398 2.78934C1.87964 2.28757 2.28759 1.87963 2.78935 1.62396C3.11291 1.4591 3.45738 1.39339 3.83211 1.36278C4.1929 1.3333 4.63583 1.33331 5.17246 1.33331ZM4.66666 4.66665C4.66666 4.29846 4.96514 3.99998 5.33333 3.99998H10.6667C11.0349 3.99998 11.3333 4.29846 11.3333 4.66665C11.3333 5.03484 11.0349 5.33331 10.6667 5.33331H8.66666V11.3333C8.66666 11.7015 8.36818 12 7.99999 12C7.6318 12 7.33333 11.7015 7.33333 11.3333V5.33331H5.33333C4.96514 5.33331 4.66666 5.03484 4.66666 4.66665Z" fill="#667085" />
|
||||
</svg>)
|
||||
return selected
|
||||
? (
|
||||
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path fillRule="evenodd" clipRule="evenodd" d="M5.17246 1.33333H10.8275C11.3642 1.33332 11.8071 1.33331 12.1679 1.36279C12.5426 1.39341 12.8871 1.45912 13.2106 1.62398C13.7124 1.87964 14.1203 2.28759 14.376 2.78935C14.5409 3.11291 14.6066 3.45738 14.6372 3.83211C14.6667 4.19291 14.6667 4.63581 14.6667 5.17245V10.8275C14.6667 11.3642 14.6667 11.8071 14.6372 12.1679C14.6066 12.5426 14.5409 12.8871 14.376 13.2106C14.1203 13.7124 13.7124 14.1203 13.2106 14.376C12.8871 14.5409 12.5426 14.6066 12.1679 14.6372C11.8071 14.6667 11.3642 14.6667 10.8275 14.6667H5.17245C4.63581 14.6667 4.1929 14.6667 3.83211 14.6372C3.45738 14.6066 3.11291 14.5409 2.78935 14.376C2.28759 14.1203 1.87964 13.7124 1.62398 13.2106C1.45912 12.8871 1.39341 12.5426 1.36279 12.1679C1.33331 11.8071 1.33332 11.3642 1.33333 10.8275V5.17245C1.33332 4.63581 1.33331 4.1929 1.36279 3.83211C1.39341 3.45738 1.45912 3.11291 1.62398 2.78935C1.87964 2.28759 2.28759 1.87964 2.78935 1.62398C3.11291 1.45912 3.45738 1.39341 3.83211 1.36279C4.1929 1.33331 4.63583 1.33332 5.17246 1.33333ZM4.66666 4.66666C4.66666 4.29847 4.96514 3.99999 5.33333 3.99999H10.6667C11.0349 3.99999 11.3333 4.29847 11.3333 4.66666C11.3333 5.03485 11.0349 5.33333 10.6667 5.33333H8.66666V11.3333C8.66666 11.7015 8.36818 12 7.99999 12C7.6318 12 7.33333 11.7015 7.33333 11.3333V5.33333H5.33333C4.96514 5.33333 4.66666 5.03485 4.66666 4.66666Z" fill="#155EEF" />
|
||||
</svg>
|
||||
)
|
||||
: (<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path fillRule="evenodd" clipRule="evenodd" d="M5.17246 1.33331H10.8275C11.3642 1.33331 11.8071 1.3333 12.1679 1.36278C12.5426 1.39339 12.8871 1.4591 13.2106 1.62396C13.7124 1.87963 14.1203 2.28757 14.376 2.78934C14.5409 3.1129 14.6066 3.45737 14.6372 3.8321C14.6667 4.19289 14.6667 4.63579 14.6667 5.17243V10.8275C14.6667 11.3642 14.6667 11.8071 14.6372 12.1679C14.6066 12.5426 14.5409 12.8871 14.376 13.2106C14.1203 13.7124 13.7124 14.1203 13.2106 14.376C12.8871 14.5409 12.5426 14.6066 12.1679 14.6372C11.8071 14.6667 11.3642 14.6667 10.8275 14.6666H5.17245C4.63581 14.6667 4.1929 14.6667 3.83211 14.6372C3.45738 14.6066 3.11291 14.5409 2.78935 14.376C2.28759 14.1203 1.87964 13.7124 1.62398 13.2106C1.45912 12.8871 1.39341 12.5426 1.36279 12.1679C1.33331 11.8071 1.33332 11.3642 1.33333 10.8275V5.17244C1.33332 4.6358 1.33331 4.19289 1.36279 3.8321C1.39341 3.45737 1.45912 3.1129 1.62398 2.78934C1.87964 2.28757 2.28759 1.87963 2.78935 1.62396C3.11291 1.4591 3.45738 1.39339 3.83211 1.36278C4.1929 1.3333 4.63583 1.33331 5.17246 1.33331ZM4.66666 4.66665C4.66666 4.29846 4.96514 3.99998 5.33333 3.99998H10.6667C11.0349 3.99998 11.3333 4.29846 11.3333 4.66665C11.3333 5.03484 11.0349 5.33331 10.6667 5.33331H8.66666V11.3333C8.66666 11.7015 8.36818 12 7.99999 12C7.6318 12 7.33333 11.7015 7.33333 11.3333V5.33331H5.33333C4.96514 5.33331 4.66666 5.03484 4.66666 4.66665Z" fill="#667085" />
|
||||
</svg>)
|
||||
}
|
||||
}
|
||||
|
||||
const SelectTypeItem: FC<ISelectTypeItemProps> = ({
|
||||
type,
|
||||
selected,
|
||||
onClick
|
||||
onClick,
|
||||
}) => {
|
||||
const { t } = useTranslation()
|
||||
const typeName = t(`appDebug.variableConig.${type}`)
|
||||
|
||||
return (
|
||||
<div
|
||||
className={cn(s.item, selected && s.selected, 'space-x-2')}
|
||||
className={cn(s.item, selected && s.selected, 'space-y-1')}
|
||||
onClick={onClick}
|
||||
>
|
||||
<Icon type={type} selected={selected} />
|
||||
<div className='shrink-0'>
|
||||
<Icon type={type} selected={selected} />
|
||||
</div>
|
||||
<span className={cn(s.text)}>{typeName}</span>
|
||||
</div>
|
||||
)
|
||||
|
||||
@ -1,9 +1,10 @@
|
||||
.item {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
height: 32px;
|
||||
width: 133px;
|
||||
padding-left: 12px;
|
||||
height: 58px;
|
||||
width: 98px;
|
||||
border-radius: 8px;
|
||||
border: 1px solid #EAECF0;
|
||||
box-shadow: 0px 1px 2px rgba(16, 24, 40, 0.05);
|
||||
|
||||
@ -297,7 +297,6 @@ const Debug: FC<IDebug> = ({
|
||||
setChatList([])
|
||||
}, [controlClearChatMessage])
|
||||
|
||||
const [completionQuery, setCompletionQuery] = useState('')
|
||||
const [completionRes, setCompletionRes] = useState('')
|
||||
|
||||
const sendTextCompletion = async () => {
|
||||
@ -309,11 +308,6 @@ const Debug: FC<IDebug> = ({
|
||||
if (!checkCanSend())
|
||||
return
|
||||
|
||||
if (!completionQuery) {
|
||||
logError(t('appDebug.errorMessage.queryRequired'))
|
||||
return false
|
||||
}
|
||||
|
||||
const postDatasets = dataSets.map(({ id }) => ({
|
||||
dataset: {
|
||||
enabled: true,
|
||||
@ -342,7 +336,6 @@ const Debug: FC<IDebug> = ({
|
||||
|
||||
const data = {
|
||||
inputs,
|
||||
query: completionQuery,
|
||||
model_config: postModelConfig,
|
||||
}
|
||||
|
||||
@ -380,8 +373,6 @@ const Debug: FC<IDebug> = ({
|
||||
</div>
|
||||
<PromptValuePanel
|
||||
appType={mode as AppType}
|
||||
value={completionQuery}
|
||||
onChange={setCompletionQuery}
|
||||
onSend={sendTextCompletion}
|
||||
/>
|
||||
</div>
|
||||
|
||||
@ -6,6 +6,7 @@ import { useContext } from 'use-context-selector'
|
||||
import { usePathname } from 'next/navigation'
|
||||
import produce from 'immer'
|
||||
import { useBoolean } from 'ahooks'
|
||||
import cn from 'classnames'
|
||||
import Button from '../../base/button'
|
||||
import Loading from '../../base/loading'
|
||||
import type { CompletionParams, Inputs, ModelConfig, MoreLikeThisConfig, PromptConfig, PromptVariable } from '@/models/debug'
|
||||
@ -24,6 +25,7 @@ import { promptVariablesToUserInputsForm, userInputsFormToPromptVariables } from
|
||||
import { fetchDatasets } from '@/service/datasets'
|
||||
import AccountSetting from '@/app/components/header/account-setting'
|
||||
import { useProviderContext } from '@/context/provider-context'
|
||||
import { AppType } from '@/types/app'
|
||||
|
||||
const Configuration: FC = () => {
|
||||
const { t } = useTranslation()
|
||||
@ -193,11 +195,16 @@ const Configuration: FC = () => {
|
||||
})
|
||||
}, [appId])
|
||||
|
||||
const cannotPublish = mode === AppType.completion && !modelConfig.configs.prompt_template
|
||||
const saveAppConfig = async () => {
|
||||
const modelId = modelConfig.model_id
|
||||
const promptTemplate = modelConfig.configs.prompt_template
|
||||
const promptVariables = modelConfig.configs.prompt_variables
|
||||
|
||||
if (cannotPublish) {
|
||||
notify({ type: 'error', message: t('appDebug.otherError.promptNoBeEmpty'), duration: 3000 })
|
||||
return
|
||||
}
|
||||
const postDatasets = dataSets.map(({ id }) => ({
|
||||
dataset: {
|
||||
enabled: true,
|
||||
@ -311,7 +318,7 @@ const Configuration: FC = () => {
|
||||
/>
|
||||
<div className='mx-3 w-[1px] h-[14px] bg-gray-200'></div>
|
||||
<Button onClick={() => setShowConfirm(true)} className='shrink-0 mr-2 w-[70px] !h-8 !text-[13px] font-medium'>{t('appDebug.operation.resetConfig')}</Button>
|
||||
<Button type='primary' onClick={saveAppConfig} className='shrink-0 w-[70px] !h-8 !text-[13px] font-medium'>{t('appDebug.operation.applyConfig')}</Button>
|
||||
<Button type='primary' onClick={saveAppConfig} className={cn(cannotPublish && '!bg-primary-200 !cursor-not-allowed', 'shrink-0 w-[70px] !h-8 !text-[13px] font-medium')}>{t('appDebug.operation.applyConfig')}</Button>
|
||||
</div>
|
||||
</div>
|
||||
<div className='flex grow h-[200px]'>
|
||||
|
||||
@ -14,11 +14,10 @@ import Select from '@/app/components/base/select'
|
||||
import { DEFAULT_VALUE_MAX_LEN } from '@/config'
|
||||
import Button from '@/app/components/base/button'
|
||||
import { ChevronDown, ChevronRight } from '@/app/components/base/icons/src/vender/line/arrows'
|
||||
import Tooltip from '@/app/components/base/tooltip-plus'
|
||||
|
||||
export type IPromptValuePanelProps = {
|
||||
appType: AppType
|
||||
value?: string
|
||||
onChange?: (value: string) => void
|
||||
onSend?: () => void
|
||||
}
|
||||
|
||||
@ -32,12 +31,10 @@ const starIcon = (
|
||||
|
||||
const PromptValuePanel: FC<IPromptValuePanelProps> = ({
|
||||
appType,
|
||||
value,
|
||||
onChange,
|
||||
onSend,
|
||||
}) => {
|
||||
const { t } = useTranslation()
|
||||
const { modelConfig, inputs, setInputs } = useContext(ConfigContext)
|
||||
const { modelConfig, inputs, setInputs, mode } = useContext(ConfigContext)
|
||||
const [promptPreviewCollapse, setPromptPreviewCollapse] = useState(false)
|
||||
const [userInputFieldCollapse, setUserInputFieldCollapse] = useState(false)
|
||||
const promptTemplate = modelConfig.configs.prompt_template
|
||||
@ -53,6 +50,19 @@ const PromptValuePanel: FC<IPromptValuePanelProps> = ({
|
||||
return obj
|
||||
})()
|
||||
|
||||
const canNotRun = mode === AppType.completion && !modelConfig.configs.prompt_template
|
||||
const renderRunButton = () => {
|
||||
return (
|
||||
<Button
|
||||
type="primary"
|
||||
disabled={canNotRun}
|
||||
onClick={() => onSend && onSend()}
|
||||
className="w-[80px] !h-8">
|
||||
<PlayIcon className="shrink-0 w-4 h-4 mr-1" aria-hidden="true" />
|
||||
<span className='uppercase text-[13px]'>{t('appDebug.inputs.run')}</span>
|
||||
</Button>
|
||||
)
|
||||
}
|
||||
const handleInputValueChange = (key: string, value: string) => {
|
||||
if (!(key in promptVariableObj))
|
||||
return
|
||||
@ -65,6 +75,14 @@ const PromptValuePanel: FC<IPromptValuePanelProps> = ({
|
||||
setInputs(newInputs)
|
||||
}
|
||||
|
||||
const onClear = () => {
|
||||
const newInputs: Record<string, any> = {}
|
||||
promptVariables.forEach((item) => {
|
||||
newInputs[item.key] = ''
|
||||
})
|
||||
setInputs(newInputs)
|
||||
}
|
||||
|
||||
const promptPreview = (
|
||||
<div className='py-3 rounded-t-xl bg-indigo-25'>
|
||||
<div className="px-4">
|
||||
@ -125,83 +143,78 @@ const PromptValuePanel: FC<IPromptValuePanelProps> = ({
|
||||
<div className="mt-1 text-xs leading-normal text-gray-500">{t('appDebug.inputs.completionVarTip')}</div>
|
||||
)}
|
||||
</div>
|
||||
{
|
||||
!userInputFieldCollapse && (
|
||||
<>
|
||||
{
|
||||
promptVariables.length > 0
|
||||
? (
|
||||
<div className="space-y-3 ">
|
||||
{promptVariables.map(({ key, name, type, options, max_length, required }) => (
|
||||
<div key={key} className="flex items-center justify-between">
|
||||
<div className="mr-1 shrink-0 w-[120px] text-sm text-gray-900 break-all">{name || key}</div>
|
||||
{type === 'select'
|
||||
? (
|
||||
<Select
|
||||
className='w-full'
|
||||
defaultValue={inputs[key] as string}
|
||||
onSelect={(i) => { handleInputValueChange(key, i.value as string) }}
|
||||
items={(options || []).map(i => ({ name: i, value: i }))}
|
||||
allowSearch={false}
|
||||
bgClassName='bg-gray-50'
|
||||
overlayClassName='z-[11]'
|
||||
/>
|
||||
)
|
||||
: (
|
||||
<input
|
||||
className="w-full px-3 text-sm leading-9 text-gray-900 border-0 rounded-lg grow h-9 bg-gray-50 focus:outline-none focus:ring-1 focus:ring-inset focus:ring-gray-200"
|
||||
placeholder={`${name}${!required ? `(${t('appDebug.variableTable.optional')})` : ''}`}
|
||||
type="text"
|
||||
value={inputs[key] ? `${inputs[key]}` : ''}
|
||||
onChange={(e) => { handleInputValueChange(key, e.target.value) }}
|
||||
maxLength={max_length || DEFAULT_VALUE_MAX_LEN}
|
||||
/>
|
||||
)}
|
||||
{!userInputFieldCollapse && (
|
||||
<>
|
||||
{
|
||||
promptVariables.length > 0
|
||||
? (
|
||||
<div className="space-y-3 ">
|
||||
{promptVariables.map(({ key, name, type, options, max_length, required }) => (
|
||||
<div key={key} className="flex justify-between">
|
||||
<div className="mr-1 pt-2 shrink-0 w-[120px] text-sm text-gray-900">{name || key}</div>
|
||||
{type === 'select' && (
|
||||
<Select
|
||||
className='w-full'
|
||||
defaultValue={inputs[key] as string}
|
||||
onSelect={(i) => { handleInputValueChange(key, i.value as string) }}
|
||||
items={(options || []).map(i => ({ name: i, value: i }))}
|
||||
allowSearch={false}
|
||||
bgClassName='bg-gray-50'
|
||||
/>
|
||||
)
|
||||
}
|
||||
{type === 'string' && (
|
||||
<input
|
||||
className="w-full px-3 text-sm leading-9 text-gray-900 border-0 rounded-lg grow h-9 bg-gray-50 focus:outline-none focus:ring-1 focus:ring-inset focus:ring-gray-200"
|
||||
placeholder={`${name}${!required ? `(${t('appDebug.variableTable.optional')})` : ''}`}
|
||||
type="text"
|
||||
value={inputs[key] ? `${inputs[key]}` : ''}
|
||||
onChange={(e) => { handleInputValueChange(key, e.target.value) }}
|
||||
maxLength={max_length || DEFAULT_VALUE_MAX_LEN}
|
||||
/>
|
||||
)}
|
||||
{type === 'paragraph' && (
|
||||
<textarea
|
||||
className="w-full px-3 text-sm leading-9 text-gray-900 border-0 rounded-lg grow h-[120px] bg-gray-50 focus:outline-none focus:ring-1 focus:ring-inset focus:ring-gray-200"
|
||||
placeholder={`${name}${!required ? `(${t('appDebug.variableTable.optional')})` : ''}`}
|
||||
value={inputs[key] ? `${inputs[key]}` : ''}
|
||||
onChange={(e) => { handleInputValueChange(key, e.target.value) }}
|
||||
/>
|
||||
)}
|
||||
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
)
|
||||
: (
|
||||
<div className='text-xs text-gray-500'>{t('appDebug.inputs.noVar')}</div>
|
||||
)
|
||||
}
|
||||
</>
|
||||
)
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
)
|
||||
: (
|
||||
<div className='text-xs text-gray-500'>{t('appDebug.inputs.noVar')}</div>
|
||||
)
|
||||
}
|
||||
</>
|
||||
)
|
||||
}
|
||||
</div>
|
||||
|
||||
{
|
||||
appType === AppType.completion && (
|
||||
<div className='px-4'>
|
||||
<div className="mt-3 border-b border-gray-100"></div>
|
||||
<div className="mt-4">
|
||||
<div>
|
||||
<div className="text-[13px] text-gray-900 font-medium">{t('appDebug.inputs.queryTitle')}</div>
|
||||
<div className="mt-2 mb-4 overflow-hidden border border-gray-200 rounded-lg grow bg-gray-50 ">
|
||||
<div className="px-4 py-2 rounded-t-lg bg-gray-50">
|
||||
<textarea
|
||||
rows={4}
|
||||
className="w-full px-0 text-sm text-gray-900 border-0 bg-gray-50 focus:outline-none placeholder:bg-gray-50"
|
||||
placeholder={t('appDebug.inputs.queryPlaceholder') as string}
|
||||
value={value}
|
||||
onChange={e => onChange && onChange(e.target.value)}
|
||||
></textarea>
|
||||
</div>
|
||||
<div className="flex items-center justify-between px-3 py-2 bg-gray-50">
|
||||
<div className="flex pl-0 space-x-1 sm:pl-2">
|
||||
<span className="bg-gray-100 text-gray-500 text-xs font-medium mr-2 px-2.5 py-0.5 rounded cursor-pointer">{value?.length}</span>
|
||||
</div>
|
||||
<Button
|
||||
type="primary"
|
||||
onClick={() => onSend && onSend()}
|
||||
className="w-[80px] !h-8">
|
||||
<PlayIcon className="shrink-0 w-4 h-4 mr-1" aria-hidden="true" />
|
||||
<span className='uppercase text-[13px]'>{t('appDebug.inputs.run')}</span>
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div>
|
||||
<div className="mt-5 border-b border-gray-100"></div>
|
||||
<div className="flex justify-between mt-4 px-4">
|
||||
<Button
|
||||
className='!h-8 !p-3'
|
||||
onClick={onClear}
|
||||
disabled={false}
|
||||
>
|
||||
<span className='text-[13px]'>{t('common.operation.clear')}</span>
|
||||
</Button>
|
||||
|
||||
{canNotRun
|
||||
? (<Tooltip
|
||||
popupContent={t('appDebug.otherError.promptNoBeEmpty')}
|
||||
>
|
||||
{renderRunButton()}
|
||||
</Tooltip>)
|
||||
: renderRunButton()}
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
|
||||
@ -81,7 +81,7 @@ const getFormattedChatList = (messages: ChatMessage[]) => {
|
||||
messages.forEach((item: ChatMessage) => {
|
||||
newChatList.push({
|
||||
id: `question-${item.id}`,
|
||||
content: item.query,
|
||||
content: item.inputs.query || item.inputs.default_input || item.query, // text generation: item.inputs.query; chat: item.query
|
||||
isAnswer: false,
|
||||
})
|
||||
|
||||
@ -413,7 +413,7 @@ const ConversationList: FC<IConversationList> = ({ logs, appDetail, onRefresh })
|
||||
<tbody className="text-gray-500">
|
||||
{logs.data.map((log) => {
|
||||
const endUser = log.from_end_user_session_id
|
||||
const leftValue = get(log, isChatMode ? 'summary' : 'message.query')
|
||||
const leftValue = get(log, isChatMode ? 'summary' : 'message.inputs.query') || (!isChatMode ? (get(log, 'message.query') || get(log, 'message.inputs.default_input')) : '') || ''
|
||||
const rightValue = get(log, isChatMode ? 'message_count' : 'message.answer')
|
||||
return <tr
|
||||
key={log.id}
|
||||
|
||||
50
web/app/components/base/tooltip-plus/index.tsx
Normal file
@@ -0,0 +1,50 @@
'use client'
import type { FC } from 'react'
import React, { useState } from 'react'
import { PortalToFollowElem, PortalToFollowElemContent, PortalToFollowElemTrigger } from '@/app/components/base/portal-to-follow-elem'

export type TooltipProps = {
  position?: 'top' | 'right' | 'bottom' | 'left'
  triggerMethod?: 'hover' | 'click'
  popupContent: React.ReactNode
  children: React.ReactNode
}

const arrow = (
  <svg className="absolute text-white h-2 w-full left-0 top-full" x="0px" y="0px" viewBox="0 0 255 255"><polygon className="fill-current" points="0,0 127.5,127.5 255,0"></polygon></svg>
)

const Tooltip: FC<TooltipProps> = ({
  position = 'top',
  triggerMethod = 'hover',
  popupContent,
  children,
}) => {
  const [open, setOpen] = useState(false)

  return (
    <PortalToFollowElem
      open={open}
      onOpenChange={setOpen}
      placement={position}
      offset={10}
    >
      <PortalToFollowElemTrigger
        onClick={() => triggerMethod === 'click' && setOpen(v => !v)}
        onMouseEnter={() => triggerMethod === 'hover' && setOpen(true)}
        onMouseLeave={() => triggerMethod === 'hover' && setOpen(false)}
      >
        {children}
      </PortalToFollowElemTrigger>
      <PortalToFollowElemContent
        className="z-[999]"
      >
        <div className='relative px-3 py-2 text-xs font-normal text-gray-700 bg-white rounded-md shadow-lg'>
          {popupContent}
          {arrow}
        </div>
      </PortalToFollowElemContent>
    </PortalToFollowElem>
  )
}

export default React.memo(Tooltip)
|
||||
23
web/app/components/datasets/create/assets/docx.svg
Normal file
@ -0,0 +1,23 @@
|
||||
<svg width="24" height="26" viewBox="0 0 24 26" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<g filter="url(#filter0_d_6785_286)">
|
||||
<path d="M3 5.8C3 4.11984 3 3.27976 3.32698 2.63803C3.6146 2.07354 4.07354 1.6146 4.63803 1.32698C5.27976 1 6.11984 1 7.8 1H14L21 8V18.2C21 19.8802 21 20.7202 20.673 21.362C20.3854 21.9265 19.9265 22.3854 19.362 22.673C18.7202 23 17.8802 23 16.2 23H7.8C6.11984 23 5.27976 23 4.63803 22.673C4.07354 22.3854 3.6146 21.9265 3.32698 21.362C3 20.7202 3 19.8802 3 18.2V5.8Z" fill="#2349A9"/>
|
||||
</g>
|
||||
<path opacity="0.5" d="M14 1L21 8H16C14.8954 8 14 7.10457 14 6V1Z" fill="white"/>
|
||||
<g opacity="0.96">
|
||||
<path d="M8.13338 16C8.13338 16.6188 8.18659 17.1215 7.69589 17.5801C7.4062 17.8508 6.99827 17.9669 6.56078 17.9669H5V14.0331H6.56078C6.99827 14.0331 7.4062 14.1492 7.69589 14.4199C8.18659 14.8785 8.13338 15.3812 8.13338 16ZM7.09877 16C7.09877 15.337 7.06921 15.2265 6.98644 15.116C6.89185 14.9834 6.74996 14.895 6.48983 14.895H6.03461V17.105H6.48983C6.74996 17.105 6.89185 17.0166 6.98644 16.884C7.06921 16.7735 7.09877 16.6685 7.09877 16Z" fill="white"/>
|
||||
<path d="M11.9192 16C11.9192 16.5912 11.937 17.1436 11.4936 17.558C11.1862 17.8453 10.8314 18 10.3171 18C9.80274 18 9.44802 17.8453 9.14059 17.558C8.69719 17.1436 8.71493 16.5912 8.71493 16C8.71493 15.4088 8.69719 14.8564 9.14059 14.442C9.44802 14.1547 9.80274 14 10.3171 14C10.8314 14 11.1862 14.1547 11.4936 14.442C11.937 14.8564 11.9192 15.4088 11.9192 16ZM10.8846 16C10.8846 15.2818 10.8255 15.1492 10.7309 15.0331C10.6541 14.9392 10.5063 14.8619 10.3171 14.8619C10.1279 14.8619 9.9801 14.9392 9.90325 15.0331C9.80865 15.1492 9.74953 15.2818 9.74953 16C9.74953 16.7182 9.80865 16.8453 9.90325 16.9613C9.9801 17.0552 10.1279 17.1381 10.3171 17.1381C10.5063 17.1381 10.6541 17.0552 10.7309 16.9613C10.8255 16.8453 10.8846 16.7182 10.8846 16Z" fill="white"/>
|
||||
<path d="M15.689 16.7182C15.5353 17.5856 14.8909 18 14.0928 18C13.6021 18 13.2296 17.8453 12.9222 17.558C12.4788 17.1436 12.4965 16.5912 12.4965 16C12.4965 15.4088 12.4788 14.8564 12.9222 14.442C13.2296 14.1547 13.6021 14 14.0928 14C14.8909 14 15.5353 14.4144 15.689 15.2818H14.6367C14.5717 15.0608 14.4416 14.8619 14.0987 14.8619C13.9095 14.8619 13.7676 14.9337 13.6908 15.0276C13.5962 15.1436 13.5312 15.2818 13.5312 16C13.5312 16.7182 13.5962 16.8564 13.6908 16.9724C13.7676 17.0663 13.9095 17.1381 14.0987 17.1381C14.4416 17.1381 14.5717 16.9392 14.6367 16.7182H15.689Z" fill="white"/>
|
||||
<path d="M19.5 17.9669H18.3176L17.6259 16.7569L16.9342 17.9669H15.7518L17.0642 15.9503L15.8345 14.0331H17.011L17.6259 15.1436L18.2407 14.0331H19.4172L18.1875 15.9503L19.5 17.9669Z" fill="white"/>
|
||||
</g>
|
||||
<defs>
|
||||
<filter id="filter0_d_6785_286" x="1" y="0" width="22" height="26" filterUnits="userSpaceOnUse" color-interpolation-filters="sRGB">
|
||||
<feFlood flood-opacity="0" result="BackgroundImageFix"/>
|
||||
<feColorMatrix in="SourceAlpha" type="matrix" values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 127 0" result="hardAlpha"/>
|
||||
<feOffset dy="1"/>
|
||||
<feGaussianBlur stdDeviation="1"/>
|
||||
<feColorMatrix type="matrix" values="0 0 0 0 0.0627451 0 0 0 0 0.0941176 0 0 0 0 0.156863 0 0 0 0.05 0"/>
|
||||
<feBlend mode="normal" in2="BackgroundImageFix" result="effect1_dropShadow_6785_286"/>
|
||||
<feBlend mode="normal" in="SourceGraphic" in2="effect1_dropShadow_6785_286" result="shape"/>
|
||||
</filter>
|
||||
</defs>
|
||||
</svg>
|
||||
|
@@ -89,7 +89,9 @@
.fileIcon.csv {
background-image: url(../assets/csv.svg);
}

.fileIcon.docx {
background-image: url(../assets/docx.svg);
}
.fileIcon.xlsx,
.fileIcon.xls {
background-image: url(../assets/xlsx.svg);

@@ -194,7 +194,7 @@ const EmbeddingProcess: FC<Props> = ({ datasetId, batchId, documents = [], index
</div>
<div className={s.progressContainer}>
{indexingStatusBatchDetail.map(indexingStatusDetail => (
<div className={cn(
<div key={indexingStatusDetail.id} className={cn(
s.sourceItem,
indexingStatusDetail.indexing_status === 'error' && s.error,
indexingStatusDetail.indexing_status === 'completed' && s.success,

@@ -98,7 +98,9 @@
.fileIcon.csv {
background-image: url(../assets/csv.svg);
}

.fileIcon.docx {
background-image: url(../assets/docx.svg);
}
.fileIcon.xlsx,
.fileIcon.xls {
background-image: url(../assets/xlsx.svg);

@@ -29,9 +29,9 @@ const ACCEPTS = [
'.md',
'.markdown',
'.txt',
// '.xls',
'.xlsx',
// '.csv',
'.csv',
'.docx',
]

const FileUploader = ({
@@ -250,7 +250,6 @@ const FileUploader = ({
className={cn(
s.file,
fileItem.progress < 100 && s.uploading,
// s.active,
)}
>
{fileItem.progress < 100 && (
@@ -274,33 +273,6 @@ const FileUploader = ({
</div>
</div>
))}
{/* {currentFile && (
<div
// onClick={() => onPreview(currentFile)}
className={cn(
s.file,
uploading && s.uploading,
// s.active,
)}
>
{uploading && (
<div className={s.progressbar} style={{ width: `${percent}%` }}/>
)}
<div className={s.fileInfo}>
<div className={cn(s.fileIcon, s[getFileType(currentFile)])}/>
<div className={s.filename}>{currentFile.name}</div>
<div className={s.size}>{getFileSize(currentFile.size)}</div>
</div>
<div className={s.actionWrapper}>
{uploading && (
<div className={s.percent}>{`${percent}%`}</div>
)}
{!uploading && (
<div className={s.remove} onClick={() => removeFile(index)}/>
)}
</div>
</div>
)} */}
</div>
</div>
)

@@ -65,7 +65,7 @@ const StepOne = ({
const { dataset } = useDatasetDetailContext()
const [showModal, setShowModal] = useState(false)
const [currentFile, setCurrentFile] = useState<File | undefined>()
const [currentNotionPage, setCurrentNotionPage] = useState<Page | undefined>()
const [currentNotionPage, setCurrentNotionPage] = useState<NotionPage | undefined>()
const { t } = useTranslation()

const modalShowHandle = () => setShowModal(true)
@@ -78,7 +78,7 @@ const StepOne = ({
setCurrentFile(undefined)
}

const updateCurrentPage = (page: Page) => {
const updateCurrentPage = (page: NotionPage) => {
setCurrentNotionPage(page)
}

@@ -290,6 +290,10 @@
background-image: url(../assets/csv.svg);
}

.fileIcon.docx {
background-image: url(../assets/docx.svg);
}

.fileIcon.xlsx,
.fileIcon.xls {
background-image: url(../assets/xlsx.svg);

@@ -43,7 +43,7 @@ type ISegmentCardProps = {
scene?: UsageScene
className?: string
archived?: boolean
embeddingAvailable: boolean
embeddingAvailable?: boolean
}

const SegmentCard: FC<ISegmentCardProps> = ({

@@ -239,7 +239,7 @@ const Completed: FC<ICompletedProps> = ({
// the current segment id and whether to show the modal
const [currSegment, setCurrSegment] = useState<{ segInfo?: SegmentDetailModel; showModal: boolean }>({ showModal: false })

const [searchValue, setSearchValue] = useState() // the search value
const [searchValue, setSearchValue] = useState<string>() // the search value
const [selectedStatus, setSelectedStatus] = useState<boolean | 'all'>('all') // the selected status, enabled/disabled/undefined

const [lastSegmentsRes, setLastSegmentsRes] = useState<SegmentsResponse | undefined>(undefined)

@@ -87,6 +87,9 @@
.csvIcon {
background-image: url(~@/assets/csv.svg);
}
.docxIcon {
background-image: url(~@/assets/docx.svg);
}
.statusItemDetail {
@apply h-8 font-medium border border-gray-200 inline-flex items-center rounded-lg pl-3 pr-4 mr-2;
}

@@ -30,9 +30,6 @@ For high-quality text generation, such as articles, summaries, and translations,
)}
</ul>
</Property>
<Property name='query' type='string' key='query'>
User input text content.
</Property>
<Property name='response_mode' type='string' key='response_mode'>
- Blocking type, waiting for execution to complete and returning results. (Requests may be interrupted if the process is long)
- streaming returns. Implementation of streaming return based on SSE (**[Server-Sent Events](https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events/Using_server-sent_events)**).
@@ -44,7 +41,7 @@ For high-quality text generation, such as articles, summaries, and translations,
</Col>
<Col sticky>

<CodeGroup title="Request" tag="POST" label="/completion-messages" targetCode={`curl --location --request POST '${props.appDetail.api_base_url}/completion-messages' \\\n--header 'Authorization: Bearer ENTER-YOUR-SECRET-KEY' \\\n--header 'Content-Type: application/json' \\\n--data-raw '{\n "inputs": ${JSON.stringify(props.inputs)},\n "query": "Hi",\n "response_mode": "streaming"\n "user": "abc-123"\n}'\n`}>
<CodeGroup title="Request" tag="POST" label="/completion-messages" targetCode={`curl --location --request POST '${props.appDetail.api_base_url}/completion-messages' \\\n--header 'Authorization: Bearer ENTER-YOUR-SECRET-KEY' \\\n--header 'Content-Type: application/json' \\\n--data-raw '{\n "inputs": ${JSON.stringify(props.inputs)},\n "response_mode": "streaming"\n "user": "abc-123"\n}'\n`}>

```bash {{ title: 'cURL' }}
curl --location --request POST 'https://cloud.langgenius.dev/api/completion-messages' \
@@ -52,7 +49,6 @@ For high-quality text generation, such as articles, summaries, and translations,
--header 'Content-Type: application/json' \
--data-raw '{
"inputs": {},
"query": "Hi",
"response_mode": "streaming",
"user": "abc-123"
}'
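As a rough, non-authoritative sketch of the streaming response_mode described above (not part of this diff): the endpoint, headers and request body follow the cURL sample, while the exact shape of each SSE payload is not shown here, so the sketch only surfaces the raw `data:` lines.

```ts
// Illustrative only. Reads the SSE stream returned when response_mode is 'streaming'.
async function streamCompletion(apiBaseUrl: string, apiKey: string, inputs: Record<string, any>, query: string) {
  const res = await fetch(`${apiBaseUrl}/completion-messages`, {
    method: 'POST',
    headers: {
      'Authorization': `Bearer ${apiKey}`,
      'Content-Type': 'application/json',
    },
    body: JSON.stringify({ inputs, query, response_mode: 'streaming', user: 'abc-123' }),
  })
  const reader = res.body!.getReader()
  const decoder = new TextDecoder()
  while (true) {
    const { done, value } = await reader.read()
    if (done)
      break
    // Each SSE event arrives as one or more "data: ..." lines.
    decoder.decode(value, { stream: true })
      .split('\n')
      .filter(line => line.startsWith('data:'))
      .forEach(line => console.log(line.slice(5).trim()))
  }
}
```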
|
||||
|
||||
@@ -30,9 +30,6 @@ import { Row, Col, Properties, Property, Heading, SubProperty } from '../md.tsx'
)}
</ul>
</Property>
<Property name='query' type='string' key='query'>
用户输入的文本正文。
</Property>
<Property name='response_mode' type='string' key='response_mode'>
- blocking 阻塞型,等待执行完毕后返回结果。(请求若流程较长可能会被中断)
- streaming 流式返回。基于 SSE(**[Server-Sent Events](https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events/Using_server-sent_events)**)实现流式返回。
@@ -44,7 +41,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty } from '../md.tsx'
</Col>
<Col sticky>

<CodeGroup title="Request" tag="POST" label="/completion-messages" targetCode={`curl --location --request POST '${props.appDetail.api_base_url}/completion-messages' \\\n--header 'Authorization: Bearer ENTER-YOUR-SECRET-KEY' \\\n--header 'Content-Type: application/json' \\\n--data-raw '{\n "inputs": ${JSON.stringify(props.inputs)},\n "query": "Hi",\n "response_mode": "streaming",\n "user": "abc-123"\n}'\n`}>
<CodeGroup title="Request" tag="POST" label="/completion-messages" targetCode={`curl --location --request POST '${props.appDetail.api_base_url}/completion-messages' \\\n--header 'Authorization: Bearer ENTER-YOUR-SECRET-KEY' \\\n--header 'Content-Type: application/json' \\\n--data-raw '{\n "inputs": ${JSON.stringify(props.inputs)},\n "response_mode": "streaming",\n "user": "abc-123"\n}'\n`}>

```bash {{ title: 'cURL' }}
curl --location --request POST 'https://cloud.langgenius.dev/api/completion-messages' \
@@ -52,7 +49,6 @@ import { Row, Col, Properties, Property, Heading, SubProperty } from '../md.tsx'
--header 'Content-Type: application/json' \
--data-raw '{
"inputs": {},
"query": "Hi",
"response_mode": "streaming",
"user": "abc-123"
}'

@@ -100,7 +100,7 @@ const KeyValidator = ({
className='mb-4'
name={form.title}
placeholder={form.placeholder}
value={value[form.key] || ''}
value={value[form.key] as string || ''}
onChange={v => handleChange(form, v)}
onFocus={() => handleFocus(form)}
validating={validating}

@@ -97,10 +97,10 @@ const Welcome: FC<IWelcomeProps> = ({
return (
<div className='space-y-3'>
{promptConfig.prompt_variables.map(item => (
<div className='tablet:flex tablet:!h-9 mobile:space-y-2 tablet:space-y-0 mobile:text-xs tablet:text-sm' key={item.key}>
<label className={`flex-shrink-0 flex items-center mobile:text-gray-700 tablet:text-gray-900 mobile:font-medium pc:font-normal ${s.formLabel}`}>{item.name}</label>
<div className='tablet:flex items-start mobile:space-y-2 tablet:space-y-0 mobile:text-xs tablet:text-sm' key={item.key}>
<label className={`flex-shrink-0 flex items-center tablet:leading-9 mobile:text-gray-700 tablet:text-gray-900 mobile:font-medium pc:font-normal ${s.formLabel}`}>{item.name}</label>
{item.type === 'select'
? (
&& (
<Select
className='w-full'
defaultValue={inputs?.[item.key]}
@@ -109,16 +109,24 @@ const Welcome: FC<IWelcomeProps> = ({
allowSearch={false}
bgClassName='bg-gray-50'
/>
)
: (
<input
placeholder={`${item.name}${!item.required ? `(${t('appDebug.variableTable.optional')})` : ''}`}
value={inputs?.[item.key] || ''}
onChange={(e) => { setInputs({ ...inputs, [item.key]: e.target.value }) }}
className={'w-full flex-grow py-2 pl-3 pr-3 box-border rounded-lg bg-gray-50'}
maxLength={item.max_length || DEFAULT_VALUE_MAX_LEN}
/>
)}
{item.type === 'string' && (
<input
placeholder={`${item.name}${!item.required ? `(${t('appDebug.variableTable.optional')})` : ''}`}
value={inputs?.[item.key] || ''}
onChange={(e) => { setInputs({ ...inputs, [item.key]: e.target.value }) }}
className={'w-full flex-grow py-2 pl-3 pr-3 box-border rounded-lg bg-gray-50'}
maxLength={item.max_length || DEFAULT_VALUE_MAX_LEN}
/>
)}
{item.type === 'paragraph' && (
<textarea
className="w-full h-[104px] flex-grow py-2 pl-3 pr-3 box-border rounded-lg bg-gray-50"
placeholder={`${item.name}${!item.required ? `(${t('appDebug.variableTable.optional')})` : ''}`}
value={inputs?.[item.key] || ''}
onChange={(e) => { setInputs({ ...inputs, [item.key]: e.target.value }) }}
/>
)}
</div>
))}
</div>

@@ -97,10 +97,10 @@ const Welcome: FC<IWelcomeProps> = ({
return (
<div className='space-y-3'>
{promptConfig.prompt_variables.map(item => (
<div className='tablet:flex tablet:!h-9 mobile:space-y-2 tablet:space-y-0 mobile:text-xs tablet:text-sm' key={item.key}>
<label className={`flex-shrink-0 flex items-center mobile:text-gray-700 tablet:text-gray-900 mobile:font-medium pc:font-normal ${s.formLabel}`}>{item.name}</label>
<div className='tablet:flex items-start mobile:space-y-2 tablet:space-y-0 mobile:text-xs tablet:text-sm' key={item.key}>
<label className={`flex-shrink-0 flex items-center tablet:leading-9 mobile:text-gray-700 tablet:text-gray-900 mobile:font-medium pc:font-normal ${s.formLabel}`}>{item.name}</label>
{item.type === 'select'
? (
&& (
<Select
className='w-full'
defaultValue={inputs?.[item.key]}
@@ -110,15 +110,24 @@ const Welcome: FC<IWelcomeProps> = ({
bgClassName='bg-gray-50'
/>
)
: (
<input
placeholder={`${item.name}${!item.required ? `(${t('appDebug.variableTable.optional')})` : ''}`}
value={inputs?.[item.key] || ''}
onChange={(e) => { setInputs({ ...inputs, [item.key]: e.target.value }) }}
className={'w-full flex-grow py-2 pl-3 pr-3 box-border rounded-lg bg-gray-50'}
maxLength={item.max_length || DEFAULT_VALUE_MAX_LEN}
/>
)}
}
{item.type === 'string' && (
<input
placeholder={`${item.name}${!item.required ? `(${t('appDebug.variableTable.optional')})` : ''}`}
value={inputs?.[item.key] || ''}
onChange={(e) => { setInputs({ ...inputs, [item.key]: e.target.value }) }}
className={'w-full flex-grow py-2 pl-3 pr-3 box-border rounded-lg bg-gray-50'}
maxLength={item.max_length || DEFAULT_VALUE_MAX_LEN}
/>
)}
{item.type === 'paragraph' && (
<textarea
className="w-full h-[104px] flex-grow py-2 pl-3 pr-3 box-border rounded-lg bg-gray-50"
placeholder={`${item.name}${!item.required ? `(${t('appDebug.variableTable.optional')})` : ''}`}
value={inputs?.[item.key] || ''}
onChange={(e) => { setInputs({ ...inputs, [item.key]: e.target.value }) }}
/>
)}
</div>
))}
</div>
@@ -23,8 +23,9 @@ import { userInputsFormToPromptVariables } from '@/utils/model-config'
import Res from '@/app/components/share/text-generation/result'
import SavedItems from '@/app/components/app/text-generate/saved-items'
import type { InstalledApp } from '@/models/explore'
import { appDefaultIconBackground } from '@/config'
import { DEFAULT_VALUE_MAX_LEN, appDefaultIconBackground } from '@/config'
import Toast from '@/app/components/base/toast'

const PARALLEL_LIMIT = 5
enum TaskStatus {
pending = 'pending',
@@ -34,7 +35,6 @@ enum TaskStatus {

type TaskParam = {
inputs: Record<string, any>
query: string
}

type Task = {
@@ -65,7 +65,6 @@ const TextGeneration: FC<IMainProps> = ({
const [isCallBatchAPI, setIsCallBatchAPI] = useState(false)
const isInBatchTab = currTab === 'batch'
const [inputs, setInputs] = useState<Record<string, any>>({})
const [query, setQuery] = useState('') // run once query content
const [appId, setAppId] = useState<string>('')
const [siteInfo, setSiteInfo] = useState<SiteInfo | null>(null)
const [promptConfig, setPromptConfig] = useState<PromptConfig | null>(null)
@@ -111,11 +110,10 @@ const TextGeneration: FC<IMainProps> = ({
return {}
const batchCompletionResLatest = getBatchCompletionRes()
const res: Record<string, string> = {}
const { inputs, query } = task.params
const { inputs } = task.params
promptConfig?.prompt_variables.forEach((v) => {
res[v.name] = inputs[v.key]
})
res[t('share.generation.queryTitle')] = query
res[t('share.generation.completionResult')] = batchCompletionResLatest[task.id]
return res
})
@@ -135,9 +133,6 @@ const TextGeneration: FC<IMainProps> = ({
isMapVarName = false
})

if (headerData[varLen] !== t('share.generation.queryTitle'))
isMapVarName = false

if (!isMapVarName) {
notify({ type: 'error', message: t('share.generation.errorMsg.fileStructNotMatch') })
return false
@@ -180,6 +175,8 @@ const TextGeneration: FC<IMainProps> = ({
}
let errorRowIndex = 0
let requiredVarName = ''
let moreThanMaxLengthVarName = ''
let maxLength = 0
payloadData.forEach((item, index) => {
if (errorRowIndex !== 0)
return
@@ -187,6 +184,15 @@ const TextGeneration: FC<IMainProps> = ({
promptConfig?.prompt_variables.forEach((varItem, varIndex) => {
if (errorRowIndex !== 0)
return
if (varItem.type === 'string') {
const maxLen = varItem.max_length || DEFAULT_VALUE_MAX_LEN
if (item[varIndex].length > maxLen) {
moreThanMaxLengthVarName = varItem.name
maxLength = maxLen
errorRowIndex = index + 1
return
}
}
if (varItem.required === false)
return

@@ -195,18 +201,15 @@ const TextGeneration: FC<IMainProps> = ({
errorRowIndex = index + 1
}
})

if (errorRowIndex !== 0)
return

if (item[varLen] === '') {
requiredVarName = t('share.generation.queryTitle')
errorRowIndex = index + 1
}
})

if (errorRowIndex !== 0) {
notify({ type: 'error', message: t('share.generation.errorMsg.invalidLine', { rowIndex: errorRowIndex + 1, varName: requiredVarName }) })
if (requiredVarName)
notify({ type: 'error', message: t('share.generation.errorMsg.invalidLine', { rowIndex: errorRowIndex + 1, varName: requiredVarName }) })

if (moreThanMaxLengthVarName)
notify({ type: 'error', message: t('share.generation.errorMsg.moreThanMaxLengthLine', { rowIndex: errorRowIndex + 1, varName: moreThanMaxLengthVarName, maxLength }) })

return false
}
return true
@@ -234,7 +237,6 @@ const TextGeneration: FC<IMainProps> = ({
status: i < PARALLEL_LIMIT ? TaskStatus.running : TaskStatus.pending,
params: {
inputs,
query: item[varLen],
},
}
})
@@ -334,7 +336,6 @@ const TextGeneration: FC<IMainProps> = ({
promptConfig={promptConfig}
moreLikeThisEnabled={!!moreLikeThisConfig?.enabled}
inputs={isCallBatchAPI ? (task as Task).params.inputs : inputs}
query={isCallBatchAPI ? (task as Task).params.query : query}
controlSend={controlSend}
controlStopResponding={controlStopResponding}
onShowRes={showResSidebar}
@@ -379,7 +380,6 @@ const TextGeneration: FC<IMainProps> = ({
</div>
)}
</div>

</div>

<div className='grow overflow-y-auto'>
@@ -459,8 +459,6 @@ const TextGeneration: FC<IMainProps> = ({
inputs={inputs}
onInputsChange={setInputs}
promptConfig={promptConfig}
query={query}
onQueryChange={setQuery}
onSend={handleSend}
/>
</div>
@@ -21,7 +21,6 @@ export type IResultProps = {
promptConfig: PromptConfig | null
moreLikeThisEnabled: boolean
inputs: Record<string, any>
query: string
controlSend?: number
controlStopResponding?: number
onShowRes: () => void
@@ -39,7 +38,6 @@ const Result: FC<IResultProps> = ({
promptConfig,
moreLikeThisEnabled,
inputs,
query,
controlSend,
controlStopResponding,
onShowRes,
@@ -109,14 +107,8 @@ const Result: FC<IResultProps> = ({
if (!checkCanSend())
return

if (!query) {
logError(t('appDebug.errorMessage.queryRequired'))
return false
}

const data = {
inputs,
query,
}

setMessageId(null)
@@ -16,7 +16,7 @@ const CSVDownload: FC<ICSVDownloadProps> = ({
}) => {
const { t } = useTranslation()
const { CSVDownloader, Type } = useCSVDownloader()
const addQueryContentVars = [...vars, { name: t('share.generation.queryTitle') }]
const addQueryContentVars = [...vars]
const template = (() => {
const res: Record<string, string> = {}
addQueryContentVars.forEach((item) => {

@@ -15,20 +15,24 @@ export type IRunOnceProps = {
promptConfig: PromptConfig
inputs: Record<string, any>
onInputsChange: (inputs: Record<string, any>) => void
query: string
onQueryChange: (query: string) => void
onSend: () => void
}
const RunOnce: FC<IRunOnceProps> = ({
promptConfig,
inputs,
onInputsChange,
query,
onQueryChange,
onSend,
}) => {
const { t } = useTranslation()

const onClear = () => {
const newInputs: Record<string, any> = {}
promptConfig.prompt_variables.forEach((item) => {
newInputs[item.key] = ''
})
onInputsChange(newInputs)
}

return (
<div className="">
<section>
@@ -38,61 +42,58 @@ const RunOnce: FC<IRunOnceProps> = ({
<div className='w-full mt-4' key={item.key}>
<label className='text-gray-900 text-sm font-medium'>{item.name}</label>
<div className='mt-2'>
{item.type === 'select'
? (
<Select
className='w-full'
defaultValue={inputs[item.key]}
onSelect={(i) => { onInputsChange({ ...inputs, [item.key]: i.value }) }}
items={(item.options || []).map(i => ({ name: i, value: i }))}
allowSearch={false}
bgClassName='bg-gray-50'
/>
)
: (
<input
type="text"
className="block w-full p-2 text-gray-900 border border-gray-300 rounded-lg bg-gray-50 sm:text-xs focus:ring-blue-500 focus:border-blue-500 "
placeholder={`${item.name}${!item.required ? `(${t('appDebug.variableTable.optional')})` : ''}`}
value={inputs[item.key]}
onChange={(e) => { onInputsChange({ ...inputs, [item.key]: e.target.value }) }}
maxLength={item.max_length || DEFAULT_VALUE_MAX_LEN}
/>
)}
{item.type === 'select' && (
<Select
className='w-full'
defaultValue={inputs[item.key]}
onSelect={(i) => { onInputsChange({ ...inputs, [item.key]: i.value }) }}
items={(item.options || []).map(i => ({ name: i, value: i }))}
allowSearch={false}
bgClassName='bg-gray-50'
/>
)}
{item.type === 'string' && (
<input
type="text"
className="block w-full p-2 text-gray-900 border border-gray-300 rounded-lg bg-gray-50 sm:text-xs focus:ring-blue-500 focus:border-blue-500 "
placeholder={`${item.name}${!item.required ? `(${t('appDebug.variableTable.optional')})` : ''}`}
value={inputs[item.key]}
onChange={(e) => { onInputsChange({ ...inputs, [item.key]: e.target.value }) }}
maxLength={item.max_length || DEFAULT_VALUE_MAX_LEN}
/>
)}
{item.type === 'paragraph' && (
<textarea
className="block w-full h-[104px] p-2 text-gray-900 border border-gray-300 rounded-lg bg-gray-50 sm:text-xs focus:ring-blue-500 focus:border-blue-500 "
placeholder={`${item.name}${!item.required ? `(${t('appDebug.variableTable.optional')})` : ''}`}
value={inputs[item.key]}
onChange={(e) => { onInputsChange({ ...inputs, [item.key]: e.target.value }) }}
/>
)}
</div>
</div>
))}
{promptConfig.prompt_variables.length > 0 && (
<div className='mt-6 h-[1px] bg-gray-100'></div>
<div className='mt-4 h-[1px] bg-gray-100'></div>
)}
<div className='w-full mt-5'>
<label className='text-gray-900 text-sm font-medium'>{t('share.generation.queryTitle')}</label>
<div className="mt-2 overflow-hidden rounded-lg bg-gray-50 ">
<div className="px-4 py-2 bg-gray-50 rounded-t-lg">
<textarea
value={query}
onChange={(e) => { onQueryChange(e.target.value) }}
rows={4}
className="w-full px-0 text-sm text-gray-900 border-0 bg-gray-50 focus:outline-none placeholder:bg-gray-50"
placeholder={t('share.generation.queryPlaceholder') as string}
required
>
</textarea>
</div>
<div className="flex items-center justify-between px-3 py-2">
<div className="flex pl-0 space-x-1 sm:pl-2">
<span className="bg-gray-100 text-gray-500 text-xs font-medium mr-2 px-2.5 py-0.5 rounded cursor-pointer">{query?.length}</span>
</div>
<Button
type="primary"
className='!h-8 !pl-3 !pr-4'
onClick={onSend}
disabled={!query || query === ''}
>
<PlayIcon className="shrink-0 w-4 h-4 mr-1" aria-hidden="true" />
<span className='uppercase text-[13px]'>{t('share.generation.run')}</span>
</Button>
</div>
<div className='w-full mt-4'>
<div className="flex items-center justify-between">
<Button
className='!h-8 !p-3'
onClick={onClear}
disabled={false}
>
<span className='text-[13px]'>{t('common.operation.clear')}</span>
</Button>
<Button
type="primary"
className='!h-8 !pl-3 !pr-4'
onClick={onSend}
disabled={false}
>
<PlayIcon className="shrink-0 w-4 h-4 mr-1" aria-hidden="true" />
<span className='text-[13px]'>{t('share.generation.run')}</span>
</Button>
</div>
</div>
</form>
23  web/assets/docx.svg  Normal file
@@ -0,0 +1,23 @@
|
||||
<svg width="24" height="26" viewBox="0 0 24 26" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<g filter="url(#filter0_d_6785_286)">
|
||||
<path d="M3 5.8C3 4.11984 3 3.27976 3.32698 2.63803C3.6146 2.07354 4.07354 1.6146 4.63803 1.32698C5.27976 1 6.11984 1 7.8 1H14L21 8V18.2C21 19.8802 21 20.7202 20.673 21.362C20.3854 21.9265 19.9265 22.3854 19.362 22.673C18.7202 23 17.8802 23 16.2 23H7.8C6.11984 23 5.27976 23 4.63803 22.673C4.07354 22.3854 3.6146 21.9265 3.32698 21.362C3 20.7202 3 19.8802 3 18.2V5.8Z" fill="#2349A9"/>
|
||||
</g>
|
||||
<path opacity="0.5" d="M14 1L21 8H16C14.8954 8 14 7.10457 14 6V1Z" fill="white"/>
|
||||
<g opacity="0.96">
|
||||
<path d="M8.13338 16C8.13338 16.6188 8.18659 17.1215 7.69589 17.5801C7.4062 17.8508 6.99827 17.9669 6.56078 17.9669H5V14.0331H6.56078C6.99827 14.0331 7.4062 14.1492 7.69589 14.4199C8.18659 14.8785 8.13338 15.3812 8.13338 16ZM7.09877 16C7.09877 15.337 7.06921 15.2265 6.98644 15.116C6.89185 14.9834 6.74996 14.895 6.48983 14.895H6.03461V17.105H6.48983C6.74996 17.105 6.89185 17.0166 6.98644 16.884C7.06921 16.7735 7.09877 16.6685 7.09877 16Z" fill="white"/>
|
||||
<path d="M11.9192 16C11.9192 16.5912 11.937 17.1436 11.4936 17.558C11.1862 17.8453 10.8314 18 10.3171 18C9.80274 18 9.44802 17.8453 9.14059 17.558C8.69719 17.1436 8.71493 16.5912 8.71493 16C8.71493 15.4088 8.69719 14.8564 9.14059 14.442C9.44802 14.1547 9.80274 14 10.3171 14C10.8314 14 11.1862 14.1547 11.4936 14.442C11.937 14.8564 11.9192 15.4088 11.9192 16ZM10.8846 16C10.8846 15.2818 10.8255 15.1492 10.7309 15.0331C10.6541 14.9392 10.5063 14.8619 10.3171 14.8619C10.1279 14.8619 9.9801 14.9392 9.90325 15.0331C9.80865 15.1492 9.74953 15.2818 9.74953 16C9.74953 16.7182 9.80865 16.8453 9.90325 16.9613C9.9801 17.0552 10.1279 17.1381 10.3171 17.1381C10.5063 17.1381 10.6541 17.0552 10.7309 16.9613C10.8255 16.8453 10.8846 16.7182 10.8846 16Z" fill="white"/>
|
||||
<path d="M15.689 16.7182C15.5353 17.5856 14.8909 18 14.0928 18C13.6021 18 13.2296 17.8453 12.9222 17.558C12.4788 17.1436 12.4965 16.5912 12.4965 16C12.4965 15.4088 12.4788 14.8564 12.9222 14.442C13.2296 14.1547 13.6021 14 14.0928 14C14.8909 14 15.5353 14.4144 15.689 15.2818H14.6367C14.5717 15.0608 14.4416 14.8619 14.0987 14.8619C13.9095 14.8619 13.7676 14.9337 13.6908 15.0276C13.5962 15.1436 13.5312 15.2818 13.5312 16C13.5312 16.7182 13.5962 16.8564 13.6908 16.9724C13.7676 17.0663 13.9095 17.1381 14.0987 17.1381C14.4416 17.1381 14.5717 16.9392 14.6367 16.7182H15.689Z" fill="white"/>
|
||||
<path d="M19.5 17.9669H18.3176L17.6259 16.7569L16.9342 17.9669H15.7518L17.0642 15.9503L15.8345 14.0331H17.011L17.6259 15.1436L18.2407 14.0331H19.4172L18.1875 15.9503L19.5 17.9669Z" fill="white"/>
|
||||
</g>
|
||||
<defs>
|
||||
<filter id="filter0_d_6785_286" x="1" y="0" width="22" height="26" filterUnits="userSpaceOnUse" color-interpolation-filters="sRGB">
|
||||
<feFlood flood-opacity="0" result="BackgroundImageFix"/>
|
||||
<feColorMatrix in="SourceAlpha" type="matrix" values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 127 0" result="hardAlpha"/>
|
||||
<feOffset dy="1"/>
|
||||
<feGaussianBlur stdDeviation="1"/>
|
||||
<feColorMatrix type="matrix" values="0 0 0 0 0.0627451 0 0 0 0 0.0941176 0 0 0 0 0.156863 0 0 0 0.05 0"/>
|
||||
<feBlend mode="normal" in2="BackgroundImageFix" result="effect1_dropShadow_6785_286"/>
|
||||
<feBlend mode="normal" in="SourceGraphic" in2="effect1_dropShadow_6785_286" result="shape"/>
|
||||
</filter>
|
||||
</defs>
|
||||
</svg>
|
||||
|
@@ -91,9 +91,14 @@ export const TONE_LIST = [
},
]

export const getMaxToken = (modelId: string) => {
return (modelId === 'gpt-4' || modelId === 'gpt-3.5-turbo-16k') ? 8000 : 4000
}

export const LOCALE_COOKIE_NAME = 'locale'

export const DEFAULT_VALUE_MAX_LEN = 48
export const DEFAULT_PARAGRAPH_VALUE_MAX_LEN = 1000

export const zhRegex = /^[\u4E00-\u9FA5]$/m
export const emojiRegex = /^[\uD800-\uDBFF][\uDC00-\uDFFF]$/m
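A quick reading of the token-cap helper added in this hunk (illustrative calls only, not part of the commit):

```ts
// Per the hunk above: gpt-4 and gpt-3.5-turbo-16k are capped at 8000 tokens,
// every other model id falls back to 4000.
getMaxToken('gpt-4') // 8000
getMaxToken('gpt-3.5-turbo-16k') // 8000
getMaxToken('gpt-3.5-turbo') // 4000
```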
|
||||
|
||||
@@ -12,7 +12,7 @@ export type DatasetsContextValue = {
const DatasetsContext = createContext<DatasetsContextValue>({
datasets: [],
mutateDatasets: () => {},
currentDataset: undefined
currentDataset: undefined,
})

export const useDatasetsContext = () => useContext(DatasetsContext)

@@ -92,6 +92,12 @@ const DebugConfigurationContext = createContext<IDebugConfiguration>({
prompt_template: '',
prompt_variables: [],
},
opening_statement: null,
more_like_this: null,
suggested_questions_after_answer: null,
speech_to_text: null,
retriever_resource: null,
dataSets: [],
},
setModelConfig: () => { },
dataSets: [],

@@ -128,11 +128,15 @@ const translation = {
notStartWithNumber: 'Variable key: {{key}} can not start with a number',
keyAlreadyExists: 'Variable key: :{{key}} already exists',
},
otherError: {
promptNoBeEmpty: 'Prefix prompt can not be empty',
},
variableConig: {
modalTitle: 'Field settings',
description: 'Setting for variable {{varName}}',
fieldType: 'Field type',
string: 'Text',
string: 'Short Text',
paragraph: 'Paragraph',
select: 'Select',
notSet: 'Not set, try typing {{input}} in the prefix prompt',
stringTitle: 'Form text box options',

@@ -124,11 +124,15 @@ const translation = {
notStartWithNumber: '变量: {{key}} 不能以数字开头',
keyAlreadyExists: '变量:{{key}} 已存在',
},
otherError: {
promptNoBeEmpty: '前缀提示词不能为空',
},
variableConig: {
modalTitle: '变量设置',
description: '设置变量 {{varName}}',
fieldType: '字段类型',
string: '文本',
paragraph: '段落',
select: '下拉选项',
notSet: '未设置,在 Prompt 中输入 {{input}} 试试',
stringTitle: '文本框设置',

@@ -23,7 +23,7 @@ const translation = {
title: 'Upload text file',
button: 'Drag and drop file, or',
browse: 'Browse',
tip: 'Supports txt, html, markdown, xlsx, and pdf. Max {{size}}MB each.',
tip: 'Supports txt, html, markdown, xlsx, csv, docx and pdf. Max {{size}}MB each.',
validation: {
typeError: 'File type not supported',
size: 'File too large. Maximum is {{size}}MB',

@@ -23,7 +23,7 @@ const translation = {
title: '上传文本文件',
button: '拖拽文件至此,或者',
browse: '选择文件',
tip: '已支持 TXT、 HTML、 Markdown、 PDF、 XLSX,每个文件不超过 {{size}}MB。',
tip: '已支持 TXT、 HTML、 Markdown、 PDF、 XLSX、CSV、DOCX,每个文件不超过 {{size}}MB。',
validation: {
typeError: '文件类型不支持',
size: '文件太大了,不能超过 {{size}}MB',

@@ -58,7 +58,8 @@ const translation = {
empty: 'Please input content in the uploaded file.',
fileStructNotMatch: 'The uploaded CSV file not match the struct.',
emptyLine: 'Row {{rowIndex}} is empty',
invalidLine: 'Row {{rowIndex}}: variables value can not be empty',
invalidLine: 'Row {{rowIndex}}: {{varName}} value can not be empty',
moreThanMaxLengthLine: 'Row {{rowIndex}}: {{varName}} value can not be more than {{maxLength}} characters',
atLeastOne: 'Please input at least one row in the uploaded file.',
},
},

@@ -31,6 +31,7 @@ const translation = {
create: '运行一次',
batch: '批量运行',
saved: '已保存',

},
savedNoData: {
title: '您还没有保存结果!',
@@ -54,7 +55,8 @@ const translation = {
empty: '上传文件的内容不能为空',
fileStructNotMatch: '上传文件的内容与结构不匹配',
emptyLine: '第 {{rowIndex}} 行的内容为空',
invalidLine: '第 {{rowIndex}} 行: 变量值必填',
invalidLine: '第 {{rowIndex}} 行: {{varName}}值必填',
moreThanMaxLengthLine: '第 {{rowIndex}} 行: {{varName}}值超过最大长度 {{maxLength}}',
atLeastOne: '上传文件的内容不能少于一条',
},
},

@@ -35,24 +35,18 @@ export type SpeechToTextConfig = MoreLikeThisConfig

export type CitationConfig = MoreLikeThisConfig

export type RetrieverResourceConfig = MoreLikeThisConfig

// frontend use. Not the same as backend
export type ModelConfig = {
provider: string // LLM Provider: for example "OPENAI"
model_id: string
configs: PromptConfig
opening_statement: string | null
more_like_this: {
enabled: boolean
} | null
suggested_questions_after_answer: {
enabled: boolean
} | null
speech_to_text: {
enabled: boolean
} | null
retriever_resource: {
enabled: boolean
} | null
more_like_this: MoreLikeThisConfig | null
suggested_questions_after_answer: SuggestedQuestionsAfterAnswerConfig | null
speech_to_text: SpeechToTextConfig | null
retriever_resource: RetrieverResourceConfig | null
dataSets: any[]
}
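A hedged illustration of a value satisfying the narrowed type above (field values are invented; it assumes MoreLikeThisConfig and the other aliases keep the `{ enabled: boolean }` shape spelled out by the removed inline annotations, and that PromptConfig is the `{ prompt_template, prompt_variables }` shape used elsewhere in this diff):

```ts
// Illustrative only — not part of the commit.
const exampleConfig: ModelConfig = {
  provider: 'OPENAI',
  model_id: 'gpt-3.5-turbo',
  configs: { prompt_template: '', prompt_variables: [] },
  opening_statement: null,
  more_like_this: { enabled: false },
  suggested_questions_after_answer: { enabled: true },
  speech_to_text: null,
  retriever_resource: null,
  dataSets: [],
}
```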
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
{
"name": "dify-web",
"version": "0.3.20",
"version": "0.3.22",
"private": true,
"scripts": {
"dev": "next dev",
@@ -1,22 +1,32 @@
import { UserInputFormItem, } from '@/types/app'
import { PromptVariable } from '@/models/debug'
import type { UserInputFormItem } from '@/types/app'
import type { PromptVariable } from '@/models/debug'

export const userInputsFormToPromptVariables = (useInputs: UserInputFormItem[] | null) => {
if (!useInputs) return []
if (!useInputs)
return []
const promptVariables: PromptVariable[] = []
useInputs.forEach((item: any) => {
const type = item['text-input'] ? 'string' : 'select'
const content = type === 'string' ? item['text-input'] : item['select']
if (type === 'string') {
const isParagraph = !!item.paragraph
const [type, content] = (() => {
if (isParagraph)
return ['paragraph', item.paragraph]

if (item['text-input'])
return ['string', item['text-input']]

return ['select', item.select]
})()
if (type === 'string' || type === 'paragraph') {
promptVariables.push({
key: content.variable,
name: content.label,
required: content.required,
type: 'string',
type,
max_length: content.max_length,
options: [],
})
} else {
}
else {
promptVariables.push({
key: content.variable,
name: content.label,
@@ -32,29 +42,30 @@ export const userInputsFormToPromptVariables = (useInputs: UserInputFormItem[] |
export const promptVariablesToUserInputsForm = (promptVariables: PromptVariable[]) => {
const userInputs: UserInputFormItem[] = []
promptVariables.filter(({ key, name }) => {
if (key && key.trim() && name && name.trim()) {
if (key && key.trim() && name && name.trim())
return true
}

return false
}).forEach((item: any) => {
if (item.type === 'string') {
if (item.type === 'string' || item.type === 'paragraph') {
userInputs.push({
'text-input': {
[item.type === 'string' ? 'text-input' : 'paragraph']: {
label: item.name,
variable: item.key,
required: item.required === false ? false : true, // default true
required: item.required !== false, // default true
max_length: item.max_length,
default: ''
default: '',
},
} as any)
} else {
}
else {
userInputs.push({
'select': {
select: {
label: item.name,
variable: item.key,
required: item.required === false ? false : true, // default true
required: item.required !== false, // default true
options: item.options,
default: ''
default: '',
},
} as any)
}
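To make the new paragraph handling above concrete, a small illustrative call (not part of the commit; the input objects mirror the `paragraph` / `text-input` shapes the function reads, and are cast loosely since the exact UserInputFormItem definition is not shown in this diff):

```ts
// Illustrative only — exercising userInputsFormToPromptVariables as changed above.
// A 'paragraph' form item now maps to a prompt variable of type 'paragraph'.
const form = [
  { paragraph: { variable: 'bio', label: 'Bio', required: true, max_length: 1000 } },
  { 'text-input': { variable: 'name', label: 'Name', required: false, max_length: 48 } },
]
const vars = userInputsFormToPromptVariables(form as any)
// vars[0] ≈ { key: 'bio', name: 'Bio', required: true, type: 'paragraph', max_length: 1000, options: [] }
// vars[1] ≈ { key: 'name', name: 'Name', required: false, type: 'string', max_length: 48, options: [] }
```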