jyong
2025-06-17 19:06:17 +08:00
parent 7c41f71248
commit 7f7ea92a45
19 changed files with 243 additions and 118 deletions

View File

@@ -283,7 +283,7 @@ class DatasetApi(Resource):
            location="json",
            help="Invalid external knowledge api id.",
        )
        parser.add_argument(
            "icon_info",
            type=dict,
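
Aside: the new icon_info argument is parsed as a dict from the JSON body. A minimal sketch of a PATCH request supplying it, with an assumed route and assumed dict fields, since this hunk does not show how icon_info is consumed downstream:

# Hypothetical payload; the icon_info field names and the route are
# assumptions -- the hunk only shows icon_info parsed as a dict.
import requests

resp = requests.patch(
    "http://localhost:5001/console/api/datasets/<dataset_id>",  # assumed base URL/route
    json={"icon_info": {"icon_type": "emoji", "icon": "📙", "icon_background": "#FFF4ED"}},
    headers={"Authorization": "Bearer <console-session-token>"},  # assumed auth scheme
)
print(resp.status_code)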

View File

@@ -52,6 +52,7 @@ from fields.document_fields import (
)
from libs.login import login_required
from models import Dataset, DatasetProcessRule, Document, DocumentSegment, UploadFile
+from models.dataset import DocumentPipelineExecutionLog
from services.dataset_service import DatasetService, DocumentService
from services.entities.knowledge_entities.knowledge_entities import KnowledgeConfig
from tasks.add_document_to_index_task import add_document_to_index_task
@@ -1092,6 +1093,35 @@ class WebsiteDocumentSyncApi(DocumentResource):
        return {"result": "success"}, 200

+class DocumentPipelineExecutionLogApi(DocumentResource):
+    @setup_required
+    @login_required
+    @account_initialization_required
+    def get(self, dataset_id, document_id):
+        dataset_id = str(dataset_id)
+        document_id = str(document_id)
+        dataset = DatasetService.get_dataset(dataset_id)
+        if not dataset:
+            raise NotFound("Dataset not found.")
+        document = DocumentService.get_document(dataset.id, document_id)
+        if not document:
+            raise NotFound("Document not found.")
+        log = (
+            db.session.query(DocumentPipelineExecutionLog)
+            .filter_by(document_id=document_id)
+            .order_by(DocumentPipelineExecutionLog.created_at.desc())
+            .first()
+        )
+        if not log:
+            return {"datasource_info": None, "datasource_type": None, "input_data": None}, 200
+        return {
+            "datasource_info": log.datasource_info,
+            "datasource_type": log.datasource_type,
+            "input_data": log.input_data,
+        }, 200

api.add_resource(GetProcessRuleApi, "/datasets/process-rule")
api.add_resource(DatasetDocumentListApi, "/datasets/<uuid:dataset_id>/documents")
api.add_resource(DatasetInitApi, "/datasets/init")
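
For reference, a minimal sketch of calling the new DocumentPipelineExecutionLogApi endpoint. The api.add_resource registration for it is not visible in this excerpt, so the route below is an assumption modeled on the sibling document routes:

# Hypothetical client call; route, base URL, and auth scheme are assumptions.
import requests

CONSOLE_API = "http://localhost:5001/console/api"  # assumed console API base
url = f"{CONSOLE_API}/datasets/<dataset_id>/documents/<document_id>/pipeline-execution-log"
resp = requests.get(url, headers={"Authorization": "Bearer <console-session-token>"})
resp.raise_for_status()
# Per the handler above, this returns the newest execution log for the
# document, or all-None fields when no log row exists; both are HTTP 200.
print(resp.json())  # {"datasource_info": ..., "datasource_type": ..., "input_data": ...}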

View File

@@ -41,8 +41,9 @@ class DatasourcePluginOauthApi(Resource):
        if not plugin_oauth_config:
            raise NotFound()
        oauth_handler = OAuthHandler()
-        redirect_url = (f"{dify_config.CONSOLE_WEB_URL}/oauth/datasource/callback?"
-                        f"provider={provider}&plugin_id={plugin_id}")
+        redirect_url = (
+            f"{dify_config.CONSOLE_WEB_URL}/oauth/datasource/callback?provider={provider}&plugin_id={plugin_id}"
+        )
        system_credentials = plugin_oauth_config.system_credentials
        if system_credentials:
            system_credentials["redirect_url"] = redirect_url
@@ -123,9 +124,7 @@ class DatasourceAuth(Resource):
        args = parser.parse_args()
        datasource_provider_service = DatasourceProviderService()
        datasources = datasource_provider_service.get_datasource_credentials(
-            tenant_id=current_user.current_tenant_id,
-            provider=args["provider"],
-            plugin_id=args["plugin_id"]
+            tenant_id=current_user.current_tenant_id, provider=args["provider"], plugin_id=args["plugin_id"]
        )
        return {"result": datasources}, 200
@@ -146,7 +145,7 @@ class DatasourceAuthUpdateDeleteApi(Resource):
            tenant_id=current_user.current_tenant_id,
            auth_id=auth_id,
            provider=args["provider"],
-            plugin_id=args["plugin_id"]
+            plugin_id=args["plugin_id"],
        )
        return {"result": "success"}, 200

View File

@@ -384,6 +384,7 @@ class PublishedRagPipelineRunApi(Resource):
    #     return result
    #

class RagPipelinePublishedDatasourceNodeRunApi(Resource):
    @setup_required
    @login_required
@@ -419,7 +420,7 @@ class RagPipelinePublishedDatasourceNodeRunApi(Resource):
            user_inputs=inputs,
            account=current_user,
            datasource_type=datasource_type,
-            is_published=True
+            is_published=True,
        )
        return result
@@ -458,12 +459,12 @@ class RagPipelineDraftDatasourceNodeRunApi(Resource):
        return helper.compact_generate_response(
            PipelineGenerator.convert_to_event_stream(
                rag_pipeline_service.run_datasource_workflow_node(
-                pipeline=pipeline,
-                node_id=node_id,
-                user_inputs=inputs,
-                account=current_user,
-                datasource_type=datasource_type,
-                is_published=False
+                    pipeline=pipeline,
+                    node_id=node_id,
+                    user_inputs=inputs,
+                    account=current_user,
+                    datasource_type=datasource_type,
+                    is_published=False,
                )
            )
        )
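
Since the draft datasource node run endpoint wraps its generator with PipelineGenerator.convert_to_event_stream and helper.compact_generate_response, the response is a streamed event feed. A hedged sketch of consuming it as server-sent events, with an assumed route, payload, and framing:

# Hypothetical consumer; the route, request body, and "data: " SSE framing
# are assumptions -- this hunk only shows the server wrapping a generator
# into an event stream.
import json
import requests

url = "http://localhost:5001/console/api/rag/pipelines/<pipeline_id>/workflows/draft/datasource/nodes/<node_id>/run"
with requests.post(
    url,
    json={"inputs": {}, "datasource_type": "online_document"},  # hypothetical body
    headers={"Authorization": "Bearer <console-session-token>"},  # assumed auth scheme
    stream=True,
) as resp:
    for line in resp.iter_lines(decode_unicode=True):
        if line.startswith("data: "):  # typical SSE data frame
            print(json.loads(line[len("data: "):]))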