merge main

This commit is contained in:
zxhlyh
2026-01-21 14:05:22 +08:00
581 changed files with 38181 additions and 5619 deletions

View File

@@ -40,6 +40,15 @@ type CommonDocReq = {
documentId: string
}
// Response payload of the single-document download endpoint:
// a (presumably pre-signed) URL the client fetches directly.
export type DocumentDownloadResponse = {
url: string
}
// Request payload for downloading several documents of one dataset
// bundled as a zip archive.
export type DocumentDownloadZipRequest = {
datasetId: string
documentIds: string[]
}
type BatchReq = {
datasetId: string
batchId: string
@@ -158,6 +167,18 @@ export const resumeDocIndexing = ({ datasetId, documentId }: CommonDocReq): Prom
return patch<CommonResponse>(`/datasets/${datasetId}/documents/${documentId}/processing/resume`)
}
/**
 * Fetch the download URL for a single document of a dataset.
 *
 * @returns the API response carrying the document's download `url`.
 */
export const fetchDocumentDownloadUrl = ({ datasetId, documentId }: CommonDocReq): Promise<DocumentDownloadResponse> => {
  const endpoint = `/datasets/${datasetId}/documents/${documentId}/download`
  return get<DocumentDownloadResponse>(endpoint, {})
}
/**
 * Download several documents of one dataset bundled as a zip archive.
 *
 * @returns the raw archive as a Blob.
 */
export const downloadDocumentsZip = ({ datasetId, documentIds }: DocumentDownloadZipRequest): Promise<Blob> => {
  // Backend expects snake_case `document_ids` in the POST body.
  const body = { document_ids: documentIds }
  return post<Blob>(`/datasets/${datasetId}/documents/download-zip`, { body })
}
/**
 * Preview the Notion pages available for import, optionally scoped to an
 * existing dataset via the `dataset_id` query parameter.
 */
export const preImportNotionPages = ({ url, datasetId }: { url: string, datasetId?: string }): Promise<{ notion_info: DataSourceNotionWorkspace[] }> => {
  const params = { dataset_id: datasetId }
  return get<{ notion_info: DataSourceNotionWorkspace[] }>(url, { params })
}

View File

@@ -1,4 +1,4 @@
import type { MetadataType, SortType } from '../datasets'
import type { DocumentDownloadResponse, DocumentDownloadZipRequest, MetadataType, SortType } from '../datasets'
import type { CommonResponse } from '@/models/common'
import type { DocumentDetailResponse, DocumentListResponse, UpdateDocumentBatchParams } from '@/models/datasets'
import {
@@ -8,7 +8,7 @@ import {
import { normalizeStatusForQuery } from '@/app/components/datasets/documents/status-filter'
import { DocumentActionType } from '@/models/datasets'
import { del, get, patch, post } from '../base'
import { pauseDocIndexing, resumeDocIndexing } from '../datasets'
import { downloadDocumentsZip, fetchDocumentDownloadUrl, pauseDocIndexing, resumeDocIndexing } from '../datasets'
import { useInvalid } from '../use-base'
const NAME_SPACE = 'knowledge/document'
@@ -176,6 +176,26 @@ export const useDocumentResume = () => {
})
}
/**
 * Mutation hook resolving the download URL of a single document.
 *
 * Rejects with an Error when either id is missing, so callers get a
 * normal mutation error instead of a malformed request.
 */
export const useDocumentDownload = () => {
  return useMutation({
    mutationFn: ({ datasetId, documentId }: UpdateDocumentBatchParams) => {
      if (!datasetId || !documentId)
        throw new Error('datasetId and documentId are required')
      // fetchDocumentDownloadUrl is already declared to return
      // Promise<DocumentDownloadResponse>; the previous `as` assertion
      // was redundant and could mask future type drift.
      return fetchDocumentDownloadUrl({ datasetId, documentId })
    },
  })
}
/**
 * Mutation hook that downloads the given documents as one zip archive.
 *
 * Rejects with an Error when the dataset id is missing or the id list is
 * empty/undefined.
 */
export const useDocumentDownloadZip = () => {
  return useMutation({
    mutationFn: ({ datasetId, documentIds }: DocumentDownloadZipRequest) => {
      const hasValidInput = Boolean(datasetId) && (documentIds?.length ?? 0) > 0
      if (!hasValidInput)
        throw new Error('datasetId and documentIds are required')
      return downloadDocumentsZip({ datasetId, documentIds })
    },
  })
}
export const useDocumentBatchRetryIndex = () => {
return useMutation({
mutationFn: ({ datasetId, documentIds }: { datasetId: string, documentIds: string[] }) => {

View File

@@ -0,0 +1,17 @@
import type { ICurrentWorkspace } from '@/models/common'
import { useQuery } from '@tanstack/react-query'
import { get } from './base'
// Permission flags returned by the workspace permission endpoint.
type WorkspacePermissions = {
// workspace these flags belong to
workspace_id: ICurrentWorkspace['id']
// whether ordinary members may invite new people
allow_member_invite: boolean
// whether workspace ownership may be transferred
allow_owner_transfer: boolean
}
/**
 * Query the permission flags of the current workspace.
 *
 * NOTE(review): the request always targets `/workspaces/current/permission`;
 * `workspaceId` only scopes the cache key and gates fetching — confirm this
 * matches caller expectations when the active workspace changes.
 */
export function useWorkspacePermissions(workspaceId: ICurrentWorkspace['id'], enabled: boolean) {
  const shouldFetch = enabled && Boolean(workspaceId)
  return useQuery({
    queryKey: ['workspace-permissions', workspaceId],
    queryFn: () => get<WorkspacePermissions>('/workspaces/current/permission'),
    enabled: shouldFetch,
  })
}