Skip to content

Commit

Permalink
fix: dataset data list api adapt (#2878)
Browse files Browse the repository at this point in the history
* fix: dataset data list api adapt

* update doc version

* perf: fedomain env

* add fedomain env
  • Loading branch information
c121914yu committed Oct 12, 2024
1 parent 67dd58c commit 01a83b2
Show file tree
Hide file tree
Showing 10 changed files with 99 additions and 17 deletions.
19 changes: 19 additions & 0 deletions docSite/content/zh-cn/docs/development/openapi/dataset.md
Original file line number Diff line number Diff line change
Expand Up @@ -1093,6 +1093,22 @@ A2:
{{< tab tabName="请求示例" >}}
{{< markdownify >}}

**4.8.11+**

```bash
curl --location --request POST 'http://localhost:3000/api/core/dataset/data/v2/list' \
--header 'Authorization: Bearer {{authorization}}' \
--header 'Content-Type: application/json' \
--data-raw '{
"offset": 0,
"pageSize": 10,
"collectionId":"65abd4ac9d1448617cba6171",
"searchText":""
}'
```

**4.6.7+**

```bash
curl --location --request POST 'http://localhost:3000/api/core/dataset/data/list' \
--header 'Authorization: Bearer {{authorization}}' \
Expand All @@ -1112,10 +1128,13 @@ curl --location --request POST 'http://localhost:3000/api/core/dataset/data/list
{{< markdownify >}}

{{% alert icon=" " context="success" %}}

- offset: 偏移量(选填,默认 0)
- pageNum: 页码(选填)
- pageSize: 每页数量,最大30(选填)
- collectionId: 集合的ID(必填)
- searchText: 模糊搜索词(选填)

{{% /alert %}}

{{< /markdownify >}}
Expand Down
3 changes: 2 additions & 1 deletion docSite/content/zh-cn/docs/development/upgrading/4812.md
Original file line number Diff line number Diff line change
Expand Up @@ -10,4 +10,5 @@ weight: 812
## 更新说明

1. 新增 - 全局变量支持更多数据类型
2. 修复 - 文件后缀判断,去除 query 影响。
2. 新增 - FE_DOMAIN 环境变量,配置该环境变量后,上传文件/图片链接会补全域名前缀,得到完整地址。(可解决 docx 文件图片链接,有时会无法被模型识别问题)
3. 修复 - 文件后缀判断,去除 query 影响。
2 changes: 2 additions & 0 deletions files/docker/docker-compose-milvus.yml
Original file line number Diff line number Diff line change
Expand Up @@ -154,6 +154,8 @@ services:
- MILVUS_TOKEN=none
# sandbox 地址
- SANDBOX_URL=http://sandbox:3000
# 前端地址
- FE_DOMAIN=
# 日志等级: debug, info, warn, error
- LOG_LEVEL=info
- STORE_LOG_LEVEL=warn
Expand Down
2 changes: 2 additions & 0 deletions files/docker/docker-compose-pgvector.yml
Original file line number Diff line number Diff line change
Expand Up @@ -111,6 +111,8 @@ services:
- PG_URL=postgresql://username:password@pg:5432/postgres
# sandbox 地址
- SANDBOX_URL=http://sandbox:3000
# 前端地址
- FE_DOMAIN=
# 日志等级: debug, info, warn, error
- LOG_LEVEL=info
- STORE_LOG_LEVEL=warn
Expand Down
2 changes: 2 additions & 0 deletions files/docker/docker-compose-zilliz.yml
Original file line number Diff line number Diff line change
Expand Up @@ -92,6 +92,8 @@ services:
- MILVUS_TOKEN=zilliz_cloud_token
# sandbox 地址
- SANDBOX_URL=http://sandbox:3000
# 前端地址
- FE_DOMAIN=
# 日志等级: debug, info, warn, error
- LOG_LEVEL=info
- STORE_LOG_LEVEL=warn
Expand Down
6 changes: 1 addition & 5 deletions packages/service/common/file/image/controller.ts
Original file line number Diff line number Diff line change
Expand Up @@ -5,10 +5,6 @@ import { ClientSession } from '../../../common/mongo';
import { guessBase64ImageType } from '../utils';
import { readFromSecondary } from '../../mongo/utils';

export function getMongoImgUrl(id: string, extension: string) {
return `${imageBaseUrl}${id}.${extension}`;
}

export const maxImgSize = 1024 * 1024 * 12;
const base64MimeRegex = /data:image\/([^\)]+);base64/;
export async function uploadMongoImg({
Expand Down Expand Up @@ -39,7 +35,7 @@ export async function uploadMongoImg({
shareId
});

return getMongoImgUrl(String(_id), extension);
return `${process.env.FE_DOMAIN || ''}${imageBaseUrl}${String(_id)}.${extension}`;
}

export async function readMongoImg({ id }: { id: string }) {
Expand Down
2 changes: 1 addition & 1 deletion projects/app/.env.template
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@ SANDBOX_URL=http://localhost:3001
# 商业版地址
PRO_URL=
# 页面的地址,用于自动补全相对路径资源的 domain
# FE_DOMAIN=http://localhost:3000
FE_DOMAIN=http://localhost:3000

# 日志等级: debug, info, warn, error
LOG_LEVEL=debug
Expand Down
17 changes: 9 additions & 8 deletions projects/app/src/pages/api/core/dataset/data/list.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,20 +3,19 @@ import { MongoDatasetData } from '@fastgpt/service/core/dataset/data/schema';
import { replaceRegChars } from '@fastgpt/global/common/string/tools';
import { NextAPI } from '@/service/middleware/entry';
import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';
import { PagingData, RequestPaging } from '@/types';
import { ApiRequestProps } from '@fastgpt/service/type/next';
import { DatasetDataListItemType } from '@/global/core/dataset/type';
import { PaginationProps, PaginationResponse } from '@fastgpt/web/common/fetch/type';

export type GetDatasetDataListProps = PaginationProps & {
export type GetDatasetDataListProps = RequestPaging & {
searchText?: string;
collectionId: string;
};
export type GetDatasetDataListRes = PaginationResponse<DatasetDataListItemType>;

async function handler(
req: ApiRequestProps<GetDatasetDataListProps>
): Promise<GetDatasetDataListRes> {
let { offset, pageSize = 10, searchText = '', collectionId } = req.body;
): Promise<PagingData<DatasetDataListItemType>> {
let { pageNum = 1, pageSize = 10, searchText = '', collectionId } = req.body;

pageSize = Math.min(pageSize, 30);

Expand All @@ -41,17 +40,19 @@ async function handler(
: {})
};

const [list, total] = await Promise.all([
const [data, total] = await Promise.all([
MongoDatasetData.find(match, '_id datasetId collectionId q a chunkIndex')
.sort({ chunkIndex: 1, updateTime: -1 })
.skip(offset)
.skip((pageNum - 1) * pageSize)
.limit(pageSize)
.lean(),
MongoDatasetData.countDocuments(match)
]);

return {
list,
pageNum,
pageSize,
data,
total
};
}
Expand Down
59 changes: 59 additions & 0 deletions projects/app/src/pages/api/core/dataset/data/v2/list.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,59 @@
import { authDatasetCollection } from '@fastgpt/service/support/permission/dataset/auth';
import { MongoDatasetData } from '@fastgpt/service/core/dataset/data/schema';
import { replaceRegChars } from '@fastgpt/global/common/string/tools';
import { NextAPI } from '@/service/middleware/entry';
import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';
import { ApiRequestProps } from '@fastgpt/service/type/next';
import { DatasetDataListItemType } from '@/global/core/dataset/type';
import { PaginationProps, PaginationResponse } from '@fastgpt/web/common/fetch/type';

export type GetDatasetDataListProps = PaginationProps & {
  searchText?: string;
  collectionId: string;
};
export type GetDatasetDataListRes = PaginationResponse<DatasetDataListItemType>;

/**
 * List dataset data items of one collection (v2, offset-based pagination).
 *
 * Body: `offset` (default 0), `pageSize` (default 10, capped at 30),
 * optional `searchText` (case-insensitive substring match on `q` or `a`),
 * and the required `collectionId`.
 *
 * Returns `{ list, total }` where `total` is the count of all matching
 * documents, not just the current page.
 *
 * Throws if the caller lacks read permission on the collection
 * (via authDatasetCollection).
 */
async function handler(
  req: ApiRequestProps<GetDatasetDataListProps>
): Promise<GetDatasetDataListRes> {
  let { offset = 0, pageSize = 10, searchText = '', collectionId } = req.body;

  // Guard against a missing/negative offset (`.skip(undefined)` would be
  // passed to Mongo otherwise) and cap the page size at 30.
  offset = Math.max(offset, 0);
  pageSize = Math.min(pageSize, 30);

  // Credential check: caller must have read permission on the collection.
  const { teamId, collection } = await authDatasetCollection({
    req,
    authToken: true,
    authApiKey: true,
    collectionId,
    per: ReadPermissionVal
  });

  // Escape regex metacharacters so user input is matched literally.
  const queryReg = new RegExp(`${replaceRegChars(searchText)}`, 'i');
  const match = {
    teamId,
    datasetId: collection.datasetId._id,
    collectionId,
    ...(searchText.trim()
      ? {
          $or: [{ q: queryReg }, { a: queryReg }]
        }
      : {})
  };

  // Fetch the page and the total count in parallel.
  const [list, total] = await Promise.all([
    MongoDatasetData.find(match, '_id datasetId collectionId q a chunkIndex')
      .sort({ chunkIndex: 1, updateTime: -1 })
      .skip(offset)
      .limit(pageSize)
      .lean(),
    MongoDatasetData.countDocuments(match)
  ]);

  return {
    list,
    total
  };
}

export default NextAPI(handler);
4 changes: 2 additions & 2 deletions projects/app/src/web/core/dataset/api.ts
Original file line number Diff line number Diff line change
Expand Up @@ -51,7 +51,7 @@ import type { UpdateDatasetCollectionParams } from '@/pages/api/core/dataset/col
import type {
GetDatasetDataListProps,
GetDatasetDataListRes
} from '@/pages/api/core/dataset/data/list';
} from '@/pages/api/core/dataset/data/v2/list';
import type { UpdateDatasetDataProps } from '@fastgpt/global/core/dataset/controller';
import type { DatasetFolderCreateBody } from '@/pages/api/core/dataset/folder/create';
import type { PaginationProps, PaginationResponse } from '@fastgpt/web/common/fetch/type';
Expand Down Expand Up @@ -159,7 +159,7 @@ export const getScrollCollectionList = (data: GetScrollCollectionsProps) =>
/* =============================== data ==================================== */
/* get dataset list */
export const getDatasetDataList = (data: GetDatasetDataListProps) =>
POST<GetDatasetDataListRes>(`/core/dataset/data/list`, data);
POST<GetDatasetDataListRes>(`/core/dataset/data/v2/list`, data);

export const getDatasetDataItemById = (id: string) =>
GET<DatasetDataItemType>(`/core/dataset/data/detail`, { id });
Expand Down

0 comments on commit 01a83b2

Please sign in to comment.