Commit
·
9753e7a
1
Parent(s):
c9b34c6
Fix test cases (#3718)
Browse files

### What problem does this PR solve?
Fix test cases
### Type of change
- [x] Other (please describe): Fix error cases
---------
Signed-off-by: jinhai <[email protected]>
- README.md +1 -2
- README_id.md +2 -3
- README_ja.md +1 -2
- README_ko.md +1 -2
- README_zh.md +1 -2
- sdk/python/test/conftest.py +4 -1
- sdk/python/test/test_frontend_api/test_dataset.py +6 -6
- sdk/python/test/test_sdk_api/t_chunk.py +4 -1
README.md
CHANGED
|
@@ -333,8 +333,7 @@ docker build -f Dockerfile -t infiniflow/ragflow:dev .
|
|
| 333 |
cd web
|
| 334 |
npm install --force
|
| 335 |
```
|
| 336 |
-
7.
|
| 337 |
-
8. Launch frontend service:
|
| 338 |
```bash
|
| 339 |
npm run dev
|
| 340 |
```
|
|
|
|
| 333 |
cd web
|
| 334 |
npm install --force
|
| 335 |
```
|
| 336 |
+
7. Launch frontend service:
|
|
|
|
| 337 |
```bash
|
| 338 |
npm run dev
|
| 339 |
```
|
README_id.md
CHANGED
|
@@ -307,9 +307,8 @@ docker build -f Dockerfile -t infiniflow/ragflow:dev .
|
|
| 307 |
```bash
|
| 308 |
cd web
|
| 309 |
npm install --force
|
| 310 |
-
```
|
| 311 |
-
7.
|
| 312 |
-
8. Jalankan aplikasi frontend:
|
| 313 |
```bash
|
| 314 |
npm run dev
|
| 315 |
```
|
|
|
|
| 307 |
```bash
|
| 308 |
cd web
|
| 309 |
npm install --force
|
| 310 |
+
```
|
| 311 |
+
7. Jalankan aplikasi frontend:
|
|
|
|
| 312 |
```bash
|
| 313 |
npm run dev
|
| 314 |
```
|
README_ja.md
CHANGED
|
@@ -289,8 +289,7 @@ docker build -f Dockerfile -t infiniflow/ragflow:dev .
|
|
| 289 |
cd web
|
| 290 |
npm install --force
|
| 291 |
```
|
| 292 |
-
7.
|
| 293 |
-
8. フロントエンドサービスを起動する:
|
| 294 |
```bash
|
| 295 |
npm run dev
|
| 296 |
```
|
|
|
|
| 289 |
cd web
|
| 290 |
npm install --force
|
| 291 |
```
|
| 292 |
+
7. フロントエンドサービスを起動する:
|
|
|
|
| 293 |
```bash
|
| 294 |
npm run dev
|
| 295 |
```
|
README_ko.md
CHANGED
|
@@ -291,8 +291,7 @@ docker build -f Dockerfile -t infiniflow/ragflow:dev .
|
|
| 291 |
cd web
|
| 292 |
npm install --force
|
| 293 |
```
|
| 294 |
-
7.
|
| 295 |
-
8. 프론트엔드 서비스를 시작합니다:
|
| 296 |
```bash
|
| 297 |
npm run dev
|
| 298 |
```
|
|
|
|
| 291 |
cd web
|
| 292 |
npm install --force
|
| 293 |
```
|
| 294 |
+
7. 프론트엔드 서비스를 시작합니다:
|
|
|
|
| 295 |
```bash
|
| 296 |
npm run dev
|
| 297 |
```
|
README_zh.md
CHANGED
|
@@ -296,8 +296,7 @@ docker build -f Dockerfile -t infiniflow/ragflow:dev .
|
|
| 296 |
cd web
|
| 297 |
npm install --force
|
| 298 |
```
|
| 299 |
-
7.
|
| 300 |
-
8. 启动前端服务：
|
| 301 |
```bash
|
| 302 |
npm run dev
|
| 303 |
```
|
|
|
|
| 296 |
cd web
|
| 297 |
npm install --force
|
| 298 |
```
|
| 299 |
+
7. 启动前端服务：
|
|
|
|
| 300 |
```bash
|
| 301 |
npm run dev
|
| 302 |
```
|
sdk/python/test/conftest.py
CHANGED
|
@@ -40,7 +40,10 @@ def login():
|
|
| 40 |
|
| 41 |
@pytest.fixture(scope="session")
|
| 42 |
def get_api_key_fixture():
|
| 43 |
-
|
|
|
|
|
|
|
|
|
|
| 44 |
auth = login()
|
| 45 |
url = HOST_ADDRESS + "/v1/system/new_token"
|
| 46 |
auth = {"Authorization": auth}
|
|
|
|
| 40 |
|
| 41 |
@pytest.fixture(scope="session")
|
| 42 |
def get_api_key_fixture():
|
| 43 |
+
try:
|
| 44 |
+
register()
|
| 45 |
+
except Exception as e:
|
| 46 |
+
print(e)
|
| 47 |
auth = login()
|
| 48 |
url = HOST_ADDRESS + "/v1/system/new_token"
|
| 49 |
auth = {"Authorization": auth}
|
sdk/python/test/test_frontend_api/test_dataset.py
CHANGED
|
@@ -14,8 +14,8 @@ def test_dataset(get_auth):
|
|
| 14 |
dataset_list = []
|
| 15 |
while True:
|
| 16 |
res = list_dataset(get_auth, page_number)
|
| 17 |
-
data = res.get("data")
|
| 18 |
-
for item in data
|
| 19 |
dataset_id = item.get("id")
|
| 20 |
dataset_list.append(dataset_id)
|
| 21 |
if len(dataset_list) < page_number * 150:
|
|
@@ -43,8 +43,8 @@ def test_dataset_1k_dataset(get_auth):
|
|
| 43 |
dataset_list = []
|
| 44 |
while True:
|
| 45 |
res = list_dataset(get_auth, page_number)
|
| 46 |
-
data = res.get("data")
|
| 47 |
-
for item in data
|
| 48 |
dataset_id = item.get("id")
|
| 49 |
dataset_list.append(dataset_id)
|
| 50 |
if len(dataset_list) < page_number * 150:
|
|
@@ -66,7 +66,7 @@ def test_duplicated_name_dataset(get_auth):
|
|
| 66 |
|
| 67 |
# list dataset
|
| 68 |
res = list_dataset(get_auth, 1)
|
| 69 |
-
data = res.get("data")
|
| 70 |
dataset_list = []
|
| 71 |
pattern = r'^test_create_dataset.*'
|
| 72 |
for item in data:
|
|
@@ -109,7 +109,7 @@ def test_update_different_params_dataset(get_auth):
|
|
| 109 |
dataset_list = []
|
| 110 |
while True:
|
| 111 |
res = list_dataset(get_auth, page_number)
|
| 112 |
-
data = res.get("data")
|
| 113 |
for item in data:
|
| 114 |
dataset_id = item.get("id")
|
| 115 |
dataset_list.append(dataset_id)
|
|
|
|
| 14 |
dataset_list = []
|
| 15 |
while True:
|
| 16 |
res = list_dataset(get_auth, page_number)
|
| 17 |
+
data = res.get("data").get("kbs")
|
| 18 |
+
for item in data:
|
| 19 |
dataset_id = item.get("id")
|
| 20 |
dataset_list.append(dataset_id)
|
| 21 |
if len(dataset_list) < page_number * 150:
|
|
|
|
| 43 |
dataset_list = []
|
| 44 |
while True:
|
| 45 |
res = list_dataset(get_auth, page_number)
|
| 46 |
+
data = res.get("data").get("kbs")
|
| 47 |
+
for item in data:
|
| 48 |
dataset_id = item.get("id")
|
| 49 |
dataset_list.append(dataset_id)
|
| 50 |
if len(dataset_list) < page_number * 150:
|
|
|
|
| 66 |
|
| 67 |
# list dataset
|
| 68 |
res = list_dataset(get_auth, 1)
|
| 69 |
+
data = res.get("data").get("kbs")
|
| 70 |
dataset_list = []
|
| 71 |
pattern = r'^test_create_dataset.*'
|
| 72 |
for item in data:
|
|
|
|
| 109 |
dataset_list = []
|
| 110 |
while True:
|
| 111 |
res = list_dataset(get_auth, page_number)
|
| 112 |
+
data = res.get("data").get("kbs")
|
| 113 |
for item in data:
|
| 114 |
dataset_id = item.get("id")
|
| 115 |
dataset_list.append(dataset_id)
|
sdk/python/test/test_sdk_api/t_chunk.py
CHANGED
|
@@ -190,4 +190,7 @@ def test_retrieve_chunks(get_api_key_fixture):
|
|
| 190 |
docs = ds.upload_documents(documents)
|
| 191 |
doc = docs[0]
|
| 192 |
doc.add_chunk(content="This is a chunk addition test")
|
| 193 |
-
rag.retrieve(dataset_ids=[ds.id],document_ids=[doc.id])
|
|
|
|
|
|
|
|
|
|
|
|
| 190 |
docs = ds.upload_documents(documents)
|
| 191 |
doc = docs[0]
|
| 192 |
doc.add_chunk(content="This is a chunk addition test")
|
| 193 |
+
rag.retrieve(dataset_ids=[ds.id],document_ids=[doc.id])
|
| 194 |
+
rag.delete_datasets(ids=[ds.id])
|
| 195 |
+
|
| 196 |
+
# test different parameters for the retrieval
|