Fix some issues in API (#2982)
### What problem does this PR solve?

Fix some issues in API.

### Type of change

- [x] Bug Fix (non-breaking change which fixes an issue)

---------

Co-authored-by: liuhua <10215101452@stu.ecun.edu.cn>
@@ -9,7 +9,7 @@ class Chat(Base):
         self.id = ""
         self.name = "assistant"
         self.avatar = "path/to/avatar"
-        self.knowledgebases = ["kb1"]
+        self.datasets = ["kb1"]
         self.llm = Chat.LLM(rag, {})
         self.prompt = Chat.Prompt(rag, {})
         super().__init__(rag, res_dict)
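The hunk above renames the `Chat.knowledgebases` attribute to `Chat.datasets`, so callers reading the old attribute need the same rename. A minimal migration sketch; the `ragflow_sdk` import path, the `RAGFlow(api_key=..., base_url=...)` constructor, and the `list_chats()` accessor are assumptions about the surrounding SDK, not shown in this diff:

```python
from ragflow_sdk import RAGFlow  # import path is an assumption, not shown in this diff

rag = RAGFlow(api_key="<YOUR_API_KEY>", base_url="http://localhost:9380")
chat = rag.list_chats()[0]  # accessor is an assumption; any Chat instance works
print(chat.datasets)        # was chat.knowledgebases before this commit
```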
@@ -8,10 +8,10 @@ class Chunk(Base):
         self.important_keywords = []
         self.create_time = ""
         self.create_timestamp = 0.0
-        self.knowledgebase_id = None
+        self.dataset_id = None
         self.document_name = ""
         self.document_id = ""
-        self.available = 1
+        self.available = True
         for k in list(res_dict.keys()):
             if k not in self.__dict__:
                 res_dict.pop(k)
@@ -19,7 +19,7 @@ class Chunk(Base):


     def update(self,update_message:dict):
-        res = self.put(f"/dataset/{self.knowledgebase_id}/document/{self.document_id}/chunk/{self.id}",update_message)
+        res = self.put(f"/dataset/{self.dataset_id}/document/{self.document_id}/chunk/{self.id}",update_message)
         res = res.json()
         if res.get("code") != 0 :
             raise Exception(res["message"])
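With the rename, `Chunk.update` now builds its route from `dataset_id`, and `available` is a proper bool rather than `0`/`1`. A usage sketch; the payload keys are illustrative assumptions, and `doc` stands for any `Document` from this SDK:

```python
chunks = doc.list_chunks(keywords="invoice", offset=0, limit=5)
chunk = chunks[0]
# Issues PUT /dataset/{chunk.dataset_id}/document/{chunk.document_id}/chunk/{chunk.id}
chunk.update({"content": "corrected text", "available": True})
```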
@@ -10,10 +10,6 @@ from .base import Base
 class DataSet(Base):
     class ParserConfig(Base):
         def __init__(self, rag, res_dict):
-            self.chunk_token_count = 128
-            self.layout_recognize = True
-            self.delimiter = '\n!?。;!?'
-            self.task_page_size = 12
             super().__init__(rag, res_dict)

     def __init__(self, rag, res_dict):
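After this change `DataSet.ParserConfig` no longer injects client-side defaults; it only mirrors whatever dict it is given, typically the server response. Callers that relied on the implicit defaults should pass them explicitly. A sketch reusing exactly the values the commit removed; constructing the config by hand like this is illustrative, not a documented entry point:

```python
cfg = DataSet.ParserConfig(rag, {
    "chunk_token_count": 128,   # former client-side default
    "layout_recognize": True,   # former client-side default
    "delimiter": "\n!?。;!?",  # former client-side default
    "task_page_size": 12,       # former client-side default
})
```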
@@ -43,11 +39,16 @@ class DataSet(Base):

     def upload_documents(self,document_list: List[dict]):
         url = f"/dataset/{self.id}/document"
-        files = [("file",(ele["name"],ele["blob"])) for ele in document_list]
+        files = [("file",(ele["displayed_name"],ele["blob"])) for ele in document_list]
         res = self.post(path=url,json=None,files=files)
         res = res.json()
-        if res.get("code") != 0:
-            raise Exception(res.get("message"))
+        if res.get("code") == 0:
+            doc_list=[]
+            for doc in res["data"]:
+                document = Document(self.rag,doc)
+                doc_list.append(document)
+            return doc_list
+        raise Exception(res.get("message"))

     def list_documents(self, id: str = None, keywords: str = None, offset: int =1, limit: int = 1024, orderby: str = "create_time", desc: bool = True):
         res = self.get(f"/dataset/{self.id}/info",params={"id": id,"keywords": keywords,"offset": offset,"limit": limit,"orderby": orderby,"desc": desc})
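Two caller-visible changes land here: each upload dict must now use the key `displayed_name` instead of `name`, and on success the method returns the created `Document` objects instead of falling through with no return value. A sketch, assuming `dataset` is a `DataSet` obtained from the client:

```python
with open("manual.pdf", "rb") as f:
    blob = f.read()

docs = dataset.upload_documents([
    {"displayed_name": "manual.pdf", "blob": blob},  # key renamed from "name"
])
print([doc.id for doc in docs])  # now returns the created Document objects
```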
@@ -5,12 +5,16 @@ from typing import List


 class Document(Base):
+    class ParserConfig(Base):
+        def __init__(self, rag, res_dict):
+            super().__init__(rag, res_dict)
+
     def __init__(self, rag, res_dict):
         self.id = ""
         self.name = ""
         self.thumbnail = None
-        self.knowledgebase_id = None
-        self.chunk_method = ""
+        self.dataset_id = None
+        self.chunk_method = "naive"
         self.parser_config = {"pages": [[1, 1000000]]}
         self.source_type = "local"
         self.type = ""
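Besides the `knowledgebase_id` to `dataset_id` rename, a freshly constructed `Document` now defaults `chunk_method` to `"naive"` (RAGFlow's general-purpose chunking template) instead of an empty string. On the caller side the rename looks like this:

```python
# Before this commit: doc.knowledgebase_id
print(doc.dataset_id, doc.chunk_method)  # chunk_method now defaults to "naive"
```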
@@ -31,14 +35,14 @@ class Document(Base):


     def update(self, update_message: dict):
-        res = self.put(f'/dataset/{self.knowledgebase_id}/info/{self.id}',
+        res = self.put(f'/dataset/{self.dataset_id}/info/{self.id}',
                        update_message)
         res = res.json()
         if res.get("code") != 0:
             raise Exception(res["message"])

     def download(self):
-        res = self.get(f"/dataset/{self.knowledgebase_id}/document/{self.id}")
+        res = self.get(f"/dataset/{self.dataset_id}/document/{self.id}")
         try:
             res = res.json()
             raise Exception(res.get("message"))
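`update` and `download` now derive their routes from `self.dataset_id`. The truncated `download` context also shows its error handling: if the response body parses as JSON it is treated as an error envelope and raised; the success path, returning the raw file content, lies outside this hunk. A usage sketch; the update keys are illustrative assumptions:

```python
doc.update({"name": "renamed.pdf"})  # PUT /dataset/{doc.dataset_id}/info/{doc.id}
blob = doc.download()                # GET /dataset/{doc.dataset_id}/document/{doc.id}
with open("copy.pdf", "wb") as f:
    f.write(blob)                    # assumes download() returns raw bytes on success
```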
@@ -48,7 +52,7 @@ class Document(Base):

     def list_chunks(self,offset=0, limit=30, keywords="", id:str=None):
         data={"document_id": self.id,"keywords": keywords,"offset":offset,"limit":limit,"id":id}
-        res = self.get(f'/dataset/{self.knowledgebase_id}/document/{self.id}/chunk', data)
+        res = self.get(f'/dataset/{self.dataset_id}/document/{self.id}/chunk', data)
         res = res.json()
         if res.get("code") == 0:
             chunks=[]
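`list_chunks` follows the same rename and forwards pagination and keyword filters alongside the document id. A sketch; the chunk attributes printed come from the `Chunk` hunk earlier in this commit, and the list return value is inferred from the `chunks=[]` accumulator the hunk truncates:

```python
for chunk in doc.list_chunks(offset=0, limit=30, keywords="warranty"):
    print(chunk.id, chunk.available, chunk.important_keywords)
```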
@@ -59,15 +63,15 @@ class Document(Base):
         raise Exception(res.get("message"))


-    def add_chunk(self, content: str):
-        res = self.post(f'/dataset/{self.knowledgebase_id}/document/{self.id}/chunk', {"content":content})
+    def add_chunk(self, content: str,important_keywords:List[str]=[]):
+        res = self.post(f'/dataset/{self.dataset_id}/document/{self.id}/chunk', {"content":content,"important_keywords":important_keywords})
         res = res.json()
         if res.get("code") == 0:
             return Chunk(self.rag,res["data"].get("chunk"))
         raise Exception(res.get("message"))

     def delete_chunks(self,ids:List[str]):
-        res = self.rm(f"dataset/{self.knowledgebase_id}/document/{self.id}/chunk",{"ids":ids})
+        res = self.rm(f"dataset/{self.dataset_id}/document/{self.id}/chunk",{"ids":ids})
         res = res.json()
         if res.get("code")!=0:
             raise Exception(res.get("message"))
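`add_chunk` gains an `important_keywords` parameter that is forwarded verbatim in the POST body, and `delete_chunks` switches its route to `dataset_id`. A sketch:

```python
chunk = doc.add_chunk(
    content="The warranty period is 24 months.",
    important_keywords=["warranty", "24 months"],  # new parameter in this commit
)
doc.delete_chunks([chunk.id])
```

One design note: `important_keywords: List[str] = []` is a mutable default argument. It is safe here because the list is only read and forwarded, but defaulting to `None` and normalizing inside the function is the more defensive Python idiom.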
@@ -40,7 +40,7 @@ class Session(Base):
                     "content": chunk["content_with_weight"],
                     "document_id": chunk["doc_id"],
                     "document_name": chunk["docnm_kwd"],
-                    "knowledgebase_id": chunk["kb_id"],
+                    "dataset_id": chunk["kb_id"],
                     "image_id": chunk["img_id"],
                     "similarity": chunk["similarity"],
                     "vector_similarity": chunk["vector_similarity"],
@@ -75,7 +75,7 @@ class Chunk(Base):
         self.content = None
         self.document_id = ""
         self.document_name = ""
-        self.knowledgebase_id = ""
+        self.dataset_id = ""
         self.image_id = ""
         self.similarity = None
         self.vector_similarity = None
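Finally, reference chunks attached to session messages expose `dataset_id`, still populated from the server's unchanged `kb_id` field. A sketch of reading it from a chat turn; `create_session`, the streaming `ask`, and the `reference` attribute are assumptions about the surrounding SDK, not part of this diff:

```python
session = chat.create_session("demo")  # accessor is an assumption
answer = None
for answer in session.ask("What does the warranty cover?"):
    pass  # drain the stream; the last yielded message is the full answer
for ref in answer.reference:  # attribute name is an assumption
    print(ref.dataset_id, ref.document_name, ref.similarity)
```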