@@ -1,4 +1,4 @@
-from fastapi import APIRouter, Request, Depends, HTTPException
+from fastapi import APIRouter, Request, Depends, HTTPException, Query, Response
 from sqlalchemy.exc import IntegrityError
 from fastapi.responses import JSONResponse
@@ -7,279 +7,153 @@ from sqlalchemy.orm import Session
 from models import *
 import json
 import random
-from sqlalchemy import create_engine, select
+from sqlalchemy import create_engine, select, or_
 from typing import Optional

 router = APIRouter()

-# @router.post('/create')
-# async def create_knowledge(request:Request,db:Session = Depends(get_db)):
-#     data = await request.body()
-#     body = data.decode(encoding='utf-8')
-#     if len(body) > 0:
-#         body = json.loads(body)
-#     print(body)
-#     random_10_digit_number = random.randint(1000000000, 9999999999)
-#     # file_identifier = 'f'
-#     reportId = 'ZJBG'+str(random_10_digit_number)
-#     # file_identifier =
-#     reportName = body["reportName"]
-#     subject = body["subject"]
-#     eventType = body["eventType"]
-#     publishingUnit = body["publishingUnit"]
-#     publishDate = body["publishDate"]
-#     summary = body["summary"]
-#
-#     notificationType = body["notificationType"]
-#
-#     base_code = 'base'+str(random.randint(1000000000, 9999999999))
-#
-#     fileNames = body["fileName"]
-#     filePath = '/data/upload/mergefile/'
-#
-#
-#
-#     konwledge = KnowledgeBase(
-#         reportId=reportId,
-#
-#         reportName=reportName,
-#         subject=subject,
-#         eventType=eventType,
-#         publishingUnit=publishingUnit,
-#         publishDate=publishDate,
-#         summary = summary,
-#         notificationType = notificationType,
-#
-#         base_code = base_code
-#     )
-#     db.add(konwledge)
-#
-#     for fileName in fileNames:
-#         file_identifier='file'+str(random.randint(1000000000, 9999999999))
-#         knowledge_file = KnowledgeFile(
-#             file_identifier=file_identifier,
-#             file_path=filePath,
-#             file_name = fileName,
-#             is_deleted = 0,
-#             knowledge_base_code = base_code
-#         )
-#         db.add(knowledge_file)
-#
-#     db.commit()
-#     return {
-#         "code":0,
-#         "data":{
-#             "reportId": reportId,
-#             "status": "success",
-#             "message": "总结报告创建成功"
-#         }
-#     }
+
 @router.post('/create')
 async def create_knowledge(request: Request, db: Session = Depends(get_db)):
-    try:
-        data = await request.body()
-        body = json.loads(data.decode(encoding='utf-8'))
-
-        # 验证必需的字段
-        required_fields = ['reportName', 'subject', 'eventType', 'publishingUnit', 'publishDate', 'summary',
-                           # 'notificationType',
-                           'fileNames']
-        missing_fields = [field for field in required_fields if field not in body]
-        print('missing_fields',missing_fields)
-        if missing_fields:
-            raise HTTPException(status_code=401, detail=f"Missing required fields: {', '.join(missing_fields)}")
-
-        # 生成随机的报告ID和基础知识代码
-        random_10_digit_number = random.randint(1000000000, 9999999999)
-        reportId = 'ZJBG' + str(random_10_digit_number)
-        base_code = 'base' + str(random.randint(1000000000, 9999999999))
-
-        # 从请求体中提取其他数据
-        reportName = body["reportName"]
-        subject = body["subject"]
-        eventType = body["eventType"]
-        publishingUnit = body["publishingUnit"]
-        publishDate = body["publishDate"]
-        summary = body["summary"]
-        notificationType = "总结报告"
-        updateTime = body["publishDate"]
-        # notificationType = body["notificationType"]
-
-        fileNames = body["fileNames"]  # 注意:这里假设它是列表
-
-        # 创建 KnowledgeBase 实例
-        konwledge = KnowledgeBase(
-            reportId=reportId,
-            reportName=reportName,
-            subject=subject,
-            eventType=eventType,
-            publishingUnit=publishingUnit,
-            publishDate=publishDate,
-            summary=summary,
-            notificationType=notificationType,
-            base_code=base_code,
-            updateTime=updateTime
+    body = await request.json()
+    required_fields = ['reportName', 'subject', 'eventType', 'publishingUnit', 'publishDate', 'summary', 'fileNames']
+    if not all(field in body for field in required_fields):
+        missing_fields = ", ".join([field for field in required_fields if field not in body])
+        return Response(content=f"Missing required fields: {missing_fields}", status_code=400)
+
+    reportId = f'ZJBG{random.randint(1000000000, 9999999999)}'
+    base_code = f'base{random.randint(1000000000, 9999999999)}'
+
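+    # Copy the validated scalar fields straight into the new KnowledgeBase row; fileNames is handled separately below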
+    knowledge = KnowledgeBase(
+        reportId=reportId,
+        **{
+            field: body[field]
+            for field in required_fields
+            if field != 'fileNames'
+        },
+        base_code=base_code,
+        del_flag=0,
+        updateTime=body['publishDate'],
+        notificationType="总结报告"  # hard-coded for now; adjust if other notification types need to be supported
+    )
+    db.add(knowledge)
+
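+    # Create one KnowledgeFile row per uploaded file name, linked back to the report through base_code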
+    knowledge_files = [
+        KnowledgeFile(
+            file_identifier=f'file{random.randint(1000000000, 9999999999)}',
+            file_path=f'/data/upload/mergefile/{fileName}',  # assumes fileName is a bare file name, not a full path
+            file_name=fileName,
+            is_deleted=0,
+            updateTime=body['publishDate'],
+            createTime=body['publishDate'],
+            knowledge_base_code=base_code
         )
-        db.add(konwledge)
-
-        # 创建 KnowledgeFile 实例
-        filePath = '/data/upload/mergefile/'
-        for fileName in fileNames:
-            file_identifier = 'file' + str(random.randint(1000000000, 9999999999))
-            knowledge_file = KnowledgeFile(
-                file_identifier=file_identifier,
-                file_path=filePath + fileName,  # 如果fileName是完整的路径,则可能不需要再次添加filePath
-                file_name=fileName,
-                is_deleted=0,
-                updateTime = updateTime,
-                createTime = updateTime,
-                knowledge_base_code=base_code
-            )
-            db.add(knowledge_file)
+        for fileName in body['fileNames']
+    ]
+    db.add_all(knowledge_files)
+
+    try:
         db.commit()
         return {
             "code": 200,
             "msg": "总结报告创建成功",
             "status": "success",
-            "data":
-                [reportId]
-
+            "data": [reportId]
         }
-
-        # return {
-        #     "code": 200,
-        #     "msg": "操作成功",
-        #     "data": {
-        #         "reportId": reportId,
-        #         "status": "success",
-        #         "message": "总结报告创建成功"
-        #     }
-        # }
-
-    except json.JSONDecodeError:
-        raise HTTPException(status_code=400, detail="Invalid JSON data")
     except IntegrityError as e:
         db.rollback()
-        raise HTTPException(status_code=409, detail=f"Database error: {str(e)}")
+        return Response(content=f"Database error: {str(e)}", status_code=409)
     except Exception as e:
         db.rollback()
-        print(e)
-        raise HTTPException(status_code=500, detail=f"Internal server error: {str(e)}")
+        return Response(content=f"Internal server error: {str(e)}", status_code=500)
+
+
-@router.post('/select')
 @router.get('/select')
-async def select_knowledge(request: Request, db: Session = Depends(get_db), pageNum: Optional[int] = None,
-                           pageSize: Optional[int] = None):
-    # page_from_json = None
-    try:
-        # # 尝试从请求体中解析 JSON 数据
-        json_data = await request.json()
-
-        # 初始化分页参数
-        page_from_json = json_data.get('pageNum') if json_data else None
-        size_from_json = json_data.get('pageSize') if json_data else None
-    except:
-        page_from_json = None
-
-    # 如果查询参数和请求体都存在,优先选择查询参数
-    if pageNum is not None and pageSize is not None:
-        # 使用查询参数
-        page_to_use = pageNum
-        size_to_use = pageSize
-    elif page_from_json is not None and size_from_json is not None:
-        # 使用请求体中的参数
-        page_to_use = page_from_json
-        size_to_use = size_from_json
-    else:
-        # 如果只有一个存在,使用存在的那个,否则抛出异常
-        if pageNum is not None:
-            page_to_use = pageNum
-        elif page_from_json is not None:
-            page_to_use = page_from_json
-        else:
-            raise HTTPException(status_code=400, detail="Page parameter is required")
-
-        if pageSize is not None:
-            size_to_use = pageSize
-        elif size_from_json is not None:
-            size_to_use = size_from_json
-        else:
-            raise HTTPException(status_code=400, detail="Size parameter is required")
-
-    # 验证分页参数
-    if size_to_use > 100:
-        size_to_use = 100
-
-    # 计算 offset
-    offset = (page_to_use - 1) * size_to_use
-
-    # 使用 ORM 查询并应用分页
-    data = db.query(KnowledgeBase).offset(offset).limit(size_to_use).all()
-
-    # 计算总条数
-    total_count = db.query(func.count(KnowledgeBase.reportId)).scalar()
-
-    # 计算总页数
-    total_pages = (total_count // size_to_use) + (1 if total_count % size_to_use else 0)
-
-    # 返回查询结果
-    # result = {
-    #     "code": 200,
-    #     'msg': '查询成功',
-    #     'data': {
-    #         'pages': total_pages,
-    #         'total': total_count,
-    #         "currentPage": page_to_use,
-    #         "pageSize": size_to_use,
-    #         "rows": data
-    #     }
-    # }
+async def select_knowledge(
+    db: Session = Depends(get_db),
+    sortBy: str = Query(..., description="排序字段"),
+    sortOrder: str = Query(..., description="排序顺序"),
+    pageNum: int = Query(1, gt=0, description="页码"),
+    pageSize: int = Query(10, gt=0, le=100, description="每页大小"),
+    eventType: str = Query(None, description="事件类型"),
+    publishDateRange: str = Query(None, description="发布日期范围"),
+    query: str = Query(None, description="查询关键字")
+):
+    data_query = db.query(KnowledgeBase)
+    data_query = data_query.filter(KnowledgeBase.del_flag != '2')
+
+    if eventType:
+        data_query = data_query.filter(KnowledgeBase.eventType == eventType)
+
+    if publishDateRange:
+        # assumes a "start-end" string whose two halves do not themselves contain '-'
+        start_date, end_date = publishDateRange.split('-')
+        data_query = data_query.filter(KnowledgeBase.publishDate.between(start_date, end_date))
+
+    if query:
+        search_fields = [getattr(KnowledgeBase, field) for field in ('reportName', 'publishingUnit', 'reportId') if hasattr(KnowledgeBase, field)]
+        search_conditions = [field.like(f'%{query}%') for field in search_fields]
+        data_query = data_query.filter(or_(*search_conditions))
+
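+    # Sort only on attributes that exist on KnowledgeBase; unknown sortBy values are silently ignored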
+    if hasattr(KnowledgeBase, sortBy):
+        sort_attr = getattr(KnowledgeBase, sortBy)
+        data_query = data_query.order_by(sort_attr.asc() if sortOrder == 'asc' else sort_attr.desc())
+
+    # Count the filtered rows before pagination so 'total' reflects the whole result set
+    total_count = data_query.count()
+    offset = (pageNum - 1) * pageSize
+
+    fields = ['reportId', 'reportName', 'eventType', 'publishDate', 'publishingUnit', 'summary', 'subject', 'notificationType']
+    fields = [field for field in fields if hasattr(KnowledgeBase, field)]
+    entities = [getattr(KnowledgeBase, field) for field in fields]
+    data = data_query.with_entities(*entities).offset(offset).limit(pageSize).all()
+
+    result_items = []
+    for item in data:
+        item_dict = {field: getattr(item, field) for field in fields}
+        result_items.append(item_dict)
+
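+    # Assemble the paginated response payload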
     result = {
         "code": 200,
         'msg': '查询成功',
-        'pages': total_pages,
+        'pages': (total_count + pageSize - 1) // pageSize,
         'total': total_count,
-        "currentPage": page_to_use,
-        "pageSize": size_to_use,
-        'data': data
+        "currentPage": pageNum,
+        "pageSize": pageSize,
+        'data': result_items
     }
+
     return result
-@router.post('/detail')
+
+
 @router.get('/detail')
-async def get_knowledge_detail(request: Request, db: Session = Depends(get_db), reportID: Optional[str] = None):
-    # 尝试从请求体中解析 JSON 数据
-    report_id_body=None
-    try:
-        data = await request.json()
-        report_id_body = data.get('reportID') if data else None
-    except:
-        print('报错')
-        pass
-
-    # 确定要使用的 report_id
-    report_id_to_use = reportID or report_id_body
-    print(reportID)
-    print(report_id_body)
-
-    # 如果没有提供 report_id,则抛出异常
+async def get_knowledge_detail(db: Session = Depends(get_db), reportID: Optional[str] = Query(None, description="报告ID")):
+    report_id_to_use = reportID
+
     if not report_id_to_use:
         raise HTTPException(status_code=400, detail="Missing required parameter 'reportID'")

-    # 查询 KnowledgeBase
     kb_entry = db.query(KnowledgeBase).filter(KnowledgeBase.reportId == report_id_to_use).first()
     if not kb_entry:
         raise HTTPException(status_code=404, detail="No knowledge base found for the given report ID")

-    # 查询相关的 KnowledgeFile
     kf_entries = db.query(KnowledgeFile).filter(KnowledgeFile.knowledge_base_code == kb_entry.base_code).all()

-    # 准备返回的数据
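+    # Build a download link for every file attached to this report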
+    files = [
+        {"content": kf.file_name, "url": f'http://127.0.0.1:9988/api/file/download/{kf.file_name}'}
+        for kf in kf_entries
+    ]
+
     result = {
         "code": 200,
         "msg": "查询成功",
@@ -292,10 +166,7 @@ async def get_knowledge_detail(request: Request, db: Session = Depends(get_db),
             "publishingUnit": kb_entry.publishingUnit,
             "summary": kb_entry.summary,
             "notificationType": kb_entry.notificationType,
-            "file": [
-                {"content": kf.file_name, "url": f'http://127.0.0.1:9988/api/file/download/{kf.file_name}'}
-                for kf in kf_entries
-            ]
+            "file": files
         }]
     }