@@ -16,5 +16,102 @@ from common.security import valid_access_token
 import traceback
 from utils import *
 from datetime import datetime, timedelta
+import os
 
-router = APIRouter()
+router = APIRouter()
+
+# Report casualty situation
+@router.post("/creat_event_briefing")
+async def creat_event_briefing(
+    request: Request,
+    body = Depends(remove_xss_json),
+    db: Session = Depends(get_db),
+    user_id = Depends(valid_access_token)
+):
+    fileList = []
+    if 'fileList' in body:
+        fileList = body['fileList']
+        del body['fileList']
+
+    longitude = ''
+    latitude = ''
+    if 'lnglat' in body:
+        longitude = body['lnglat'][0]
+        latitude = body['lnglat'][1]
+        del body['lnglat']
+
+    new_briefing = EventBriefing(**body, del_flag='0', recorded_by=user_id, create_time=datetime.now(), longitude=longitude, latitude=latitude)
+    db.add(new_briefing)
+    db.commit()
+    db.refresh(new_briefing)
+
+    if len(fileList) > 0:
+        event_files = [
+            EventFile(
+                event_id=new_briefing.id,
+                file_name=fileName["name"],  # use fileName["name"] as the file name
+                file_path=f'/data/upload/mergefile/uploads/{fileName["url"]}',
+                file_size=os.path.getsize(f'/data/upload/mergefile/uploads/{fileName["url"]}'),
+                storage_file_name=fileName["url"],
+                foreign_key=new_briefing.event_id,
+                from_scenario="event_briefing",
+                update_time=datetime.now(),
+                create_time=datetime.now(),
+                del_flag='0'
+            )
+            for fileName in fileList  # body['fileList'] is now an array of objects, each with 'name' and 'url' attributes
+        ]
+        db.add_all(event_files)
+        db.commit()
+
+    return {
+        "code": 200,
+        "msg": "Event briefing saved successfully"
+    }
+
+
+@router.get('/event_briefing/list')
+async def get_event_list(
+    event_id: str,
+    page: int = Query(1, gt=0, description='page number'),
+    page_size: int = Query(10, gt=0, description='pageSize'),
+    db: Session = Depends(get_db)
+):
+    try:
+        # Apply query conditions
+        where = and_(EventBriefing.del_flag == '0', EventBriefing.event_id == event_id)
+        # Count total rows
+        q = db.query(func.count(EventBriefing.id))
+        q = q.filter(where)
+        total = q.scalar()
+
+        # Run the paginated query
+        q = db.query(EventBriefing)
+        q = q.filter(where)
+        rows = q.order_by(EventBriefing.id.desc()).offset((page - 1) * page_size).limit(page_size).all()
+        data = [
+            {
+                "event_id": row.event_id,
+                "address": row.address,
+                "longitude": row.longitude,
+                "latitude": row.latitude,
+                "report_dept": row.report_dept,
+                "briefing_content": row.briefing_content,
+                "create_time": get_datetime_str(row.create_time),
+                "files": db_event_management.get_briefing_file_list(db, row.id)
+            }
+            for row in rows
+        ]
+
+        # Return the result
+        return {
+            "code": 200,
+            "msg": "Query successful",
+            "data": data,
+            "total": total
+        }
+
+    except Exception as e:
+        # Handle the exception
+        traceback.print_exc()
+        raise HTTPException(status_code=500, detail=str(e))
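
A rough usage sketch (not part of the diff) showing how a client might call the two endpoints above. The mount prefix, the bearer-token auth scheme, and the exact set of EventBriefing columns sent in the body are assumptions for illustration; the fileList and lnglat shapes follow directly from the handler code.

import requests

BASE = "http://localhost:8000"                 # assumed host and mount prefix
HEADERS = {"Authorization": "Bearer <token>"}  # assumed auth scheme for valid_access_token

# Payload shape expected by creat_event_briefing:
# - 'lnglat' is split into longitude/latitude by the handler
# - each 'fileList' item needs 'name' and 'url'; the url must already exist
#   under /data/upload/mergefile/uploads/ or os.path.getsize will fail
payload = {
    "event_id": "EV20240001",                       # illustrative id
    "address": "example address",                   # illustrative EventBriefing columns
    "report_dept": "example dept",
    "briefing_content": "example briefing text",
    "lnglat": [113.26, 23.13],
    "fileList": [{"name": "scene.jpg", "url": "2024/05/scene.jpg"}],
}
resp = requests.post(f"{BASE}/creat_event_briefing", json=payload, headers=HEADERS)
print(resp.json())  # expected: {"code": 200, "msg": "Event briefing saved successfully"}

# Paginated listing of briefings for the same event
resp = requests.get(
    f"{BASE}/event_briefing/list",
    params={"event_id": "EV20240001", "page": 1, "page_size": 10},
    headers=HEADERS,
)
result = resp.json()
print(result["total"], result["data"])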