#!/usr/bin/env python3
# -*- coding: utf-8 -*-
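"""Warehouse management endpoints for the resource provision (物资储备) module:
create, update, detail, paginated list, Excel export, soft delete and Excel import."""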
from fastapi import APIRouter, Request, Depends, Query, HTTPException, status, WebSocket, WebSocketDisconnect
from fastapi.responses import JSONResponse, StreamingResponse
from sqlalchemy.orm import Session
from sqlalchemy.sql import func
from sqlalchemy import text
from pydantic import BaseModel
from common.security import valid_access_token, valid_websocket_token
from common.db import db_czrz
from common.auth_user import *
from common.BigDataCenterAPI import *
from common.websocketManager import *
from database import get_db
from typing import List
from models import *
from utils import *
from utils.spatial import *
from utils.ry_system_util import *
from utils.resource_provision_util import *
from datetime import datetime
from io import BytesIO
from urllib.parse import quote
import pandas as pd
import openpyxl
import json
import traceback
import xlrd
import os

# The directory is hard-coded in the document upload endpoint.
UPLOAD_mergefile_PATH = '/data/upload/mergefile'

router = APIRouter()
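
# Create a new warehouse record from the posted JSON body.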
@router.post("/create")
async def create_pattern(
    user_id=Depends(valid_access_token),
    body=Depends(remove_xss_json),
    db: Session = Depends(get_db)
):
    try:
        new_warehouse = ResourceProvisionWarehouseInfo(
            warehouse_id=new_guid(),
            warehouse_name=body['warehouse_name'],
            status=body['status'],
            contact_person=body['contact_person'],
            contact_phone=body['contact_phone'],
            address=body['address'],
            type=body['type'],
            level=body['level'],
            storage_dept_id=body['storage_dept_id'],
            storage_dept_name='',  # body['storage_dept_name']
            area_name=body['area_name'],
            longitude=body['longitude'],
            latitude=body['latitude'],
            area=body['area'],
            remark=body['remark'],
            create_by=user_id
        )
        db.add(new_warehouse)
        db.commit()
        return {"code": 200, "msg": "创建成功", "data": None}
    except Exception as e:
        traceback.print_exc()
        raise HTTPException(status_code=500, detail=f"Internal server error: {str(e)}")
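
# Update an existing warehouse in place; unknown ids return 404.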
@router.put("/update/{id}")
async def update_pattern(
    id: str,
    user_id=Depends(valid_access_token),
    body=Depends(remove_xss_json),
    db: Session = Depends(get_db)
):
    try:
        update_warehouse = warehouse_id_get_warehouse_info(db, id)
        if not update_warehouse:
            return JSONResponse(status_code=404, content={"code": 404, "msg": "warehouse not found"})
        update_warehouse.warehouse_name = body['warehouse_name']
        update_warehouse.status = body['status']
        update_warehouse.contact_person = body['contact_person']
        update_warehouse.contact_phone = body['contact_phone']
        update_warehouse.address = body['address']
        update_warehouse.type = body['type']
        update_warehouse.level = body['level']
        update_warehouse.storage_dept_id = body['storage_dept_id']
        update_warehouse.storage_dept_name = body['storage_dept_name']
        update_warehouse.area_name = body['area_name']
        update_warehouse.longitude = body['longitude']
        update_warehouse.latitude = body['latitude']
        update_warehouse.area = body['area']
        update_warehouse.remark = body['remark']
        update_warehouse.update_by = user_id
        db.commit()
        return {"code": 200, "msg": "更新成功"}
    except Exception as e:
        traceback.print_exc()
        raise HTTPException(status_code=500, detail=f"Internal server error: {str(e)}")
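
# Return the detail of a single warehouse, resolving the storage dept name from its id.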
@router.get("/info/{id}")
async def get_pattern_info(
    id: str,
    user_id=Depends(valid_access_token),
    db: Session = Depends(get_db)
):
    try:
        info = warehouse_id_get_warehouse_info(db, id)
        if not info:
            return JSONResponse(status_code=404, content={"code": 404, "msg": "warehouse not found"})
        dept_info = dept_id_get_dept_info(db, info.storage_dept_id)
        data = {"warehouse_id": info.warehouse_id,
                "warehouse_name": info.warehouse_name,
                "status": info.status,
                "contact_person": info.contact_person,
                "contact_phone": info.contact_phone,
                "address": info.address,
                "remark": info.remark,
                "type": info.type,
                "level": info.level,
                "storage_dept_id": info.storage_dept_id,
                # fall back to the stored name if the dept record cannot be resolved
                "storage_dept_name": dept_info.dept_name if dept_info else info.storage_dept_name,
                "area_name": info.area_name,
                "longitude": info.longitude,
                "latitude": info.latitude,
                "create_by": info.create_by,
                "area": info.area,
                "create_time": info.create_time}
        return {"code": 200, "msg": "获取成功", "data": data}
    except Exception as e:
        traceback.print_exc()
        raise HTTPException(status_code=500, detail=f"Internal server error: {str(e)}")
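
# Paginated warehouse list; when pageSize is omitted the full result set is returned as one page.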
@router.get("/list")
async def get_pattern_list(
    # name: str = Query(None, description='名称'),
    user_id=Depends(valid_access_token),
    page: int = Query(1, gt=0, description='页码'),
    pageSize: int = Query(None, gt=0, description='每页条目数量'),
    db: Session = Depends(get_db)
):
    try:
        query = db.query(ResourceProvisionWarehouseInfo)
        query = query.filter_by(del_flag='0')
        # if name:
        #     query = query.filter(ResourceProvisionWarehouseInfo.warehouse_name.like(f'%{name}%'))
        total_items = query.count()
        if pageSize is None:
            pageSize = total_items
        # newest records first
        query = query.order_by(ResourceProvisionWarehouseInfo.create_time.desc())
        # apply pagination
        lists = query.offset((page - 1) * pageSize).limit(pageSize).all()
        data = []
        for info in lists:
            dept_info = dept_id_get_dept_info(db, info.storage_dept_id)
            data.append({"warehouse_id": info.warehouse_id,
                         "warehouse_name": info.warehouse_name,
                         "status": info.status,
                         "contact_person": info.contact_person,
                         "contact_phone": info.contact_phone,
                         "address": info.address,
                         "remark": info.remark,
                         "type": info.type,
                         "level": info.level,
                         "storage_dept_id": info.storage_dept_id,
                         # fall back to the stored name if the dept record cannot be resolved
                         "storage_dept_name": dept_info.dept_name if dept_info else info.storage_dept_name,
                         "area_name": info.area_name,
                         "longitude": info.longitude,
                         "latitude": info.latitude,
                         "create_by": info.create_by,
                         "area": info.area,
                         "create_time": info.create_time})
        return {"code": 200, "msg": "查询成功", "data": data,
                "total": total_items,
                "page": page,
                "pageSize": pageSize,
                # guard against pageSize == 0 when the table is empty
                "totalPages": (total_items + pageSize - 1) // pageSize if pageSize else 0
                }
    except Exception as e:
        traceback.print_exc()
        raise HTTPException(status_code=500, detail=f"Internal server error: {str(e)}")
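
# Export the full (non-deleted) warehouse list as an Excel file and log the action.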
@router.get("/export")
async def export_data(
    request: Request,
    # name: str = Query(None, description='名称'),
    user_id=Depends(valid_access_token),
    auth_user: AuthUser = Depends(find_auth_user),
    db: Session = Depends(get_db)
):
    try:
        query = db.query(ResourceProvisionWarehouseInfo)
        query = query.filter_by(del_flag='0')
        # if name:
        #     query = query.filter(ResourceProvisionWarehouseInfo.warehouse_name.like(f'%{name}%'))
        # newest records first
        query = query.order_by(ResourceProvisionWarehouseInfo.create_time.desc())
        lists = query.all()
        data = []
        for info in lists:
            dept_info = dept_id_get_dept_info(db, info.storage_dept_id)
            data.append({"仓库id": info.warehouse_id,
                         "仓库": info.warehouse_name,
                         "状态": info.status,
                         "联系人": info.contact_person,
                         "联系电话": info.contact_phone,
                         "地址": info.address,
                         "备注": info.remark,
                         "类型": info.type,
                         "等级": info.level,
                         "物资保管部门id": info.storage_dept_id,
                         "物资保管部门名称": dept_info.dept_name if dept_info else info.storage_dept_name,
                         "地区": info.area_name,
                         "经度": info.longitude,
                         "纬度": info.latitude,
                         "创建者": info.create_by,
                         "占地面积(平方米)": info.area,
                         "数据创建时间": info.create_time})
        # convert the query result into a DataFrame and write it to an in-memory Excel file
        df = pd.DataFrame(data)
        output = BytesIO()
        with pd.ExcelWriter(output, engine='openpyxl') as writer:
            df.to_excel(writer, index=False)
        output.seek(0)
        # build the response headers; the filename is UTF-8 encoded per RFC 5987
        encoded_filename = f'仓库明细{datetime.now().strftime("%Y%m%d%H%M%S")}.xlsx'
        encoded_filename = quote(encoded_filename, encoding='utf-8')
        headers = {
            'Content-Disposition': f'attachment; filename*=UTF-8\'\'{encoded_filename}',
            'Content-Type': 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        }
        db_czrz.log(db, auth_user, "物资储备管理", "物资储备管理仓库明细导出数据成功", request.client.host)
        # stream the file back to the client
        return StreamingResponse(output, headers=headers)
    except Exception as e:
        traceback.print_exc()
        return JSONResponse(status_code=500, content={"code": 500, "msg": f"Internal server error: {str(e)}"})
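
# Soft-delete a warehouse by setting its del_flag to '2'.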
@router.delete("/delete/{id}")
async def delete_pattern(
    id: str,
    user_id=Depends(valid_access_token),
    db: Session = Depends(get_db)
):
    try:
        # make sure the warehouse exists
        info = warehouse_id_get_warehouse_info(db, id)
        if not info:
            return JSONResponse(status_code=404, content={"code": 404, "msg": "warehouse not found"})
        info.del_flag = '2'
        db.commit()
        return {"code": 200, "msg": "删除成功"}
    except Exception as e:
        traceback.print_exc()
        raise HTTPException(status_code=500, detail=f"Internal server error: {str(e)}")
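
# Bulk-import warehouses from an uploaded Excel file. The template is expected to carry the
# Chinese column comments in row 1, the field names in row 2 and the data rows from row 3 on.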
@router.post("/import_data")
async def import_data(
    body=Depends(remove_xss_json),
    user_id=Depends(valid_access_token),
    db: Session = Depends(get_db)
):
    # target schema/table for the import (the insert itself goes through the ORM model)
    schema_name = "mmyjhd"
    table_name = "resource_provision_warehouse_info"
    columns = [{"column_name": "warehouse_name", "column_comment": "仓库名称"},
               {"column_name": "status", "column_comment": "状态1启用0关闭"},
               {"column_name": "contact_person", "column_comment": "联系人"},
               {"column_name": "contact_phone", "column_comment": "联系电话"},
               {"column_name": "address", "column_comment": "地址"},
               {"column_name": "type", "column_comment": "类型"},
               {"column_name": "level", "column_comment": "等级"},
               {"column_name": "storage_dept_name", "column_comment": "物资保管部门名称"},
               {"column_name": "area_name", "column_comment": "地区(区县)"},
               {"column_name": "longitude", "column_comment": "经度"},
               {"column_name": "latitude", "column_comment": "纬度"},
               {"column_name": "area", "column_comment": "占地面积(平方米)"},
               {"column_name": "remark", "column_comment": "备注"}]
    filename = body['filename']
    if '../' in filename or '/' in filename:
        return JSONResponse(status_code=400, content={'code': 400, "msg": '警告:禁止篡改文件路径'})
    file_path = f'{UPLOAD_mergefile_PATH}/uploads/{filename}'
    if not os.path.exists(file_path):
        return JSONResponse(status_code=404, content={'code': 404, 'msg': "文件不存在"})
    # read the Excel file: openpyxl for the header checks, pandas for the data rows
    try:
        workbook = openpyxl.load_workbook(file_path)
        sheet = workbook.active
        data = pd.read_excel(file_path, header=1)
        data = data.to_dict(orient='records')
    except Exception as e:
        traceback.print_exc()
        return JSONResponse(status_code=400, content={'code': 400, 'msg': f"接口发生错误:{e}"})
    # field names and their Chinese comments
    column_names = [col["column_name"] for col in columns if col["column_name"] != 'id']
    column_comments = [col["column_comment"] for col in columns if col["column_name"] != 'id']
    # row 1 must contain the column comments
    first_row = [cell.value for cell in sheet[1]]
    if first_row != column_comments:
        print("接口发生错误:Excel columns do not match the expected columns")
        return JSONResponse(status_code=400,
                            content={'code': 400, 'msg': "接口发生错误:Excel columns do not match the expected columns"})
    # row 2 must contain the field names
    second_row = [cell.value for cell in sheet[2]]
    if second_row != column_names:
        print("接口发生错误:Excel columns do not match the expected columns")
        return JSONResponse(status_code=400,
                            content={'code': 400, 'msg': "接口发生错误:Excel columns do not match the expected columns"})
    # insert the data rows into the database
    try:
        for row in data:
            new_warehouse = ResourceProvisionWarehouseInfo(
                warehouse_id=new_guid(),
                warehouse_name=row['warehouse_name'],
                status=row['status'],
                contact_person=row['contact_person'],
                contact_phone=row['contact_phone'],
                address=row['address'],
                type=row['type'],
                level=row['level'],
                # the template only carries the dept name, so the dept id may be absent
                storage_dept_id=row.get('storage_dept_id', ''),
                storage_dept_name=row['storage_dept_name'],
                area_name=row['area_name'],
                longitude=row['longitude'],
                latitude=row['latitude'],
                area=row['area'],
                remark=row['remark'],
                create_by=user_id
            )
            db.add(new_warehouse)
        # commit once so a failure rolls back the whole import
        db.commit()
        return {"code": 200, "msg": "Data imported successfully"}
    except Exception as e:
        db.rollback()
        traceback.print_exc()
        return JSONResponse(status_code=500,
                            content={'code': 500, 'msg': f"接口发生错误:{e}"})
# Import a document-style Excel file; the parsing logic below is currently disabled.
@router.post('/import')
async def import_doc(
    request: Request,
    db: Session = Depends(get_db),
    body=Depends(remove_xss_json),
    user_id=Depends(valid_access_token)
):
    try:
        filename = body['filename']
        if len(filename) == 0:
            raise Exception()
        file = filename[0]
        url = file['url']
        file_path = f"{UPLOAD_mergefile_PATH}/uploads/{url}"
        file_path = os.path.abspath(file_path)
        print(file_path)
        book = xlrd.open_workbook(file_path)
        sheet = book.sheet_by_index(0)
        data = []
        # Disabled block, carried over from the emergency plan document import.
        '''
        for i in range(9, sheet.nrows):
            # 预案名称
            plan_name = sheet.cell(i, 0).value
            # 一级目录
            title1 = sheet.cell(i, 1).value
            # 二级目录
            title2 = sheet.cell(i, 2).value
            # 三级目录
            title3 = sheet.cell(i, 3).value
            # 正文
            content = sheet.cell(i, 4).value
            if len(plan_name) < 1 and len(title1) < 1 and len(title2) < 1 and len(title3) < 1 and len(content) < 1:
                break
            data.append({
                'plan_name': plan_name,
                'title1': title1,
                'title2': title2,
                'title3': title3,
                'content': content,
            })
        if len(data) > 0:
            db.query(EmergencyDoc).filter(EmergencyDoc.plan_id == plan_id).delete()
            db.commit()
            title1 = ''
            content = ''
            docs = []
            for n in data:
                if n['title1'] != '':
                    if len(docs) > 0:
                        add_doc_1(db, title1, content, docs, plan_id)
                        docs = []
                    title1 = n['title1']
                    content = n['content']
                    if n['title2'] != '':
                        docs.append(n)
                    continue
                docs.append(n)
            if len(docs) > 0:
                add_doc_1(db, title1, content, docs, plan_id)
        '''
        return {
            'code': 200,
            'msg': '导入成功'
        }
    except Exception:
        traceback.print_exc()
        return {
            'code': 500,
            'msg': '导入发生异常'
        }