chemical_company.py

#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from fastapi import APIRouter, Request, Depends, Query, HTTPException, status
from common.security import valid_access_token
from sqlalchemy.orm import Session
from sqlalchemy.sql import func
from common.auth_user import *
from sqlalchemy import text
from pydantic import BaseModel
from common.BigDataCenterAPI import *
from database import get_db
from typing import List
from models import *
from utils import *
from utils.spatial import *
from utils.rainfall_util import *
import json
import traceback
from jobs.rainfall_conditions_job import get_stcd_data
from datetime import datetime, timedelta
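
# Note: helpers used below (unitName_get_video_region_info, get_real_code,
# get_rainfall) and the TPVideoInfo model are expected to come from the
# wildcard imports above (utils.*, models, etc.); they are not imported by name here.
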
router = APIRouter()


@router.get("/list")
async def get_list(
    area_name: str = Query(None),
    company_type: str = Query(None),
    keyword: str = Query(None),
    history_time: int = Query(None),
    future_time: int = Query(None),
    db: Session = Depends(get_db),
    page: int = Query(1, gt=0, description='Page number'),
    pageSize: int = Query(10, gt=0, description='Items per page')
):
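    """List chemical companies from sharedb.chemical_company.

    Optional filters: area_name (exact match on area), company_type and
    keyword (LIKE matches), plus page/pageSize pagination. Each row is
    enriched with video availability and, when history_time is given,
    recent rainfall at the company's location.
    """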
    try:
        # Compute the OFFSET for the requested page
        offset = (page - 1) * pageSize
        # Base queries
        base_sql = "SELECT * FROM sharedb.chemical_company"
        count_sql = "SELECT COUNT(*) FROM sharedb.chemical_company"
        # Optional WHERE conditions, bound as named parameters
        conditions = []
        params = {}
        if area_name:
            conditions.append("area = :area_name")
            params['area_name'] = area_name
        if keyword:
            conditions.append("`company_name` LIKE :keyword")
            params['keyword'] = f"%{keyword}%"
        if company_type:
            conditions.append("`company_type` LIKE :company_type")
            params['company_type'] = f"%{company_type}%"
        if conditions:
            base_sql += " WHERE " + " AND ".join(conditions)
            count_sql += " WHERE " + " AND ".join(conditions)
        count_query = text(count_sql)
        # Run the count query to get the total number of matching rows
        total = db.execute(count_query, params).scalar()
        # Add LIMIT and OFFSET for pagination
        paginated_sql = f"{base_sql} LIMIT :limit OFFSET :offset"
        params['limit'] = pageSize
        params['offset'] = offset
        # Build and run the paginated query
        paginated_query = text(paginated_sql)
        result = db.execute(paginated_query, params).fetchall()
        # Convert the result rows to a list of dicts (cf. rain_pits.py)
        result_list = []
        for row in result:
            # dict(row) relies on the legacy Row mapping behaviour; on
            # SQLAlchemy 2.x-style rows, dict(row._mapping) would be needed.
            data = dict(row)
            data['have_video'] = False
            data['video_unit_indexcode'] = ''
            company_name = data['company_name']
            video_unit_info = unitName_get_video_region_info(db, company_name)
            if video_unit_info:
                data['have_video'] = True
                data['video_unit_indexcode'] = video_unit_info.indexCode
            # Weather warning fields are currently hard-coded
            data['weather_warning_type'] = '暴雨预警'
            data['weather_warninglevel'] = '3'
            if history_time:
                real_code = get_real_code(db, data['longitude'], data['latitude'])
                rainfall = get_rainfall(real_code, history_time, db)
                data['rainfall'] = rainfall
            if future_time:
                # future_time currently returns a fixed 0 (no forecast lookup)
                data['rainfall'] = 0
            result_list.append(data)
        return {
            "code": 200,
            "msg": "操作成功",
            "data": result_list,
            "total": total,
            "page": page,
            "pageSize": pageSize,
            "totalPages": (total + pageSize - 1) // pageSize
        }
    except Exception as e:
        traceback.print_exc()
        raise HTTPException(status_code=500, detail=f"Internal server error: {str(e)}")
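
# Example request for /list (a sketch; the "/chemical_company" prefix is an
# assumption about how the application mounts this router):
#   GET /chemical_company/list?area_name=<area>&history_time=24&page=1&pageSize=10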


@router.get('/get_video_list')
async def get_video_list(
    video_unit_indexcode: str = Query(None, description='Index code of the owning unit'),
    db: Session = Depends(get_db),
    page: int = Query(1, gt=0, description='Page number'),
    pageSize: int = Query(10, gt=0, description='Items per page')
):
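    """Paginated list of cameras whose regionPath contains video_unit_indexcode.

    Returns an empty payload when no index code is supplied.
    """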
    # Video feed (and "more videos") for the bottom-left panel of the big screen
    try:
        query = db.query(TPVideoInfo)
        if video_unit_indexcode:
            query = query.filter(TPVideoInfo.regionPath.like(f'%{video_unit_indexcode}%'))
        else:
            # No unit index code: return an empty payload rather than all cameras
            return {
                "code": 200,
                "msg": "操作成功",
                "data": None}
        total_items = query.count()
        videos = query.offset(pageSize * (page - 1)).limit(pageSize).all()
        data = []
        for row in videos:
            data.append({
                "name": row.name,
                "video_code": row.gbIndexCode,
                "status": row.status,
                "statusName": row.statusName,
                "regionPath": row.regionPath,
                "installPlace": row.installPlace,
                "cameraTypeName": row.cameraTypeName,
                "cameraType": row.cameraType
            })
        return {
            "code": 200,
            "msg": "操作成功",
            "data": data,
            "total": total_items,
            "page": page,
            "pageSize": pageSize,
            "totalPages": (total_items + pageSize - 1) // pageSize
        }
    except Exception as e:
        # Log the traceback and surface the error as a 500
        traceback.print_exc()
        raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(e))
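
# Minimal sketch of how this router might be mounted in the application
# entrypoint. The module path and URL prefix below are assumptions, not taken
# from this file:
#
#   from fastapi import FastAPI
#   from routers.chemical_company import router as chemical_company_router
#
#   app = FastAPI()
#   app.include_router(chemical_company_router, prefix="/chemical_company")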