__init__.py

#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from fastapi import APIRouter, Request, Depends, Query, HTTPException, status
from fastapi.responses import JSONResponse, StreamingResponse
from common.db import db_czrz
from common.security import valid_access_token
from sqlalchemy.orm import Session
from sqlalchemy.sql import func
from common.auth_user import *
from sqlalchemy import text
from pydantic import BaseModel
from common.BigDataCenterAPI import *
from database import get_db
from typing import List
from models import *
from utils import *
from utils.spatial import *
import json
import traceback
from jobs.rainfall_conditions_job import get_stcd_data
from datetime import datetime, timedelta

from .rain_pits import router as rain_pits_router
from .dzzh import router as dzzh_router
from .chemical_company import router as chemical_company_router

router = APIRouter()
router.include_router(rain_pits_router, prefix="/rain_pits")
router.include_router(dzzh_router, prefix="/dzzh")
router.include_router(chemical_company_router, prefix="/chemical_company")
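
# Usage note (illustrative sketch; the application module and import path below are
# assumptions, not part of this package):
#
#     from fastapi import FastAPI
#     from app.routers.rainfall import router as rainfall_router  # hypothetical path to this package
#
#     app = FastAPI()
#     app.include_router(rainfall_router, prefix="/rainfall")
#
# The sub-routers mounted above would then be served under /rainfall/rain_pits,
# /rainfall/dzzh and /rainfall/chemical_company.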


@router.get("/info/{code}")
async def get_pattern_info(
    code: str,
    db: Session = Depends(get_db)
):
    # Fetch the last 12 hourly rainfall records for the station code.
    rainfall_data = get_stcd_data(code, 12)
    update_time_list = []
    rainfall_history = []
    rainfall_future = []
    cumulative_rainfall = []
    for i in rainfall_data[::-1]:
        area_name = i['F3070220000034_000018002']
        update_time_list.append(datetime.strptime(i['F3070220000034_000018006'], "%Y-%m-%d %H:%M:%S"))
        create_time = datetime.strptime(i['F3070220000034_000018004'], "%Y-%m-%d %H:%M:%S")
        hour = create_time.strftime("%H")
        value = i['F3070220000034_000018005']
        rainfall_history.append({"hour": hour, "value": value})
    update_time_max = max(update_time_list).strftime("%Y-%m-%d %H:%M:%S")
    # Placeholder forecast: the 12 hours after the latest record, all zero.
    for t in range(1, 13):
        future_time = create_time + timedelta(hours=t)
        hour = future_time.strftime("%H")
        value = 0
        rainfall_future.append({"hour": hour, "value": value})
    # Running total over history plus forecast.
    rainfall = 0
    for cumulative in rainfall_history + rainfall_future:
        rainfall += cumulative['value']
        cumulative_rainfall.append({"hour": cumulative['hour'], "value": rainfall})
    return {
        "code": 200,
        "msg": "获取成功",
        "data": {
            "areaName": area_name,
            "updateTime": update_time_max,
            "rainfallHistory": rainfall_history,
            "rainfallFuture": rainfall_future,
            "cumulativeRainfall": cumulative_rainfall
        }
    }
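
# Example response shape for GET /info/{code} (field names come from the return
# statement above; the values shown are illustrative only):
#
#     {
#         "code": 200,
#         "msg": "获取成功",
#         "data": {
#             "areaName": "...",
#             "updateTime": "2024-06-01 08:00:00",
#             "rainfallHistory": [{"hour": "07", "value": 0.5}, ...],
#             "rainfallFuture": [{"hour": "09", "value": 0}, ...],
#             "cumulativeRainfall": [{"hour": "07", "value": 0.5}, ...]
#         }
#     }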


@router.get("/gdyl/info/{code}")
async def get_gdyl_pattern_info(  # renamed from get_pattern_info to avoid redefining the handler above
    code: str,
    db: Session = Depends(get_db)
):
    # Fetch the last 25 hourly rainfall records for the station code.
    rainfall_data = get_stcd_data(code, 25)
    update_time_list = []
    rainfall_history = []
    rainfall_future = []
    # cumulative_rainfall = []
    num = 24
    for i in rainfall_data[::-1]:
        area_name = i['F3070220000034_000018002']
        update_time_list.append(datetime.strptime(i['F3070220000034_000018006'], "%Y-%m-%d %H:%M:%S"))
        create_time = datetime.strptime(i['F3070220000034_000018004'], "%Y-%m-%d %H:%M:%S")
        # hour = create_time.strftime("%H")
        value = i['F3070220000034_000018005']
        # Keep only the -24h, -12h, -6h, -3h, -1h and 0h samples.
        if num in [24, 12, 6, 3, 1, 0]:
            rainfall_history.append({"hour": -num, "value": value})
        num -= 1
    update_time_max = max(update_time_list).strftime("%Y-%m-%d %H:%M:%S")
    # for t in range(72):
    #     future_time = create_time + timedelta(hours=t)
    #     hour = future_time.strftime("%H")
    #     value = 0
    # Forecast values are currently hard-coded placeholders.
    rainfall_future.append({"hour": 1, "value": 0})
    rainfall_future.append({"hour": 3, "value": 0})
    rainfall_future.append({"hour": 24, "value": 1})
    rainfall_future.append({"hour": 48, "value": 4})
    rainfall_future.append({"hour": 72, "value": 2})
    # Cumulative rainfall is not returned by this endpoint for now.
    # rainfall = 0
    # for cumulative in rainfall_history + rainfall_future:
    #     rainfall += cumulative['value']
    #     cumulative_rainfall.append({"hour": cumulative['hour'], "value": rainfall})
    return {
        "code": 200,
        "msg": "获取成功",
        "data": {
            "areaName": area_name,
            "updateTime": update_time_max,
            "rainfallHistory": rainfall_history,
            "rainfallFuture": rainfall_future
            # "cumulativeRainfall": cumulative_rainfall
        }
    }


@router.get('/get_rainfall_range/export')
async def export_rainfall_range(
    request: Request,
    sort: str = Query('desc'),
    timeOption: str = Query('24'),
    area: str = Query(''),
    township: str = Query(''),
    db: Session = Depends(get_db),
    auth_user: AuthUser = Depends(find_auth_user),
    user_id = Depends(valid_access_token)
):
    try:
        # Rank stations by rainfall accumulated over the latest `timeOption` records,
        # optionally filtered by district (area) and township.
        sql = f"""SELECT ROW_NUMBER() OVER (ORDER BY T2.rainfall {sort}) AS `序号`,
            T1.area AS `区县`, T1.township AS `镇街`, T1.address AS `站点地址`,
            T1.area_name AS `站点名称`, T2.rainfall AS `雨量`, T1.`code` AS `站点编号`
        FROM sharedb.govdata_real_time_address T1
        LEFT JOIN (
            SELECT `latest_data`.`code` AS `code`, SUM(`latest_data`.`rainfall`) AS `rainfall`
            FROM (
                SELECT `govdata_rain_data_info`.`code` AS `code`,
                    `govdata_rain_data_info`.`area_name` AS `area_name`,
                    `govdata_rain_data_info`.`address` AS `address`,
                    `govdata_rain_data_info`.`create_time` AS `create_time`,
                    `govdata_rain_data_info`.`rainfall` AS `rainfall`,
                    `govdata_rain_data_info`.`update_time` AS `update_time`,
                    ROW_NUMBER() OVER (PARTITION BY `govdata_rain_data_info`.`code`
                                       ORDER BY `govdata_rain_data_info`.`create_time` DESC) AS `rn`
                FROM sharedb.`govdata_rain_data_info`
            ) `latest_data`
            WHERE ((`latest_data`.`rn` <= '{timeOption}')
                   AND `latest_data`.`code` IN (SELECT `govdata_real_time_address`.`code`
                                                FROM sharedb.`govdata_real_time_address`))
            GROUP BY `latest_data`.`code`
            ORDER BY `rainfall` DESC
        ) T2 ON T1.code = T2.code
        WHERE CASE WHEN '{area}' <> '' THEN T1.area = '{area}' ELSE 1 = 1 END
          AND CASE WHEN '{township}' <> '' THEN T1.township = '{township}' ELSE 1 = 1 END
          AND T2.rainfall > 0
        ORDER BY T2.rainfall {sort}"""
        da = db.execute(text(sql)).fetchall()
        outlist = [dict(row) for row in da]
        # Convert the query result to a DataFrame and export it as an Excel file.
        import pandas as pd
        from io import BytesIO
        df = pd.DataFrame(outlist)
        output = BytesIO()
        with pd.ExcelWriter(output, engine='openpyxl') as writer:
            df.to_excel(writer, index=False)
        output.seek(0)
        # Build the response headers with a UTF-8 encoded filename.
        from urllib.parse import quote
        encoded_filename = f'雨量排行榜导出{datetime.now().strftime("%Y%m%d%H%M%S")}.xlsx'
        encoded_filename = quote(encoded_filename, encoding='utf-8')
        headers = {
            'Content-Disposition': f'attachment; filename*=UTF-8\'\'{encoded_filename}',
            'Content-Type': 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        }
        db_czrz.log(db, auth_user, "雨量监测", "雨量排行榜导出数据成功", request.client.host)
        # Return the file as a streaming download.
        return StreamingResponse(output, headers=headers)
    except Exception as e:
        # Log the traceback and return a generic 500 response.
        traceback.print_exc()
        return JSONResponse(status_code=500, content={"code": 500, "msg": f"Internal server error: {str(e)}"})
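
# NOTE: the query above interpolates `sort`, `timeOption`, `area` and `township`
# directly into the SQL string. A parameterized sketch (illustrative only; the
# whitelist for `sort` and the shortened SELECT list are assumptions, not the
# original statement) could bind the user-supplied values instead:
#
#     allowed_sort = {"asc", "desc"}
#     order_dir = sort.lower() if sort.lower() in allowed_sort else "desc"
#     stmt = text(
#         "SELECT ... "                        # same columns as above
#         "FROM sharedb.govdata_real_time_address T1 "
#         "LEFT JOIN ( ... ) T2 ON T1.code = T2.code "
#         "WHERE (:area = '' OR T1.area = :area) "
#         "AND (:township = '' OR T1.township = :township) "
#         "AND T2.rainfall > 0 "
#         f"ORDER BY T2.rainfall {order_dir}"
#     )
#     rows = db.execute(stmt, {"area": area, "township": township}).fetchall()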