#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from fastapi import APIRouter, Request, Depends, Query, HTTPException, status
from fastapi.responses import JSONResponse, StreamingResponse
from common.db import db_czrz
from common.security import valid_access_token
from sqlalchemy.orm import Session
from sqlalchemy.sql import func
from common.auth_user import *
from sqlalchemy import text
from pydantic import BaseModel
from common.BigDataCenterAPI import *
from database import get_db
from typing import List
from models import *
from utils import *
from utils.spatial import *
import json
import traceback
from jobs.rainfall_conditions_job import get_stcd_data
from datetime import datetime, timedelta

from .rain_pits import router as rain_pits_router
from .dzzh import router as dzzh_router
from .chemical_company import router as chemical_company_router

# Aggregate router for the rainfall feature area; sub-routers are mounted
# under their own URL prefixes.
router = APIRouter()
router.include_router(rain_pits_router, prefix="/rain_pits")
router.include_router(dzzh_router, prefix="/dzzh")
router.include_router(chemical_company_router, prefix="/chemical_company")
- @router.get("/info/{code}")
- async def get_pattern_info(
- code: str,
- db: Session = Depends(get_db)
- ):
- rainfulldata = get_stcd_data(code,12)
- update_time_list = []
- rainfall_history = []
- rainfall_future = []
- cumulative_rainfall = []
- for i in rainfulldata[::-1]:
- area_name = i['F3070220000034_000018002']
- update_time_list.append( datetime.strptime(i['F3070220000034_000018006'], "%Y-%m-%d %H:%M:%S"))
- create_time = datetime.strptime(i['F3070220000034_000018004'], "%Y-%m-%d %H:%M:%S")
- hour = create_time.strftime("%H")
- value = i['F3070220000034_000018005']
- rainfall_history.append({"hour":hour,"value":value})
- update_time_max = max(update_time_list).strftime("%Y-%m-%d %H:%M:%S")
- for t in range(1,13):
- future_time = create_time+ timedelta(hours=t)
- hour = future_time.strftime("%H")
- value = 0
- rainfall_future.append({"hour":hour,"value":value})
- rainfall = 0
- for cumulative in rainfall_history+rainfall_future:
- rainfall += cumulative['value']
- cumulative_rainfall.append({"hour":cumulative['hour'],"value":rainfall})
- return {
- "code": 200, "msg": "获取成功", "data":{"areaName":area_name,"updateTime":update_time_max,"rainfallHistory":rainfall_history,"rainfallFuture":rainfall_future,"cumulativeRainfall":cumulative_rainfall}
- }
- @router.get("/gdyl/info/{code}")
- async def get_pattern_info(
- code: str,
- db: Session = Depends(get_db)
- ):
- rainfulldata = get_stcd_data(code,25)
- update_time_list = []
- rainfall_history = []
- rainfall_future = []
- # cumulative_rainfall = []
- num = 24
- for i in rainfulldata[::-1]:
- area_name = i['F3070220000034_000018002']
- update_time_list.append( datetime.strptime(i['F3070220000034_000018006'], "%Y-%m-%d %H:%M:%S"))
- create_time = datetime.strptime(i['F3070220000034_000018004'], "%Y-%m-%d %H:%M:%S")
- # hour = create_time.strftime("%H")
- value = i['F3070220000034_000018005']
- if num in [24,12,6,3,1,0]:
- rainfall_history.append({"hour":-num,"value":value})
- num-=1
- update_time_max = max(update_time_list).strftime("%Y-%m-%d %H:%M:%S")
- # for t in range(72):
- # future_time = create_time+ timedelta(hours=t)
- # hour = future_time.strftime("%H")
- # value = 0
- rainfall_future.append({"hour":1,"value":0})
- rainfall_future.append({"hour":3,"value":0})
- rainfall_future.append({"hour":24,"value":1})
- rainfall_future.append({"hour":48,"value":4})
- rainfall_future.append({"hour":72,"value":2})
- rainfall = 0
- # for cumulative in rainfall_history+rainfall_future:
- # rainfall += cumulative['value']
- # cumulative_rainfall.append({"hour":cumulative['hour'],"value":rainfall})
- return {
- "code": 200, "msg": "获取成功", "data":{"areaName":area_name,"updateTime":update_time_max,"rainfallHistory":rainfall_history,"rainfallFuture":rainfall_future}#,"cumulativeRainfall":cumulative_rainfall}
- }
@router.get('/get_rainfall_range/export')
async def get_inspection_task_list(
    request: Request,
    sort: str = Query('desc'),
    timeOption: str = Query('24'),
    area: str = Query(''),
    township: str = Query(''),
    db: Session = Depends(get_db),
    auth_user: AuthUser = Depends(find_auth_user),
    user_id = Depends(valid_access_token)
):
    """Export the station rainfall ranking as an Excel download.

    Parameters:
      sort       -- ranking direction, 'asc' or 'desc' (anything else -> desc)
      timeOption -- how many of each station's most recent hourly records to sum
      area       -- optional district filter (empty string = no filter)
      township   -- optional township filter (empty string = no filter)

    Security fix: the original interpolated all four query parameters
    straight into the SQL string (injection risk).  The filters now use
    bound parameters; the ORDER BY direction, which cannot be bound, is
    whitelisted instead.
    """
    try:
        # Whitelist the sort direction -- ORDER BY cannot take a bind param.
        sort_dir = 'asc' if str(sort).lower() == 'asc' else 'desc'
        # The per-station window must be an integer (the original compared
        # the row number against a quoted string).
        try:
            window = int(timeOption)
        except (TypeError, ValueError):
            return JSONResponse(status_code=400, content={"code": 400, "msg": "timeOption must be an integer"})

        sql = text(f"""
            SELECT ROW_NUMBER() OVER (ORDER BY T2.rainfall {sort_dir}) AS `序号`,
                   T1.area AS `区县`,
                   T1.township AS `镇街`,
                   T1.address AS `站点地质`,
                   T1.area_name AS `站点名称`,
                   T2.rainfall AS `雨量`,
                   T1.`code` AS `站点编号`
            FROM sharedb.govdata_real_time_address T1
            LEFT JOIN (
                SELECT `latest_data`.`code` AS `code`,
                       SUM(`latest_data`.`rainfall`) AS `rainfall`
                FROM (
                    SELECT `govdata_rain_data_info`.`code` AS `code`,
                           `govdata_rain_data_info`.`create_time` AS `create_time`,
                           `govdata_rain_data_info`.`rainfall` AS `rainfall`,
                           ROW_NUMBER() OVER (
                               PARTITION BY `govdata_rain_data_info`.`code`
                               ORDER BY `govdata_rain_data_info`.`create_time` DESC
                           ) AS `rn`
                    FROM sharedb.`govdata_rain_data_info`
                ) `latest_data`
                WHERE `latest_data`.`rn` <= :time_option
                  AND `latest_data`.`code` IN (
                      SELECT `govdata_real_time_address`.`code`
                      FROM sharedb.`govdata_real_time_address`)
                GROUP BY `latest_data`.`code`
            ) T2 ON T1.code = T2.code
            WHERE (:area = '' OR T1.area = :area)
              AND (:township = '' OR T1.township = :township)
              AND T2.rainfall > 0
            ORDER BY T2.rainfall {sort_dir}""")
        rows = db.execute(sql, {"time_option": window, "area": area, "township": township}).fetchall()
        records = [dict(row) for row in rows]

        # Build the Excel workbook in memory.
        import pandas as pd
        from io import BytesIO
        df = pd.DataFrame(records)
        output = BytesIO()
        with pd.ExcelWriter(output, engine='openpyxl') as writer:
            df.to_excel(writer, index=False)
        output.seek(0)

        from urllib.parse import quote
        # Timestamp fix: the original "%Y%m%d%H%mi%s" repeated the month
        # (%m) and used the non-portable %s; %H%M%S is hour/minute/second.
        filename = f'雨量排行榜导出{datetime.now().strftime("%Y%m%d%H%M%S")}.xlsx'
        encoded_filename = quote(filename, encoding='utf-8')
        headers = {
            # RFC 5987 encoding so the Chinese filename survives the header.
            'Content-Disposition': f"attachment; filename*=UTF-8''{encoded_filename}",
            'Content-Type': 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        }
        # Audit log: record who exported, from which client address.
        db_czrz.log(db, auth_user, "雨量监测", "雨量排行榜导出数据成功", request.client.host)
        return StreamingResponse(output, headers=headers)
    except Exception as e:
        # Boundary handler: log the traceback and surface a JSON 500.
        traceback.print_exc()
        return JSONResponse(status_code=500, content={"code": 500, "msg": f"Internal server error: {str(e)}"})