import gc
from datetime import datetime, timedelta
from zoneinfo import ZoneInfo

from fastapi import BackgroundTasks, FastAPI
from fastapi.middleware import Middleware
from fastapi.responses import JSONResponse
from starlette.middleware.cors import CORSMiddleware
from fastapi_utils.tasks import repeat_every

from app.settings import *
from services.grib import Grib

service_grib = Grib()

# App
middleware = [
    Middleware(
        CORSMiddleware,
        allow_origins=["*"],
        allow_credentials=True,
        allow_methods=["*"],
        allow_headers=["*"],
    ),
]

app = FastAPI(title="parser-gfs", middleware=middleware)


# API
@app.get("/test/")
async def get_test():
    """Health check: verify that the GRIB source is reachable.

    The freshest file might not be published yet and old ones get deleted,
    so we probe yesterday's file, which should always exist.
    """
    yesterday = datetime.now(tz=ZoneInfo("US/Eastern")) - timedelta(days=1)
    url = service_grib.form_gfswave_link(target_time=yesterday)
    # Checking a single file should be enough to confirm availability.
    if not service_grib.is_reachable(url):
        print(f"{url} is not reachable at this time")
        # TODO: should we actually error out?
    return JSONResponse(content={"status": "success"})


@app.get("/download/")
async def download_grib(
    background_tasks: BackgroundTasks,
    target_time: str | None = None,
):
    """Download and process GRIB files into a CSV of the requested parameters.

    :param target_time: well-formed ISO 8601 time string; we try to download
        the GRIB available just before it
    """
    background_tasks.add_task(service_grib.extract_useful_data, target_time)
    return JSONResponse(content={"status": "Background task started"})


# Tasks
@app.on_event("startup")
@repeat_every(seconds=60)  # once a minute
async def task_gc() -> None:
    # Force a collection pass periodically; large GRIB buffers can otherwise
    # linger in memory between downloads.
    gc.collect()
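

# --- Usage sketch (assumptions: uvicorn serves this app, the module is
# importable as "main", and the service listens on port 8000; adjust to match
# the actual deployment) ---
#
# Example requests once the server is up:
#   curl http://localhost:8000/test/
#   curl "http://localhost:8000/download/?target_time=2024-01-15T12:00:00"
#
if __name__ == "__main__":
    import uvicorn

    # Run the ASGI app directly for local development.
    uvicorn.run(app, host="0.0.0.0", port=8000)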