Weather Analytics Task Code

This document outlines a FastAPI application for weather data analytics, including database setup with SQLAlchemy and Pydantic schemas for data validation. It features endpoints to retrieve city information, calculate averages and extremes of weather data, compare weather between cities, and compute seasonal statistics. The application interacts with a PostgreSQL database to manage weather data related to various cities.


from fastapi import FastAPI, HTTPException
from pydantic import BaseModel
from sqlalchemy import create_engine, Column, Integer, String, Float, DateTime, ForeignKey
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker, relationship
from typing import List
from datetime import datetime
import pandas as pd

# -------------------------------
# Database setup
# -------------------------------
DATABASE_URL = "postgresql+psycopg2://user:password@localhost:5432/weather_db"

engine = create_engine(DATABASE_URL)
SessionLocal = sessionmaker(bind=engine)
Base = declarative_base()

# -------------------------------
# Database models
# -------------------------------
class City(Base):
    __tablename__ = "cities"
    id = Column(Integer, primary_key=True, index=True)
    name = Column(String)
    country = Column(String)
    latitude = Column(Float)
    longitude = Column(Float)
    weather_data = relationship("WeatherData", back_populates="city")

class WeatherData(Base):
    __tablename__ = "weather_data"
    id = Column(Integer, primary_key=True, index=True)
    city_id = Column(Integer, ForeignKey("cities.id"))
    city_name = Column(String)
    date = Column(DateTime)
    temp_min = Column(Float)
    temp_max = Column(Float)
    humidity = Column(Float)
    rainfall = Column(Float)

    city = relationship("City", back_populates="weather_data")

Base.metadata.create_all(bind=engine)
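
# -------------------------------
# Optional: seed demo data (illustrative sketch, not part of the original task
# code). The city and the weather values below are made-up sample rows so the
# analytics endpoints have something to aggregate during local testing.
# -------------------------------
def seed_demo_data():
    session = SessionLocal()
    if session.query(City).first() is None:
        city = City(name="Hyderabad", country="India", latitude=17.38, longitude=78.49)
        session.add(city)
        session.flush()  # assigns city.id before the weather rows reference it
        session.add_all([
            WeatherData(city_id=city.id, city_name=city.name,
                        date=datetime(2024, 6, 1), temp_min=26.0, temp_max=38.0,
                        humidity=55.0, rainfall=0.0),
            WeatherData(city_id=city.id, city_name=city.name,
                        date=datetime(2024, 7, 1), temp_min=24.0, temp_max=33.0,
                        humidity=70.0, rainfall=12.5),
        ])
        session.commit()
    session.close()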

# -------------------------------
# Pydantic schemas
# -------------------------------
class CitySchema(BaseModel):
    id: int
    name: str
    country: str
    latitude: float
    longitude: float

    class Config:
        orm_mode = True

class WeatherDataSchema(BaseModel):
    id: int
    city_id: int
    city_name: str
    date: datetime
    temp_min: float
    temp_max: float
    humidity: float
    rainfall: float

    class Config:
        orm_mode = True

class CompareCitiesRequest(BaseModel):
    city1_id: int
    city2_id: int
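
# Example request body for POST /weather/compare (the IDs are placeholders and
# assume two existing cities):
# {"city1_id": 1, "city2_id": 2}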

# -------------------------------
# FastAPI app
# -------------------------------
app = FastAPI(title="Weather Data Analytics")

# -------------------------------
# Endpoints
# -------------------------------
@app.get("/cities", response_model=List[CitySchema])
def get_all_cities():
session = SessionLocal()
cities = session.query(City).all()
session.close()
return cities
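
# -------------------------------
# Illustrative alternative (not part of the original task code): FastAPI's
# dependency injection can manage the session instead of opening SessionLocal()
# by hand in every endpoint. The /cities/v2 path and the get_db name are
# placeholders chosen for this sketch.
# -------------------------------
from fastapi import Depends
from sqlalchemy.orm import Session

def get_db():
    db = SessionLocal()
    try:
        yield db  # one session per request
    finally:
        db.close()  # closed even if the handler raises

@app.get("/cities/v2", response_model=List[CitySchema])
def get_all_cities_v2(db: Session = Depends(get_db)):
    return db.query(City).all()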

@app.get("/cities/{city_id}/averages")
def get_city_averages(city_id: int):
session = SessionLocal()
data = session.query(WeatherData).filter(WeatherData.city_id == city_id).all()
if not data:
session.close()
raise HTTPException(status_code=404, detail="City not found")
df = pd.DataFrame([{
"temp_min": w.temp_min,
"temp_max": w.temp_max,
"humidity": w.humidity,
"rainfall": w.rainfall
} for w in data])
averages = df.mean().to_dict()
session.close()
return averages

@app.get("/cities/{city_id}/extremes")
def get_city_extremes(city_id: int):
session = SessionLocal()
data = session.query(WeatherData).filter(WeatherData.city_id == city_id).all()
if not data:
session.close()
raise HTTPException(status_code=404, detail="City not found")
df = pd.DataFrame([{
"temp_min": w.temp_min,
"temp_max": w.temp_max,
"humidity": w.humidity,
"rainfall": w.rainfall
} for w in data])
extremes = {
"hottest": df["temp_max"].max(),
"coldest": df["temp_min"].min(),
"wettest": df["rainfall"].max()
}
session.close()
return extremes

@app.post("/weather/compare")
def compare_cities(request: CompareCitiesRequest):
session = SessionLocal()
data1 = session.query(WeatherData).filter(WeatherData.city_id == request.city1_id).all()
data2 = session.query(WeatherData).filter(WeatherData.city_id == request.city2_id).all()
if not data1 or not data2:
session.close()
raise HTTPException(status_code=404, detail="One or both cities not found")
df1 = pd.DataFrame([{
"temp_min": w.temp_min,
"temp_max": w.temp_max,
"humidity": w.humidity,
"rainfall": w.rainfall
} for w in data1])
df2 = pd.DataFrame([{
"temp_min": w.temp_min,
"temp_max": w.temp_max,
"humidity": w.humidity,
"rainfall": w.rainfall
} for w in data2])
comparison = {
"city1_avg": df1.mean().to_dict(),
"city2_avg": df2.mean().to_dict()
}
session.close()
return comparison

@app.get("/weather/seasons/{city_id}")
def compute_seasons(city_id: int):
session = SessionLocal()
data = session.query(WeatherData).filter(WeatherData.city_id == city_id).all()
if not data:
session.close()
raise HTTPException(status_code=404, detail="City not found")
df = pd.DataFrame([{"month": w.date.month, "temp_avg": (w.temp_min + w.temp_max)/2,
"rainfall": w.rainfall} for w in data])
seasons = {}
for month, group in df.groupby("month"):
seasons[month] = {
"avg_temp": group["temp_avg"].mean(),
"total_rainfall": group["rainfall"].sum()
}
session.close()
return seasons
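
A minimal way to exercise the service locally, assuming the code above is saved as main.py (the filename and the city IDs used below are assumptions for this sketch), is to start it with uvicorn and call the endpoints through FastAPI's test client:

# Run the API from a shell:
#   uvicorn main:app --reload
from fastapi.testclient import TestClient
from main import app

client = TestClient(app)
print(client.get("/cities").json())
print(client.get("/cities/1/averages").json())  # assumes a city with id 1 exists
print(client.post("/weather/compare", json={"city1_id": 1, "city2_id": 2}).json())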
