116 lines
4.4 KiB
Python
116 lines
4.4 KiB
Python
import data
|
|
import polars as pl
|
|
from data import etl_property_capacities as etl_pc
|
|
from data import etl_property_capacities_daily as etl_pcd
|
|
from data import etl_property_capacities_monthly as etl_pcm
|
|
from data import etl_property_neighbours as etl_pn
|
|
from data import etl_region_capacities as etl_rc
|
|
from data import etl_region_capacities_comparison as etl_rcc
|
|
from data import etl_region_capacities_daily as etl_rcd
|
|
from data import etl_region_capacities_monthly as etl_rcm
|
|
from data import etl_region_movAverage as etl_rmA
|
|
from data import etl_region_properties_capacities as etl_rpc
|
|
from fastapi import FastAPI, Response
|
|
|
|
# Module-level setup: load the scraped dataset once at import time; all route
# handlers below read from this shared handle (read-only access assumed —
# TODO confirm `data.load()` returns a connection safe for concurrent reads).
d = data.load()

# FastAPI application object; routes are registered via the decorators below.
app = FastAPI()
|
|
|
|
@app.get("/")
|
|
def read_root():
|
|
return {"Hi there!"}
|
|
|
|
# regions overview: contains names, ids, and count of properties of regions
@app.get("/regions")
def properties_region():
    """Return one dict per region: name, id, and its property count."""
    regions = d.properties_per_region()
    return regions.pl().to_dicts()
|
|
|
|
# get capacities of properties, for every scraping, filtered by region; -1 = all regions
@app.get("/region/{id}/properties/capacities")
def region_property_capacities_data(id: int):
    """Per-property capacities for every scraping in region ``id`` (-1 = all).

    ``id`` shadows the builtin, but the name must match the ``{id}`` path
    parameter for FastAPI's binding, so it is kept.
    """
    return etl_rpc.region_properties_capacities(id)
|
|
|
|
# get the capacity of a region for every scraping; -1 = all regions
@app.get("/region/{id}/capacities")
def region_capacities_data(id: int):
    """Capacity of region ``id`` for every scraping date (-1 = all regions)."""
    return etl_rc.region_capacities(id)
|
|
|
|
# get the capacity of a region for desired scraping date by months; -1 = all regions
@app.get("/region/{id}/capacities/monthly/{scrapeDate}")
def region_capacities_monthly_data(id: int, scrapeDate: str):
    """Monthly capacity of region ``id`` for scraping date ``scrapeDate``.

    Renamed from ``region_capacities_data``: the original file defined five
    handlers under that single name, so each redefinition shadowed the
    previous one (ruff/flake8 F811). Routes still worked (the decorator
    registers at definition time), but the module-level name was ambiguous.
    The route path and parameters are unchanged, so API clients are
    unaffected.
    """
    return etl_rcm.region_capacities_monthly(id, scrapeDate)
|
|
|
|
# get the capacity of a region for desired scraping date by days; -1 = all regions
@app.get("/region/{id}/capacities/daily/{scrapeDate}")
def region_capacities_daily_data(id: int, scrapeDate: str):
    """Daily capacity of region ``id`` for scraping date ``scrapeDate``.

    Renamed from ``region_capacities_data`` to resolve the F811 redefinition
    (five handlers previously shared that name). Route path and parameters
    are unchanged, so API clients are unaffected.
    """
    return etl_rcd.region_capacities_daily(id, scrapeDate)
|
|
|
|
# compare the capacities of two regions
@app.get("/region/capacities/comparison/{id_1}/{id_2}")
def region_capacities_comparison_data(id_1: int, id_2: int):
    """Compare the capacities of regions ``id_1`` and ``id_2``.

    Renamed from ``region_capacities_data`` to resolve the F811 redefinition
    (five handlers previously shared that name). Route path and parameters
    are unchanged, so API clients are unaffected.
    """
    return etl_rcc.region_capacities_comparison(id_1, id_2)
|
|
|
|
# get the moving average for a region beginning from desired date
@app.get("/region/{id}/movingAverage/{startDate}")
def region_moving_average_data(id: int, startDate: str):
    """Moving average for region ``id`` starting at ``startDate``.

    Renamed from ``region_capacities_data`` to resolve the F811 redefinition
    (five handlers previously shared that name) and to describe what the
    endpoint actually returns. Route path and parameters are unchanged, so
    API clients are unaffected.
    """
    return etl_rmA.region_movingAverage(id, startDate)
|
|
|
|
# get id and name of a region
@app.get("/region/{id}/base")
def region_base_data(id: int):
    """Return the id and name of region ``id``."""
    base = d.region_base_data(id)
    return base.pl().to_dicts()
|
|
|
|
# get growth of properties categorized by regions
@app.get("/properties/growth")
def properties_growth():
    """Return the property-growth time series, one list per region column.

    Fix: the original re-ran ``d.properties_growth().pl()`` six times — once
    per column. The frame is now computed once and the columns are read from
    it; the response dict (keys and key order) is unchanged.
    """
    growth = d.properties_growth().pl()
    totals = ["total_all", "total_heidiland", "total_engadin", "total_davos", "total_stmoritz"]
    options = {"dates": growth["date"].to_list()}
    for column in totals:
        options[column] = growth[column].to_list()
    return options
|
|
|
|
# get the geo coordinates for all properties
@app.get("/properties/geo")
def properties_geo():
    """Return the geo coordinates of every property."""
    geo = d.properties_geo()
    return geo.pl().to_dicts()
|
|
|
|
# get the 10 nearest properties from desired property
@app.get("/property/{id}/neighbours")
def property_neighbours(id: int):
    """Return the 10 properties nearest to property ``id``."""
    return etl_pn.property_neighbours(id)
|
|
|
|
# get scraped data for all scrapings from desired property
@app.get("/property/{id}/extractions")
def property_extractions(id: int):
    """Return every scraped extraction recorded for property ``id``."""
    extractions = d.extractions_for(property_id=id)
    return extractions.pl().to_dicts()
|
|
|
|
# get capacities for all scrapings from desired property
@app.get("/property/{id}/capacities")
def property_capacities_data(id: int):
    """Capacity of property ``id`` for every scraping."""
    return etl_pc.property_capacities(id)
|
|
|
|
# get the capacity of a property for desired scraping date by months
@app.get("/property/{id}/capacities/monthly/{scrapeDate}")
def property_capacities_monthly_data(id: int, scrapeDate: str):
    """Monthly capacity of property ``id`` for scraping date ``scrapeDate``.

    Renamed from ``property_capacities_data``: three handlers previously
    shared that name, so each redefinition shadowed the previous one
    (ruff/flake8 F811). Route path and parameters are unchanged, so API
    clients are unaffected.
    """
    return etl_pcm.property_capacities_monthly(id, scrapeDate)
|
|
|
|
# get the capacity of a property for desired scraping date by days
@app.get("/property/{id}/capacities/daily/{scrapeDate}")
def property_capacities_daily_data(id: int, scrapeDate: str):
    """Daily capacity of property ``id`` for scraping date ``scrapeDate``.

    Renamed from ``property_capacities_data`` to resolve the F811
    redefinition (three handlers previously shared that name). Route path
    and parameters are unchanged, so API clients are unaffected.
    """
    return etl_pcd.property_capacities_daily(id, scrapeDate)
|
|
|
|
# get first / last time a property was found, the id and name of its region,
# and its coordinates
@app.get("/property/{id}/base")
def property_base_data(id: int):
    """Base data for property ``id``: first/last sighting, region, coordinates."""
    base = d.property_base_data(id)
    return base.pl().to_dicts()
|
|
|
|
|
|
|