from io import StringIO

import polars as pl

import data
from data import etl_cache

# Module-level handle to the project database/loader; shared by all calls.
d = data.load()


def property_capacities_weekdays(id: int, scrapeDate: str):
    """Return per-weekday capacity percentages for one property.

    Loads all extractions for ``(id, scrapeDate)``, parses each extraction's
    calendar JSON, and aggregates availability per weekday.  Because
    ``df_calendar`` is rebuilt from scratch on every loop iteration, the
    values of the LAST extraction row determine the returned result.
    Results are cached on disk via ``etl_cache`` keyed by property id.

    Args:
        id: property id (NOTE(review): shadows the builtin ``id``; name kept
            for caller compatibility).
        scrapeDate: scrape date used to select extractions; rebound inside
            the loop to each extraction's ``created_at`` timestamp, so the
            returned ``scraping-date`` is the last row's creation time.

    Returns:
        dict with keys ``"scraping-date"``, ``"weekdays"`` (weekday names
        ordered Monday..Sunday) and ``"capacities"`` (one percentage per
        weekday).
    """
    file = f"etl_property_capacities_weekdays_{id}.obj"
    # Serve from the on-disk cache when a previous run already computed this.
    obj = etl_cache.openObj(file)
    if obj:
        return obj

    extractions = d.extractions_propId_scrapeDate(id, scrapeDate).pl()

    df_calendar = pl.DataFrame()
    numWeeks = 0
    for row in extractions.rows(named=True):
        scrapeDate = row['created_at']
        # Calendar JSON arrives as one column per date; transpose so dates
        # become rows with the availability values in "column_0".
        df_calendar = pl.read_json(StringIO(row['calendar']))
        columnTitles = df_calendar.columns
        df_calendar = df_calendar.transpose()
        df_calendar = df_calendar.with_columns(pl.Series(name="dates", values=columnTitles))
        df_calendar = df_calendar.with_columns(pl.col("dates").str.to_date())
        # Span of the calendar in whole weeks, used to normalise the sums.
        numWeeks = round((df_calendar.get_column("dates").max() - df_calendar.get_column("dates").min()).days / 7, 0)
        df_calendar = df_calendar.with_columns(pl.col("dates").dt.weekday().alias("weekday_num"))
        df_calendar = df_calendar.with_columns(pl.col("dates").dt.strftime("%A").alias("weekday"))
        df_calendar = df_calendar.drop("dates")
        # Sum slot values per weekday; the "/ 2" presumably reflects two
        # half-day slots per date — TODO confirm against the calendar schema.
        df_calendar = df_calendar.group_by(["weekday", "weekday_num"]).agg(pl.col("column_0").sum())
        df_calendar = df_calendar.with_columns((pl.col("column_0") / numWeeks / 2 * 100).alias("column_0"))
        # Order Monday..Sunday via the ISO weekday number, then drop it.
        df_calendar = df_calendar.sort('weekday_num')
        df_calendar = df_calendar.drop('weekday_num')

    result = {
        "scraping-date": scrapeDate,
        "weekdays": df_calendar['weekday'].to_list(),
        'capacities': df_calendar['column_0'].to_list(),
    }
    etl_cache.saveObj(file, result)
    return result