Reinstate the heatmap. @stoffelmauro had to make some adjustments to the API for this.
parent 67382003ca
commit 2013d2b440
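
For context, the heatmap is now fed by the reworked region/properties capacities endpoint (exposed as Api::regionPropertyCapacities() and /region/{$id}/properties/capacities in the diff below). A minimal sketch of the payload shape implied by the diff — the field names come from the changes below, the sample values are invented:

    # Rough shape of the JSON produced by region_properties_capacities()
    # (key names taken from the diff below; the concrete values are made-up examples).
    payload = {
        'scrapeDates': ['2024-03-01', '2024-03-08'],   # x-axis categories of the heatmap
        'property_ids': ['101', '102'],                # y-axis categories of the heatmap
        'values': [                                    # (scrape_date, property_id, capacity in %)
            ('2024-03-01', '101', 25.0),
            ('2024-03-08', '102', 100.0),
        ],
    }
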
@@ -22,7 +22,7 @@ class Api
             return Cache::get($request);
         }
 
-        $get = Http::get($request);
+        $get = Http::timeout(600)->get($request);
 
         if($get->successful()){
             $result = $get->json();
@@ -63,9 +63,9 @@ class Api
         return self::get("/property/{$id}/base");
     }
 
-    public static function regionCapacities(int $id): mixed
+    public static function regionPropertyCapacities(int $id): mixed
     {
-        return self::get("/region/{$id}/capacities");
+        return self::get("/region/{$id}/properties/capacities");
     }
 
     public static function propertyCapacitiesMonthly(int $id, string $date): mixed
@@ -83,6 +83,10 @@ class Api
         return self::get("/property/{$id}/neighbours");
     }
 
+    public static function regionCapacities(int $id): mixed
+    {
+        return self::get("/region/{$id}/capacities");
+    }
 
 
@@ -58,7 +58,7 @@ const sharedOptions = {
         }
     }
 }
-const extractionDates = {!! json_encode($growth['dates']) !!};
+const extractionDates = {!! json_encode($regionPropertiesCapacities['scrapeDates']) !!};
 
 const chartHeatmap = document.getElementById('chart-heatmap');
 const cHeatmap = echarts.init(chartHeatmap);
@@ -80,7 +80,7 @@ const cHeatmapOptions = {
         show: false,
         name: 'Kurzzeitmietobjekt',
         type: 'category',
-        data: {!! json_encode($heat['property_ids']) !!},
+        data: extractionDates,
         splitArea: {
             show: false
         },
@@ -91,7 +91,7 @@ const cHeatmapOptions = {
     yAxis: {
         show: false,
         type: 'category',
-        data: extractionDates,
+        data: {!! json_encode($regionPropertiesCapacities['property_ids']) !!},
         splitArea: {
             show: true
         }
@@ -116,7 +116,7 @@ const cHeatmapOptions = {
         name: 'Auslastung',
         type: 'heatmap',
         blurSize: 0,
-        data: {!! json_encode($heat['values']) !!},
+        data: {!! json_encode($regionPropertiesCapacities['values']) !!},
         label: {
             show: false
         },
 
@@ -5,7 +5,7 @@ use App\Api;
 
 Route::get('/', function () {
 
-    $regionHeat = Api::regionCapacities(1);
+    $regionPropertyCapacities = Api::regionPropertyCapacities(-1);
     $propertiesGrowth = Api::propertiesGrowth();
    $propsPerRegion = Api::propertiesPerRegion();
     $propsPerRegionName = [];
@@ -18,7 +18,7 @@ Route::get('/', function () {
 
     $propertiesGeo = Api::propertiesGeo();
 
-    return view('overview', ["heat" => $regionHeat, "geo" => $propertiesGeo, "growth" => $propertiesGrowth, "propsPerRegion" => [json_encode($propsPerRegionName), json_encode($propsPerRegionCounts)]]);
+    return view('overview', ["regionPropertiesCapacities" => $regionPropertyCapacities, "geo" => $propertiesGeo, "growth" => $propertiesGrowth, "propsPerRegion" => [json_encode($propsPerRegionName), json_encode($propsPerRegionCounts)]]);
 });
 
 Route::get('/prop/{id}', function (int $id) {
 
@@ -1,8 +1,8 @@
-import data
-import polars as pl
-from io import StringIO
-import numpy as np
+
+import polars as pl
+
+import data
 
 d = data.load()
 
@@ -26,10 +26,9 @@ def region_properties_capacities(id: int):
     # Merge Dataframe to generate indices
     df = df.join(datesDF, on='ScrapeDate')
     df = df.join(propIdDF, on='property_id')
-    # Drop now useless columns ScrapeDate and property_id
-    df = df[['ScrapeDate', 'calendarBody', 'date_index', 'prop_index']]
+
     # Calculate grid values
-    gridData = []
+    gridData = pl.DataFrame(schema=[("scrape_date", pl.String), ("property_id", pl.String), ("sum_hor", pl.Int64)])
     for row in df.rows(named=True):
         # Return 0 for sum if calendar is null
         if row['calendarBody']:
@@ -37,25 +36,22 @@
             sum_hor = calDF.sum_horizontal()[0]
         else:
             sum_hor = 0
-        # With Index
-        # gridData.append([row['prop_index'], row['date_index'], sum_hor])
-        # With ScrapeDate
-        gridData.append([row['ScrapeDate'], row['date_index'], sum_hor])
-
-    gridData = np.array(gridData)
-    # get all values to calculate Max
-    allValues = gridData[:, 2].astype(int)
-    maxValue = np.max(allValues)
-    gridData[:, 2] = (allValues*100)/maxValue
+        gridData = gridData.vstack(pl.DataFrame({"scrape_date" : row['ScrapeDate'], "property_id": str(row['property_id']), "sum_hor": sum_hor}))
+
 
-    # Return back to list
-    gridData = gridData.tolist()
+    # get the overall maximum sum
+    maxValue = gridData['sum_hor'].max()
+    values = []
+
+    for row in gridData.rows(named=True):
+        capacity = (row['sum_hor']*100)/maxValue
+        values.append((row['scrape_date'], row['property_id'], capacity))
 
     # Cast listOfDates to datetime
     listOfDates = listOfDates.cast(pl.Date).to_list()
-    listOfPropertyIDs = listOfPropertyIDs.to_list()
+    listOfPropertyIDs = listOfPropertyIDs.cast(pl.String).to_list()
 
     # Create JSON
-    outDict = {'scrapeDates': listOfDates, 'property_ids': listOfPropertyIDs, 'values': gridData}
+    outDict = {'scrapeDates': listOfDates, 'property_ids': listOfPropertyIDs, 'values': values}
 
     return outDict