Compare commits
1 commit
main ... refactor-t

| Author | SHA1 | Date |
|---|---|---|
| Giò Diani | 479feba6c2 | |
@@ -23,7 +23,6 @@
 *.ipr
 .idea/
-

 # eclipse project file
 .settings/
 .classpath
@@ -66,8 +65,3 @@ env3.*/
 # duckdb
 *.duckdb

-# cache
-*.obj
-
-/src/mauro/dok/
-
@@ -1,6 +0,0 @@
# Consultancy 2

## Projektstruktur
- etl: Enthält den Programmcode, welcher die Daten aufbereitet und via REST-API zur Verfügung stellt.
- dashboard: Webapplikation zur Exploration und Visualisierung der Daten.
@@ -1,16 +0,0 @@
# Install

## Prerequisites
- In order to run this project please install all required software according to the laravel documentation: https://laravel.com/docs/11.x#installing-php

## Configuration & installation
- Make a copy of the .env.example to .env
- Run the following commands:
```bash
composer install && php artisan key:generate && npm i
```

# Run server
```bash
composer run dev
```
@@ -1,115 +0,0 @@
<?php

namespace App;

use Illuminate\Support\Facades\Cache;
use Illuminate\Support\Facades\Http;

/*
 * Class contains methods which make calls to the API.
 * Successfull calls get cached.
 */

class Api
{

    public static function get(string $path, string $query = ''): ?array
    {

        $endpoint = env('FASTAPI_URI');
        $request = $endpoint.$path;

        // load from cache if available
        if (Cache::has($request)) {
            return Cache::get($request);
        }

        // Set timeout to .5h
        $get = Http::timeout(1800)->get($request);

        // return result and cache it
        if($get->successful()){
            $result = $get->json();
            Cache::put($request, $result);
            return $result;
        }

        return null;
    }

    public static function propertiesGrowth(): mixed
    {
        return self::get('/properties/growth');
    }

    public static function propertiesGeo(): mixed
    {
        return self::get('/properties/geo');
    }

    public static function propertyExtractions(int $id): mixed
    {
        return self::get("/properties/{$id}/extractions");
    }

    public static function propertyCapacities(int $id): mixed
    {
        return self::get("/properties/{$id}/capacities");
    }

    public static function propertyBase(int $id): mixed
    {
        return self::get("/properties/{$id}/base");
    }

    public static function propertyCapacitiesMonthly(int $id, string $date): mixed
    {
        return self::get("/properties/{$id}/capacities/monthly/{$date}");
    }

    public static function propertyCapacitiesDaily(int $id, string $date): mixed
    {
        return self::get("/properties/{$id}/capacities/daily/{$date}");
    }

    public static function propertyNeighbours(int $id): mixed
    {
        return self::get("/properties/{$id}/neighbours");
    }

    public static function regions(): mixed
    {
        return self::get('/regions');
    }

    public static function regionBase(int $id): mixed
    {
        return self::get("/regions/{$id}/base");
    }

    public static function regionPropertiesCapacities(int $id): mixed
    {
        return self::get("/regions/{$id}/properties/capacities");
    }

    public static function regionCapacitiesMonthly(int $id, string $date): mixed
    {
        return self::get("/regions/{$id}/capacities/monthly/{$date}");
    }

    public static function regionCapacitiesDaily(int $id, string $date): mixed
    {
        return self::get("/regions/{$id}/capacities/daily/{$date}");
    }

    public static function regionCapacities(int $id): mixed
    {
        return self::get("/regions/{$id}/capacities");
    }

    public static function regionMovingAverage(int $id, string $date): mixed
    {
        return self::get("/regions/{$id}/moving-average/{$date}");
    }

}
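Note (not part of the diff): a minimal usage sketch of the deleted `App\Api` helper, with the call pattern taken from `routes/web.php` further down; the error handling shown here is hypothetical.

```php
<?php
// Sketch only: Api::get() builds the request URL from env('FASTAPI_URI') plus
// the path, returns a cached copy when one exists, and otherwise performs an
// HTTP GET with a 1800-second timeout, caching the decoded JSON on success.

use App\Api;

$regions = Api::regions();       // first call: HTTP request to the FastAPI backend, result cached
$regionsAgain = Api::regions();  // later calls: served from the cache

if ($regions === null) {
    // get() returns null when the upstream request was not successful
    abort(503, 'FastAPI backend not reachable'); // hypothetical handling
}
```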
@@ -1,12 +0,0 @@
<?php

namespace App;

class Chart
{
    public static function colors(int $count = 5){
        $colors = ['#9ebcda','#8c96c6','#88419d','#810f7c','#4d004b'];
        return json_encode($colors);
    }

}
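Note (not part of the diff): `Chart::colors()` ignores its `$count` argument and always returns the same five hex colors as a JSON string. A sketch of how it feeds the Blade views below, assuming a controller wires it into `$diagramsOptions` (the array those views read with `{!! ... !!}`):

```php
<?php
// Sketch only: the JSON string is echoed verbatim into the ECharts options
// of the Blade views ({!! $diagramsOptions['shared']['colors'] !!}).

use App\Chart;

$diagramsOptions = [
    'shared' => [
        'colors' => Chart::colors(), // '["#9ebcda","#8c96c6","#88419d","#810f7c","#4d004b"]'
    ],
];
```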
@@ -1,330 +0,0 @@
/* 1. Use a more-intuitive box-sizing model */
*, *::before, *::after {
  box-sizing: border-box;
}

/* 2. Remove default margin */
* {
  margin: 0;
  font-family: sans-serif;
}

body {
  /* 3. Add accessible line-height */
  line-height: 1.5;
  /* 4. Improve text rendering */
  -webkit-font-smoothing: antialiased;
  padding: 0 1em;
  height: 100vh;
  background-image: radial-gradient(73% 147%, #EADFDF 59%, #ECE2DF 100%), radial-gradient(91% 146%, rgba(255,255,255,0.50) 47%, rgba(0,0,0,0.50) 100%);
  background-blend-mode: screen;
}

/* 5. Improve media defaults */
img, picture, video, canvas, svg {
  display: block;
  max-width: 100%;
}

/* 6. Inherit fonts for form controls */
input, button, textarea, select {
  font: inherit;
}

/* 7. Avoid text overflows */
p, h1, h2, h3, h4, h5, h6 {
  overflow-wrap: break-word;
}

/* 8. Improve line wrapping */
p {
  text-wrap: pretty;
}
h1, h2, h3, h4, h5, h6 {
  text-wrap: balance;
}

dt{
  font-weight: 600;
}

dd + dt{
  margin-top: .2em;
}

nav + button,
span + button{
  margin-left: .5em;
}

ul{
  padding-left: 1em;
}

p + ul{
  margin-top: 1em;
}

button[popovertarget]{
  background: no-repeat center / .3em #4d004b url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 192 512'%3E%3C!--!Font Awesome Free 6.7.2 by @fontawesome - https://fontawesome.com License - https://fontawesome.com/license/free Copyright 2025 Fonticons, Inc.--%3E%3Cpath fill='%23fff' d='M48 80a48 48 0 1 1 96 0A48 48 0 1 1 48 80zM0 224c0-17.7 14.3-32 32-32l64 0c17.7 0 32 14.3 32 32l0 224 32 0c17.7 0 32 14.3 32 32s-14.3 32-32 32L32 512c-17.7 0-32-14.3-32-32s14.3-32 32-32l32 0 0-192-32 0c-17.7 0-32-14.3-32-32z'/%3E%3C/svg%3E%0A");
  cursor: pointer;
  display: inline-block;
  width: 1.5em;
  height: 1.5em;
  border-radius: 50%;
  border: 1px solid #fff;
}

button[popovertarget]::before{
  color: #fff;
  font-weight: 700;
}

button[popovertarget]>span{
  position: absolute;
  left: -999em;
  top: -999em;
}

[popover] {
  border: none;
  border-radius: 1em;
  background: #fff;
  padding: 1.5em;
  border-radius: var(--small-border);
  box-shadow: .0625em .0625em .625em rgba(0, 0, 0, 0.1);
  max-width: 40em;
  top: 4em;
  margin: 0 auto;
}

[popover]::backdrop{
  background-color: rgba(0,0,0,.5);
}

[popover] h2{
  font-size: 1em;
}

[popover] h3{
  font-size: .95em;
  margin-top: 1em;
}

p.formula{
  font-family: monospace;
  border: 1px solid #aaa;
  padding: .5em 1em;
}

p + p{
  margin-top: 1em;
}

/*
  9. Create a root stacking context
*/
#root, #__next {
  isolation: isolate;
}

body>header{
  position: fixed;
  top: 0;
  left: 0;
  width: 100%;
  height: 3em;
  background: #ccc;
  z-index: 99;
  display: flex;
  align-items: center;
  padding: 0 1em;
}

body>header>nav{
  text-align: center;
  min-width: 10em;
  background: #fff;
  border-radius: .2em;
  position: relative;
  border: 1px solid #fff;
}

body>header>nav>ul{
  position: absolute;
  background: #fff;
  width: calc(100% + 2px);
  list-style: none;
  padding: 0 0 1em;
  top: -999em;
  left: -999em;
  border-radius: 0 0 .2em .2em;
  border-left: 1px solid #aaa;
  border-right: 1px solid #aaa;
  border-bottom: 1px solid #aaa;
}

body>header>nav:hover{
  border-radius: .2em .2em 0 0;
  border: 1px solid #aaa;
}

body>header>nav:hover ul{
  top: initial;
  left: -1px;
}

body>header>nav>ul>li a,
body>header>nav>strong{
  display: inline-block;
  padding: .2em .4em;
}

a{
  color: #000;
}

a:hover,
a:focus{
  color: #aaa;
}

main{
  width: 100%;
  height: 100vh;
  padding: 4em 0 1em;
  display: grid;
  gap: .5em;
}

body.overview main{
  grid-template-columns: repeat(8, minmax(1%, 50%));
  grid-template-rows: repeat(4, 1fr);
  grid-template-areas:
    "chart1 chart1 chart1 chart2 chart2 chart2 chart4 chart4"
    "chart1 chart1 chart1 chart2 chart2 chart2 chart4 chart4"
    "chart1 chart1 chart1 chart3 chart3 chart3 chart4 chart4"
    "chart1 chart1 chart1 chart3 chart3 chart3 chart4 chart4"
}

body.region main{
  grid-template-columns: repeat(4, minmax(10%, 50%));
  grid-template-rows: repeat(6, 1fr) 4em;
  grid-template-areas:
    "chart1 chart1 chart2 chart2"
    "chart1 chart1 chart2 chart2"
    "chart1 chart1 chart3 chart4"
    "chart1 chart1 chart3 chart4"
    "chart1 chart1 chart6 chart6"
    "chart1 chart1 chart6 chart6"
    "chart1 chart1 timeline timeline";
}

body.property main{
  grid-template-columns: repeat(4, minmax(10%, 50%));
  grid-template-rows: repeat(4, 1fr) 4em;
  grid-template-areas:
    "chart1 chart1 chart2 chart2"
    "chart1 chart1 chart2 chart2"
    "chart5 chart5 chart3 chart4"
    "chart5 chart5 chart3 chart4"
    "chart5 chart5 timeline timeline";
}

article{
  background: #f9f9f9;
  border: .0625em solid #ccc;
  box-shadow: 0 5px 10px rgba(154,160,185,.05), 0 15px 40px rgba(166,173,201,.2);
  border-radius: .2em;
  display: grid;
}

article.header{
  grid-template-columns: 100%;
  grid-template-rows: minmax(1%, 2em) 1fr;
  padding: .5em 1em 1em .5em;
}

article.map{
  padding: 0;
}

article.map>header{
  padding: .5em 1em 1em .5em;
}

article>header{
  display: grid;
  grid-template-columns: 1fr 1em;
  grid-template-rows: 1fr;
}

article>header>h2{
  font-size: .8em;
  font-weight: 600;
}

@media(max-width: 960px){

  body{
    height: auto;
  }

  body.overview main,
  body.region main,
  body.property main{
    height: auto;
    grid-template-columns: 100%;
    grid-template-rows: repeat(5, minmax(20em, 25em)) 4em;
    grid-template-areas: "chart1" "chart2" "chart3" "chart4" "chart5" "chart6" "timeline";
  }

  body.overview main{
    grid-template-rows: minmax(20em, 40em) repeat(3, minmax(20em, 25em));
    grid-template-areas: "chart1" "chart2" "chart3" "chart4";
  }

  body.region main{
    grid-template-rows: minmax(20em, 40em) repeat(4, minmax(20em, 25em)) 4em;
    grid-template-areas: "chart1" "chart2" "chart3" "chart4" "chart6" "timeline";
  }

  body.property main{
    grid-template-rows: repeat(5, minmax(20em, 25em)) 4em;
    grid-template-areas: "chart1" "chart2" "chart3" "chart4" "chart5" "timeline";
  }
}

.leaflet-marker-icon span{
  background: #4d004b;
  width: 2rem;
  height: 2rem;
  display: block;
  left: -1rem;
  top: -1rem;
  position: relative;
  border-radius: 50% 50% 0;
  transform: rotate(45deg);
  border: 2px solid #fff
}

/*['#9ecae1','#6baed6','#4292c6','#2171b5','#084594'*/
.leaflet-marker-icon.region1 span{
  background: #8c96c6;
}

.leaflet-marker-icon.region2 span{
  background: #88419d;
}

.leaflet-marker-icon.region3 span{
  background: #810f7c;
}

.leaflet-marker-icon.region4 span{
  background: #4d004b;
}
File diff suppressed because one or more lines are too long
@@ -1,4 +0,0 @@
import * as echarts from 'echarts';
import 'leaflet'

window.echarts = echarts;
@@ -1,17 +0,0 @@
<!DOCTYPE html>
<html lang="de">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>Dashboard</title>
    @vite(['resources/css/app.css', 'resources/js/app.js', 'node_modules/leaflet/dist/leaflet.css'])
</head>
<body class="@yield('body-class')">
    <header>
        @yield('header')
    </header>
    <main>
        @yield('main')
    </main>
</body>
</html>
@@ -1,365 +0,0 @@
@extends('base')
@section('body-class', 'overview')
@section('header')
<nav>
    <strong>Start</strong>
    <ul>
        @foreach($regions as $r)
        <li><a href="/region/{{ $r['id'] }}">{{ $r['name'] }}</a></li>
        @endforeach
    </ul>
</nav>
@endsection
@section('main')
<article class="header" style="grid-area: chart1;">
    <header>
        <h2>Verfügbarkeit aller Mietobjekte über gesamten beobachteten Zeitraum</h2>
        <button popovertarget="pop1">
            <span>Erklärungen zum Diagramm</span>
        </button>
        <div popover id="pop1">
            <h2>Verfügbarkeit aller Mietobjekte über gesamten beobachteten Zeitraum</h2>
            <p>
                Das Diagramm zeigt die Verfügbarkeit aller Mietobjekte zu allen beobachteten Zeitpunkten.
            </p>
            <ul>
                <li>X-Achse: Zeitpunkt Beobachtung.</li>
                <li>Y-Achse: Mietobjekte.</li>
                <li>Kategorien: 0% = Das Mietobjekt ist komplett Ausgebucht; 100% = Das Mietobjekt kann zu allen Verfügbaren Daten gebucht werden.</li>
            </ul>

            <h3>Berrechnung Verfügbarkeit</h3>
            <p>Die Verfügbarkeit eines Mietobjekt errechnet sich folgendermassen:</p>
            <p class="formula">
                Verfügbarkeit = (100 / (Anzahl Buchungsdaten * 2)) * Summe Status
            </p>
            <ul>
                <li>Status: Jeder verfügbare Kalendertag kann den Status «Nicht Verfügbar» (0), «Verfügbar (kein Anreisetag)» (1) oder «Verfügbar» (2) aufweisen.</li>
                <li>Anzahl Buchungsdaten: Die Summe aller angebotenen Buchungsdaten mit zwei multipliziert (= Alle Buchungdaten haben den Status «Verfügbar»)</li>
            </ul>
        </div>
        <div>
    </header>
    <div id="chart-heatmap"></div>
</article>
<article class="header" style="grid-area: chart2;">
    <header>
        <h2>
            Anzahl jemals gefundene Kurzzeitmietobjekte pro Region
        </h2>
        <button popovertarget="pop2">
            <span>Erklärungen zum Diagramm</span>
        </button>
        <div popover id="pop2">
            <h2>Anzahl jemals gefundener Mietobjekte pro Region</h2>
            <p>
                Das Balkendiagramm zeigt die Anzahl jemals gefundener Mietobjekte pro Region.
            </p>
            <ul>
                <li>X-Achse: Region</li>
                <li>Y-Achse: Anzahl Mietobjekte</li>
            </ul>
        </div>
        <div>
    </header>
    <div id="chart-props-per-region"></div>
</article>
<article class="header" style="grid-area: chart3;">
    <header>
        <h2>
            Entwicklung der Anzahl jemals gefunden Kurzzeitmietobjekte
        </h2>
        <button popovertarget="pop3">
            <span>Erklärungen zum Diagramm</span>
        </button>
        <div popover id="pop3">
            <h2>Entwicklung Anzahl jemals gefundener Mietobjekte pro Region</h2>
            <p>
                Das Liniendiagramm zeigt die Entwicklung aller jemals gefundener Mietobjekte pro Region.
            </p>
            <ul>
                <li>X-Achse: Zeitpunkt Beobachtung</li>
                <li>Y-Achse: Anzahl Mietobjekte</li>
            </ul>
        </div>
        <div>
    </header>
    <div id="extractions"></div>
</article>
<article style="grid-area: chart4;">
    <div id="leaflet"></div>
</article>
<script type="module">
const sharedOptions = {
    basic: {
        color: {!! $diagramsOptions['shared']['colors'] !!},
        grid: {
            top: 30,
            left: 70,
            right: 0,
            bottom: 45
        },
        name: (opt) => {
            return {
                name: opt.name,
                nameLocation: opt.location,
                nameGap: 50,
                nameTextStyle: {
                    fontWeight: 'bold',
                },
            }
        }
    }
}
const extractionDates = {!! $diagramsOptions['shared']['extractionDates'] !!};

const chartHeatmap = document.getElementById('chart-heatmap');
const cHeatmap = echarts.init(chartHeatmap);
const cHeatmapOptions = {
    animation: false,
    tooltip: {
        position: 'top'
    },
    grid: {
        show: true,
        borderWidth: 1,
        borderColor: '#aaa',
        top: 30,
        right: 45,
        bottom: 70,
        left: 30
    },
    dataZoom: [{
        type: 'slider'
    },
    {
        type: 'slider',
        show: true,
        yAxisIndex: 0,
    }],
    xAxis: {
        show: true,
        name: 'Zeitpunkt Beobachtung',
        type: 'category',
        data: extractionDates,
        splitArea: {
            show: false
        },
        splitArea: {
            show: false
        },
        axisLabel: {
            show: false,
        },
        axisTick: {
            show: false,
        },
        axisLine: {
            show: false,
        },
        nameLocation: 'center',
        nameGap: 10,
        nameTextStyle: {
            fontWeight: 'bold',
        }
    },
    yAxis: {
        show: true,
        type: 'category',
        data: {!! $diagramsOptions['heatmap']['yAxis']['data'] !!},
        splitArea: {
            show: false
        },
        axisTick: {
            show: false,
        },
        axisLine: {
            show: false,
        },
        axisLabel: {
            show: false,
        },
        name: 'Mietobjekte',
        nameLocation: 'center',
        nameGap: 10,
        nameTextStyle: {
            fontWeight: 'bold',
        }
    },
    visualMap: {
        type: 'piecewise',
        min: 0,
        max: 100,
        calculable: true,
        orient: 'horizontal',
        left: 'center',
        top: 0,
        formatter: (v1, v2) => {
            return `${v1} – ${v2} %`;
        },
        inRange: {
            color: sharedOptions.basic.color,
        },
    },
    series: [
        {
            name: 'Verfügbarkeit',
            type: 'heatmap',
            blurSize: 0,
            data: {!! $diagramsOptions['heatmap']['series']['data'] !!},
            label: {
                show: false
            },
            tooltip: {
                formatter: (data) => {
                    return `Kurzzeitmietobjekte-ID: ${data.data[1]}<br />Beobachtungszeitpunkt: ${data.data[0]}<br/>Verfügbarkeit: ${data.data[2].toFixed(2)} %`
                },
            },
            emphasis: {
                itemStyle: {
                    borderColor: '#000',
                    borderWidth: 2
                }
            }
        }
    ]
}

cHeatmap.setOption(cHeatmapOptions);

const chartPropsPerRegion = document.getElementById('chart-props-per-region');
const cPropsPerRegion = echarts.init(chartPropsPerRegion);
const cPropsPerRegionOptions = {
    grid: sharedOptions.basic.grid,
    color: sharedOptions.basic.color,
    xAxis: {
        name: 'Region',
        nameLocation: 'center',
        nameGap: 30,
        nameTextStyle: {
            fontWeight: 'bold',
        },
        type: 'category',
        data: {!! $diagramsOptions['propertiesPerRegion']['xAxis']['data'] !!}
    },
    yAxis: {
        type: 'value',
        name: 'Anzahl Mietobjekte',
        nameLocation: 'middle',
        nameGap: 50,
        nameTextStyle: {
            fontWeight: 'bold',
        },
    },
    series: [
        {
            data: {!! $diagramsOptions['propertiesPerRegion']['yAxis']['data'] !!},
            type: 'bar',
            itemStyle: {
                color: (e) => {
                    return sharedOptions.basic.color[e.dataIndex];
                }
            }
        },
    ]
};

cPropsPerRegion.setOption(cPropsPerRegionOptions);

const chartExtractions = document.getElementById('extractions');
const cExtractions = echarts.init(chartExtractions);

const cExtractionsOptions = {
    color: sharedOptions.basic.color,
    tooltip: {
        trigger: 'axis'
    },
    legend: {
        show: true
    },
    grid: sharedOptions.basic.grid,
    xAxis: {
        name: 'Zeitpunkt Beobachtung',
        nameLocation: 'center',
        nameGap: 24,
        nameTextStyle: {
            fontWeight: 'bold',
        },
        type: 'category',
        boundaryGap: false,
        data: extractionDates
    },
    yAxis: {
        name: 'Anzahl Mietobjekte',
        nameLocation: 'center',
        nameGap: 50,
        nameTextStyle: {
            fontWeight: 'bold',
        },
        type: 'value'
    },
    series: [
        {
            name: 'Alle',
            type: 'line',
            stack: 'Total',
            data: {!! json_encode($diagramsOptions['extractions']['series']['total_all']) !!},
        },
        {
            connectNulls: true,
            name: 'Davos',
            type: 'line',
            data: {!! json_encode($diagramsOptions['extractions']['series']['total_davos']) !!}
        },
        {
            connectNulls: true,
            name: 'Engadin',
            type: 'line',
            data: {!! json_encode($diagramsOptions['extractions']['series']['total_engadin']) !!}
        },
        {
            connectNulls: true,
            name: 'Heidiland',
            type: 'line',
            data: {!! json_encode($diagramsOptions['extractions']['series']['total_heidiland']) !!}
        },
        {
            connectNulls: true,
            name: 'St. Moritz',
            type: 'line',
            data: {!! json_encode($diagramsOptions['extractions']['series']['total_stmoritz']) !!}
        },
    ]
};

cExtractions.setOption(cExtractionsOptions);

const map = L.map('leaflet');

L.tileLayer('https://tile.openstreetmap.org/{z}/{x}/{y}.png', {
    maxZoom: 19,
    attribution: '© <a href="http://www.openstreetmap.org/copyright">OpenStreetMap</a>'
}).addTo(map);

function icon(id){
    return L.divIcon({
        className: "region"+id,
        html: '<span></span>'
    })
}

const markers = L.featureGroup([
    @foreach($geo as $g)
    L.marker([{{ $g['latlng'] }}], {icon: icon({{ $g['region_id'] }})}).bindPopup('<a href="/property/{{ $g['property_id'] }}">{{ $g['latlng'] }}</a>'),
    @endforeach
]).addTo(map);

map.fitBounds(markers.getBounds(), {padding: [20,20]})

cHeatmap.on('click', 'series', (e) => {
    window.open(`/property/${e.value[1]}?date=${e.value[0]}`, '_self');
})

</script>
@endsection
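Note (not part of the diff): the popover in this deleted view documents the availability formula. A small worked example of that calculation, with hypothetical numbers:

```php
<?php
// Worked example of the formula quoted in the popover:
//   Verfügbarkeit = (100 / (Anzahl Buchungsdaten * 2)) * Summe Status
// Each offered calendar day has status 0 (not available), 1 (available, but not
// an arrival day) or 2 (available).

$statuses    = [2, 2, 1, 0, 2];      // five offered booking days (hypothetical)
$bookingDays = count($statuses);     // Anzahl Buchungsdaten = 5
$statusSum   = array_sum($statuses); // Summe Status = 7

$availability = (100 / ($bookingDays * 2)) * $statusSum; // = 70.0 (percent)
```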
@@ -1,27 +0,0 @@
@extends('base')
@section('body-class', 'property')
@section('header')
<nav>
    <strong>Property: {{ $base['check_data'] }} ({{ $base['region_name'] }})</strong>
    <ul>
        <li><a href="/">Start</a></li>
        @foreach($regions as $r)
        <li><a href="/region/{{ $r['id'] }}">{{ $r['name'] }}</a></li>
        @endforeach
    </ul>
</nav>
<button popovertarget="prop-details"></button>
<div popover id="prop-details">
    <dl>
        <dt>Region</dt>
        <dd>{{ $base['region_name'] }}</dd>
        <dt>Zum ersten mal gefunden</dt>
        <dd>{{ $base['first_found'] }}</dd>
        <dt>Zum letzten mal gefunden</dt>
        <dd>{{ $base['last_found'] }}</dd>
    </dl>
</div>
@endsection
@section('main')
<p>Für dieses Mietobjekt sind keine Daten vorhanden.</p>
@endsection
@@ -1,463 +0,0 @@
@extends('base')
@section('body-class', 'property')
@section('header')
<nav>
    <strong>Mietobjekt: {{ $base['latlng'] }} ({{ $base['region_name'] }})</strong>
    <ul>
        <li><a href="/">Start</a></li>
        @foreach($regions as $r)
        <li><a href="/region/{{ $r['id'] }}">{{ $r['name'] }}</a></li>
        @endforeach
    </ul>
</nav>
<button popovertarget="prop-details"></button>
<div popover id="prop-details">
    <dl>
        <dt>Region</dt>
        <dd>{{ $base['region_name'] }}</dd>
        <dt>Zum ersten mal gefunden</dt>
        <dd>{{ $base['first_found'] }}</dd>
        <dt>Zum letzten mal gefunden</dt>
        <dd>{{ $base['last_found'] }}</dd>
    </dl>
</div>
@endsection

@section('main')
<article style="grid-area: timeline;">
    <div id="timeline"></div>
</article>
<article class="header" style="grid-area: chart2;">
    <header>
        <h2 id="belegung-title">
            Kalenderansicht der Verfügbarkeit am <span class="date">{{ $startDate }}</span>
        </h2><button popovertarget="popup-cal"></button>
        <div popover id="popup-cal">
            <p>
                Das Kalenderdiagram zeigt die drei Verfügbarkeitskategorien des Mietobjekts.
            </p>
        </div>
    </header>
    <div id="chart-calendar"></div>
</article>
<article class="header map" style="grid-area: chart5;">
    <header>
        <h2 id="belegung-title">
            Kurzzeitmietobjekte in der Nähe
        </h2>
    </header>
    <div id="chart-map"></div>
</article>
<article class="header" style="grid-area: chart3;">
    <header>
        <h2>
            Verfügbarkeit Mietobjekt Monate am <span class="date">{{ $startDate }}</span>
        </h2>
    </header>
    <div id="chart-capacity-monthly">
    </div>
</article>
<article class="header" style="grid-area: chart1;">
    <header>
        <h2>
            Entwicklung der Verfügbarkeit
        </h2>
        <button popovertarget="chart-capacity-popover"></button>
        <div id="chart-capacity-popover" popover>
            <h2>Erkläung zum Diagramm</h2>
            <p>Das Liniendiagramm zeigt, wie sich die insgesamte Verfügbarkeit des Kurzzeitmietobjekts entwickelt hat.</p>
        </div>
    </header>
    <div id="chart-capacity"></div>
</article>
<article class="header" style="grid-area: chart4;">
    <header>
        <h2>
            Verfügbarkeit Mietobjekt Tage am <span class="date">{{ $startDate }}</span>
        </h2>
    </header>
    <div id="chart-capacity-daily">
</article>
<script type="module">

const sharedOptions = {
    extractiondates: {!! $diagramsOptions['shared']['extractiondates']!!},
    basic: {
        color: {!!$diagramsOptions['shared']['colors']!!},
        grid: {
            top: 20,
            left: 60,
            right: 0,
            bottom: 50
        },
        tooltip: {
            show: true,
            trigger: 'axis',
            valueFormatter: (value) => value.toFixed(2) + ' %'
        },
        name: (opt) => {
            return {
                name: opt.name,
                nameLocation: opt.location,
                nameGap: 24,
                nameTextStyle: {
                    fontWeight: 'bold',
                }
            }
        }
    }
}

const chartTimeline = document.getElementById('timeline');
const cTimeline = echarts.init(chartTimeline);
const cTimelineOptions = {
    grid: {
        show: false,
    },
    timeline: {
        data: sharedOptions.extractiondates,
        playInterval: 1000,
        axisType: 'time',
        left: 8,
        right: 8,
        bottom: 0,
        label: {
            show: false
        }
    },
};

cTimeline.setOption(cTimelineOptions);

const chartCapacityMonthly = document.getElementById('chart-capacity-monthly');
const cCapacityMonthly = echarts.init(chartCapacityMonthly);

const cCapacityMonthlyOptions = {
    tooltip: sharedOptions.basic.tooltip,
    timeline: {
        show: false,
        data: sharedOptions.extractiondates,
        axisType: 'time',
    },
    grid: {
        top: 5,
        bottom: 40,
        left: 70,
        right: 10
    },
    xAxis: {
        type: 'value',
        max: 100,
        name: 'Verfügbarkeit in %',
        nameLocation: 'center',
        nameGap: 25,
        nameTextStyle: {
            fontWeight: 'bold',
        }
    },
    yAxis: {
        type: 'category',
    },
    options: [
        @foreach ($diagramsOptions['capacityMonthly']['options'] as $cM)
        {
            yAxis: {
                data: {!! json_encode($cM['months']) !!}
            },
            series: [{
                type: 'bar',
                itemStyle: {
                    color: sharedOptions.basic.color[3]
                },
                data: {!! json_encode($cM['capacities']) !!}
            }]
        },
        @endforeach
    ]
};

cCapacityMonthly.setOption(cCapacityMonthlyOptions);

const chartCapacityDaily = document.getElementById('chart-capacity-daily');
const cCapacityDaily = echarts.init(chartCapacityDaily);

const cCapacityDailyOptions = {
    tooltip: sharedOptions.basic.tooltip,
    timeline: {
        show: false,
        data: sharedOptions.extractiondates,
        axisType: 'time',
    },
    grid: {
        top: 5,
        bottom: 40,
        left: 70,
        right: 10
    },
    xAxis: {
        type: 'value',
        max: 100,
        name: 'Verfügbarkeit in %',
        nameLocation: 'center',
        nameGap: 25,
        nameTextStyle: {
            fontWeight: 'bold',
        }
    },
    yAxis: {
        type: 'category',
    },
    options: [
        @foreach ($diagramsOptions['capacityDaily']['options'] as $cD)
        {
            yAxis: {
                data: {!! json_encode($cD['weekdays']) !!}
            },
            series: [{
                type: 'bar',
                itemStyle: {
                    color: sharedOptions.basic.color[3]
                },
                data: {!! json_encode($cD['capacities']) !!}
            }]
        },
        @endforeach
    ]
};

cCapacityDaily.setOption(cCapacityDailyOptions);

const chartCapacity = document.getElementById('chart-capacity');
const cCapacity = echarts.init(chartCapacity);

const cCapacityOptions = {
    color: sharedOptions.basic.color,
    legend: {
        show: true
    },
    tooltip: {
        trigger: 'axis',
        valueFormatter: (value) => value.toFixed(2)+' %'
    },
    grid: {
        top: 40,
        left: 25,
        right: 10,
        bottom: 20,
        containLabel: true
    },
    xAxis: {
        type: 'category',
        boundaryGap: false,
        data: {!! $diagramsOptions['capacities']['xAxis']['data'] !!},
        name: 'Zeitpunkt Beobachtung',
        nameLocation: 'center',
        nameGap: 24,
        nameTextStyle: {
            fontWeight: 'bold',
        }
    },
    yAxis: {
        type: 'value',
        min: 0,
        max: 100,
        name: 'Verfügbarkeit in %',
        nameLocation: 'center',
        nameGap: 38,
        nameTextStyle: {
            fontWeight: 'bold',
        }
    },
    series: [
        {
            name: 'Verfügbarkeit Mietobjekt',
            type: 'line',
            symbolSize: 7,
            data: {!! $diagramsOptions['capacities']["series"][0]["data"] !!}
        },
        {
            name: 'Verfügbarkeit {{ $base['region_name'] }}',
            type: 'line',
            symbolSize: 7,
            data: {!! $diagramsOptions['capacities']["series"][1]["data"] !!}
        },
        {
            name: 'Verfügbarkeit alle Regionen',
            type: 'line',
            symbolSize: 7,
            data: {!! $diagramsOptions['capacities']["series"][2]["data"] !!}
        }
    ]
};

cCapacity.setOption(cCapacityOptions);

const chartCalendar = document.getElementById('chart-calendar');
const cCalendar = echarts.init(chartCalendar);
const h2Belegung = document.getElementById('belegung-title');

const cCalendarOptions = {
    timeline: {
        show: false,
        data: sharedOptions.extractiondates,
        axisType: 'time',
    },
    visualMap: {
        categories: [0,1,2],
        inRange: {
            color: ['#ca0020', '#92c5de', '#0571b0']
        },
        formatter: (cat) => {
            switch (cat) {
                case 0:
                    return 'Ausgebucht';
                case 1:
                    return 'Verfügbar (kein Anreisetag)';
                case 2:
                    return 'Verfügbar';
            }
        },
        type: 'piecewise',
        orient: 'horizontal',
        left: 'center',
        top: 0
    },
    calendar:[
        {
            orient: 'horizontal',
            range: '2024',
            top: '15%',
            right: 10,
            bottom: '65%',
            left: 50,
            dayLabel: {
                fontSize: 10
            }
        },
        {
            orient: 'horizontal',
            range: '2025',
            top: '47%',
            right: 10,
            bottom: '33%',
            left: 50,
            dayLabel: {
                fontSize: 10
            }
        },
        {
            orient: 'horizontal',
            range: '2026',
            top: '79%',
            right: 10,
            bottom: '1%',
            left: 50,
            dayLabel: {
                fontSize: 10
            }
        }
    ],
    options: [
        @foreach ($diagramsOptions['calendar']['series'] as $c)
        {
            series: [{
                type: 'heatmap',
                coordinateSystem: 'calendar',
                calendarIndex: 0,
                data: {!! json_encode($c) !!}
            },
            {
                type: 'heatmap',
                coordinateSystem: 'calendar',
                calendarIndex: 1,
                data: {!! json_encode($c) !!}
            },
            {
                type: 'heatmap',
                coordinateSystem: 'calendar',
                calendarIndex: 2,
                data: {!! json_encode($c) !!}
            }]
        },
        @endforeach
    ]
};

cCalendar.setOption(cCalendarOptions);

cTimeline.on('timelinechanged', (e) => {

    let dateTitles = document.querySelectorAll('span.date');
    dateTitles.forEach(el => {
        el.innerText = cTimelineOptions.timeline.data[e.currentIndex];
    });

    // Set markpoint on linechart
    let x = cCapacityOptions.xAxis.data[e.currentIndex];
    let y = cCapacityOptions.series[0].data[e.currentIndex];

    cCapacityMonthly.dispatchAction({
        type: 'timelineChange',
        currentIndex: e.currentIndex
    });

    cCapacityDaily.dispatchAction({
        type: 'timelineChange',
        currentIndex: e.currentIndex
    });

    cCalendar.dispatchAction({
        type: 'timelineChange',
        currentIndex: e.currentIndex
    });

    cCapacity.setOption({
        series: {
            markPoint: {
                data: [{
                    coord: [x, y]
                }]
            }
        }
    });

})

/* Map w/ neighbours*/
const map = L.map('chart-map');

L.tileLayer('https://tile.openstreetmap.org/{z}/{x}/{y}.png', {
    maxZoom: 19,
    attribution: '© <a href="http://www.openstreetmap.org/copyright">OpenStreetMap</a>'
}).addTo(map);

function icon(id = 0){
    return L.divIcon({
        className: "region"+id,
        html: '<span></span>'
    })
}

const markers = L.featureGroup([
    L.marker([{{ $base['latlng'] }}], {icon: icon(1)}),
    @foreach($neighbours as $n)
    L.marker([{{ $n['lat'] }}, {{ $n['lon'] }}], {icon: icon()}).bindPopup('<a href="/property/{{ $n['id'] }}">{{ $n['lat'] }}, {{ $n['lon'] }}</a>'),
    @endforeach
]).addTo(map);

map.fitBounds(markers.getBounds(), {padding: [20,20]})

cCapacity.on('click', 'series', (e) => {

    // Switch to correct calendar in the timeline
    cTimeline.dispatchAction({
        type: 'timelineChange',
        currentIndex: e.dataIndex
    });

});

</script>
@endsection
@@ -1,583 +0,0 @@
@extends('base')
@section('body-class', 'region')
@section('header')
<nav>
    <strong>{{ $region['name'] }}</strong>
    <ul>
        <li><a href="/">Start</a></li>
        @foreach($regions as $r)
        @if($r['id'] != $regionId)
        <li><a href="/region/{{ $r['id'] }}">{{ $r['name'] }}</a></li>
        @endif
        @endforeach
    </ul>
</nav>
@endsection
@section('main')
<article style="grid-area: timeline;">
    <div id="timeline"></div>
</article>
<article class="header" style="grid-area: chart6;">
    <header>
        <h2 id="prediction-title">Gleitender Mittelwert für die Verfügbarkeit der Region</h2>
        <button popovertarget="chart-prediction-popover"></button>
        <div id="chart-prediction-popover" popover>
            <h2>Gleitender Mittelwert für die Verfügbarkeit der Region</h2>
            <p>Das Diagramm...</p>
            <ul>
                <li>X-Achse: Zeitpunkt der Beobachtung</li>
                <li>Y-Achse: Verfügbarkeit einer Region. 0% = Alle Mietobjekte der Region sind komplett ausgebucht; 100% = Alle Mietobjekte der Region können zu allen verfügbaren Daten gebucht werden. </li>
            </ul>
        </div>
    </header>
    <div id="chart-prediction"></div>
</article>
<article class="header" style="grid-area: chart1;">
    <header>
        <h2 id="belegung-title">Verfügbarkeit aller Mietobjekte der Region über gesamten beobachteten Zeitraum</h2>
        <button popovertarget="popup-heat"></button><div popover id="popup-heat">
            <h2>Verfügbarkeit aller Mietobjekte der Region über gesamten beobachteten Zeitraum</h2>
            <p>
                Das Diagramm zeigt die Verfügbarkeit aller Mietobjekte der Region zu allen beobachteten Zeitpunkten.
            </p>
            <ul>
                <li>X-Achse: Zeitpunkt Beobachtung.</li>
                <li>Y-Achse: Mietobjekte.</li>
                <li>Kategorien: 0% = Das Mietobjekt ist komplett Ausgebucht; 100% = Das Mietobjekt kann zu allen Verfügbaren Daten gebucht werden.</li>
            </ul>

            <h3>Berrechnung Verfügbarkeit</h3>
            <p>Die Verfügbarkeit eines Mietobjekt errechnet sich folgendermassen:</p>
            <p class="formula">
                Verfügbarkeit = (100 / (Anzahl Buchungsdaten * 2)) * Summe Status
            </p>
            <ul>
                <li>Status: Jeder verfügbare Kalendertag kann den Status «Nicht Verfügbar» (0), «Verfügbar (kein Anreisetag)» (1) oder «Verfügbar» (2) aufweisen.</li>
                <li>Anzahl Buchungsdaten: Die Summe aller angebotenen Buchungsdaten mit zwei multipliziert (= Alle Buchungdaten haben den Status «Verfügbar»)</li>
            </ul>
        </div>
        <div>
    </header>
    <div id="chart-heatmap"></div>
</article>
<article class="header" style="grid-area: chart3;">
    <header>
        <h2>
            Verfügbarkeit nach Monat am <span class="date">{{ $startDate }}</span>
        </h2>
    </header>
    <div id="chart-capacity-monthly">
    </div>
</article>
<article class="header" style="grid-area: chart2;">
    <header>
        <h2>
            Entwicklung der Verfügbarkeit
        </h2>
        <button popovertarget="chart-capacity-popover"></button>
        <div id="chart-capacity-popover" popover>
            <h2>Entwicklung der Verfügbarkeit</h2>
            <p>Das Liniendiagramm zeigt die Entwicklung Verfügbarkeit der Region im Vergleich zu allen Regionen an.</p>
            <ul>
                <li>X-Achse: Zeitpunkt der Beobachtung</li>
                <li>Y-Achse: Verfügbarkeit einer Region. 0% = Alle Mietobjekte der Region sind komplett ausgebucht; 100% = Alle Mietobjekte der Region können zu allen verfügbaren Daten gebucht werden. </li>
            </ul>
        </div>
    </header>
    <div id="chart-capacity"></div>
</article>
<article class="header" style="grid-area: chart4;">
    <header>
        <h2>
            Verfügbarkeit nach Wochentage am <span class="date">{{ $startDate }}</span>
        </h2>
    </header>
    <div id="chart-capacity-daily">
</article>

<script type="module">

const sharedOptions = {
    basic: {
        color: {!! $diagramsOptions['shared']['colors'] !!},
        grid: {
            top: 20,
            left: 60,
            right: 0,
            bottom: 50
        },
        tooltip: {
            show: true,
            trigger: 'axis',
            valueFormatter: (value) => value == null ? 'N/A' : value.toFixed(2)+' %'
        },
        name: (opt) => {
            return {
                name: opt.name,
                nameLocation: opt.location,
                nameGap: 24,
                nameTextStyle: {
                    fontWeight: 'bold',
                },
            }
        }
    }
}

const chartCapacity = document.getElementById('chart-capacity');
const cCapacity = echarts.init(chartCapacity);

const cCapacityOptions = {
    legend: {
        show: true
    },
    tooltip: sharedOptions.basic.tooltip,
    color: sharedOptions.basic.color,
    grid: {
        top: 20,
        left: 25,
        right: 10,
        bottom: 20,
        containLabel: true
    },
    xAxis: {
        type: 'category',
        boundaryGap: false,
        data: {!! $diagramsOptions['capacity']['xAxis']['data'] !!},
        name: 'Zeitpunkt Beobachtung',
        nameLocation: 'center',
        nameGap: 24,
        nameTextStyle: {
            fontWeight: 'bold',
        }
    },
    yAxis: {
        type: 'value',
        min: 0,
        max: 100,
        name: 'Verfügbarkeit in %',
        nameLocation: 'center',
        nameGap: 38,
        nameTextStyle: {
            fontWeight: 'bold',
        }
    },
    series: [{
        name: 'Verfügbarkeit alle Regionen',
        type: 'line',
        symbolSize: 7,
        data: {!! $diagramsOptions['capacity']['series']['all']['data'] !!}
    },
    {
        name: 'Verfügbarkeit Region',
        type: 'line',
        symbolSize: 7,
        data: {!! $diagramsOptions['capacity']['series']['region']['data'] !!}
    }]
};

cCapacity.setOption(cCapacityOptions);

const chartCapacityMonthly = document.getElementById('chart-capacity-monthly');
const cCapacityMonthly = echarts.init(chartCapacityMonthly);

const cCapacityMonthlyOptions = {
    timeline: {
        show: false,
        data: {!! $diagramsOptions['capacity']['xAxis']['data'] !!},
        axisType: 'time',
    },
    grid: {
        top: 5,
        bottom: 40,
        left: 70,
        right: 10
    },
    xAxis: {
        type: 'value',
        max: 100,
        name: 'Verfügbarkeit in %',
        nameLocation: 'center',
        nameGap: 25,
        nameTextStyle: {
            fontWeight: 'bold',
        }
    },
    yAxis: {
        type: 'category',
    },
    tooltip: sharedOptions.basic.tooltip,
    options: [
        @foreach ($diagramsOptions['capacityMonthly']['options'] as $m)
        {
            yAxis: {
                data: {!! json_encode($m['months']) !!}
            },
            series: [{
                type: 'bar',
                itemStyle: {
                    color: sharedOptions.basic.color[3]
                },
                data: {!! json_encode($m['capacities']) !!}
            }]
        },
        @endforeach
    ]
};

cCapacityMonthly.setOption(cCapacityMonthlyOptions);

const chartCapacityDaily = document.getElementById('chart-capacity-daily');
const cCapacityDaily = echarts.init(chartCapacityDaily);

const cCapacityDailyOptions = {
    timeline: {
        show: false,
        data: {!! $diagramsOptions['capacity']['xAxis']['data'] !!},
        axisType: 'time',
    },
    tooltip: sharedOptions.basic.tooltip,
    grid: {
        top: 5,
        bottom: 40,
        left: 70,
        right: 10
    },
    xAxis: {
        type: 'value',
        max: 100,
        name: 'Verfügbarkeit in %',
        nameLocation: 'center',
        nameGap: 25,
        nameTextStyle: {
            fontWeight: 'bold',
        }
    },
    yAxis: {
        type: 'category',
    },
    options: [
        @foreach ($diagramsOptions['capacityDaily']['options'] as $d)
        {
            yAxis: {
                data: {!! json_encode($d['weekdays']) !!}
            },
            series: [{
                type: 'bar',
                itemStyle: {
                    color: sharedOptions.basic.color[3]
                },
                data: {!! json_encode($d['capacities']) !!}
            }]
        },
        @endforeach
    ]
};

cCapacityDaily.setOption(cCapacityDailyOptions);

const chartPrediction = document.getElementById('chart-prediction');
const cPrediction = echarts.init(chartPrediction);

const cPredictionOptions = {
    color: [sharedOptions.basic.color[0], sharedOptions.basic.color[4], sharedOptions.basic.color[5]],
    timeline: {
        show: false,
        data: {!! $diagramsOptions['capacity']['xAxis']['data'] !!},
        axisType: 'time',
        replaceMerge: ['graphic', 'series']
    },
    legend: {
        show: true
    },
    tooltip: sharedOptions.basic.tooltip,
    grid: {
        top: 20,
        left: 25,
        right: 10,
        bottom: 20,
        containLabel: true
    },
    xAxis: {
        type: 'category',
        boundaryGap: false,
        name: 'Zeitpunkt Beobachtung',
        nameLocation: 'center',
        nameGap: 24,
        nameTextStyle: {
            fontWeight: 'bold',
        },
    },
    yAxis: {
        type: 'value',
        min: 0,
        max: 100,
        name: 'Verfügbarkeit in %',
        nameLocation: 'center',
        nameGap: 38,
        nameTextStyle: {
            fontWeight: 'bold',
        }
    },
    options: [
        @foreach ($diagramsOptions['predictions']['options'] as $p)
        @if($p === null)
        {
            graphic: {
                elements: [
                    {
                        type: 'text',
                        left: 'center',
                        top: 'center',
                        style: {
                            text: 'Keine Daten für Zeitspanne',
                            fontSize: 44,
                            fontWeight: 'bold',
                        }
                    }
                ]
            }
        },
        @else
        {
            color: sharedOptions.basic.color,
            graphic: {
                elements: []
            },
            xAxis: {
                data: {!! json_encode($p['dates']) !!}
            },
            series: [
                {
                    name: 'Gleitender Mittelwert',
                    showSymbol: false,
                    connectNulls: true,
                    type: 'line',
                    symbolSize: 7,
                    data: {!! json_encode($p['capacities_moving_average']) !!}
                },
                {
                    name: 'Ausgangsdaten',
                    showSymbol: false,
                    connectNulls: true,
                    type: 'line',
                    symbolSize: 7,
                    data: {!! json_encode($p['capacities_timeframe_before']) !!}
                },
                {
                    name: 'Vergleichsdaten',
                    showSymbol: false,
                    connectNulls: true,
                    type: 'line',
                    symbolSize: 7,
                    data: {!! json_encode($p['capacities_timeframe_after']) !!}
                }
            ]
        },
        @endif
        @endforeach
    ]
};

cPrediction.setOption(cPredictionOptions);

const chartHeatmap = document.getElementById('chart-heatmap');
const cHeatmap = echarts.init(chartHeatmap);
const cHeatmapOptions = {
    animation: false,
    tooltip: {
        position: 'top'
    },
    grid: {
        show: true,
        borderWidth: 1,
        borderColor: '#aaa',
        top: 30,
        right: 45,
        bottom: 70,
        left: 30
    },
    dataZoom: [{
        type: 'slider'
    },
    {
        type: 'slider',
        show: true,
        yAxisIndex: 0,
    }],
    xAxis: {
        show: true,
        name: 'Zeitpunkt Beobachtung',
        type: 'category',
        data: {!! $diagramsOptions['heatmap']['xAxis']['data'] !!},
        splitArea: {
            show: false
        },
        splitArea: {
            show: false
        },
        axisLabel: {
            show: false,
        },
        axisTick: {
            show: false,
        },
        axisLine: {
            show: false,
        },
        nameLocation: 'center',
        nameGap: 10,
        nameTextStyle: {
            fontWeight: 'bold',
        }
    },
    yAxis: {
        show: true,
        type: 'category',
        data: {!! $diagramsOptions['heatmap']['yAxis']['data'] !!},
        splitArea: {
            show: false
        },
        axisTick: {
            show: false,
        },
        axisLine: {
            show: false,
        },
        axisLabel: {
            show: false,
        },
        name: 'Mietobjekte',
        nameLocation: 'center',
        nameGap: 10,
        nameTextStyle: {
            fontWeight: 'bold',
        }
    },
    visualMap: {
        type: 'piecewise',
        min: 0,
        max: 100,
        calculable: true,
        orient: 'horizontal',
        left: 'center',
        top: 0,
        formatter: (v1, v2) => {
            return `${v1} – ${v2} %`;
        },
        inRange: {
            color: sharedOptions.basic.color,
        },
    },
    series: [
        {
            name: 'Verfügbarkeit',
            type: 'heatmap',
            blurSize: 0,
            data: {!! $diagramsOptions['heatmap']['series']['data'] !!},
            label: {
                show: false
            },
            tooltip: {
                formatter: (data) => {
                    return `Kurzzeitmietobjekte-ID: ${data.data[1]}<br />Beobachtungszeitpunkt: ${data.data[0]}<br/>Verfügbarkeit: ${data.data[2].toFixed(2)} %`
                },
            },
            emphasis: {
                itemStyle: {
                    borderColor: '#000',
                    borderWidth: 2
                }
            }
        }
    ]
}

cHeatmap.setOption(cHeatmapOptions);

const chartTimeline = document.getElementById('timeline');
const cTimeline = echarts.init(chartTimeline);

const cTimelineOptions = {
    grid: {
        show: false,
    },
    timeline: {
        data: {!! $diagramsOptions['capacity']['xAxis']['data'] !!},
        playInterval: 2000,
        axisType: 'time',
        left: 8,
        right: 8,
        bottom: 0,
        label: {
            show: false
        }
    },
};

cTimeline.setOption(cTimelineOptions);

cTimeline.on('timelinechanged', (e) => {

    let dateTitles = document.querySelectorAll('span.date');
    dateTitles.forEach(el => {
        el.innerText = cTimelineOptions.timeline.data[e.currentIndex];
    });

    // Set markpoint on linechart
    let x = cCapacityOptions.xAxis.data[e.currentIndex];
    let y = cCapacityOptions.series[0].data[e.currentIndex];

    cCapacityMonthly.dispatchAction({
        type: 'timelineChange',
        currentIndex: e.currentIndex
    });

    cCapacityDaily.dispatchAction({
        type: 'timelineChange',
        currentIndex: e.currentIndex
    });

    cPrediction.dispatchAction({
        type: 'timelineChange',
        currentIndex: e.currentIndex
    });

    cCapacity.setOption({
        series: {
            markPoint: {
                data: [{
                    coord: [x, y]
                }]
            }
        }
    });

})

document.querySelector('header').addEventListener('click', () => {
    console.log('test');
    cCapacityMonthly.dispatchAction({
        type: 'timelineChange',
        currentIndex: 10
    });
})

cCapacity.on('click', 'series', (e) => {

    // Switch to correct calendar in the timeline
    cTimeline.dispatchAction({
        type: 'timelineChange',
        currentIndex: e.dataIndex
    });

});

cHeatmap.on('click', 'series', (e) => {
    window.open(`/property/${e.value[1]}?date=${e.value[0]}`, '_self');
})

</script>
@endsection
@ -1,228 +0,0 @@
|
||||||
<?php
|
|
||||||
|
|
||||||
use Illuminate\Support\Facades\Route;
|
|
||||||
use App\Api;
|
|
||||||
use App\Chart;
|
|
||||||
|
|
||||||
Route::get('/', function () {
|
|
||||||
|
|
||||||
$regionBase = Api::regionBase(-1);
|
|
||||||
$regionPropertiesCapacities = Api::regionPropertiesCapacities(-1);
|
|
||||||
$propertiesGrowth = Api::propertiesGrowth();
|
|
||||||
$regions = Api::regions()['regions'];
|
|
||||||
$propertiesPerRegion = $regions;
|
|
||||||
$regions[] = ['name' => 'Alle Regionen', 'id' => -1];
|
|
||||||
$propertiesGeo = Api::propertiesGeo()['properties'];
|
|
||||||
|
|
||||||
$heatmapValues = [];
|
|
||||||
|
|
||||||
foreach ($regionPropertiesCapacities['values'] as $el) {
|
|
||||||
$heatmapValues[] = array_values($el);
|
|
||||||
}
|
|
||||||
|
|
||||||
$diagramsOptions = [
|
|
||||||
"shared" => [
|
|
||||||
"extractionDates" => json_encode($regionPropertiesCapacities['dates']),
|
|
||||||
"colors" => Chart::colors()
|
|
||||||
],
|
|
||||||
"heatmap" => [
|
|
||||||
"yAxis" => [
|
|
||||||
"data" => json_encode($regionPropertiesCapacities['property_ids'])
|
|
||||||
],
|
|
||||||
"series" => [
|
|
||||||
"data" => json_encode($heatmapValues)
|
|
||||||
]
|
|
||||||
],
|
|
||||||
"propertiesPerRegion" => [
|
|
||||||
"yAxis" => [
|
|
||||||
"data" => json_encode(array_column($propertiesPerRegion, 'count_properties'))
|
|
||||||
],
|
|
||||||
"xAxis" => [
|
|
||||||
"data" => json_encode(array_column($propertiesPerRegion, 'name'))
|
|
||||||
]
|
|
||||||
],
|
|
||||||
"extractions" => [
|
|
||||||
"series" => $propertiesGrowth,
|
|
||||||
]
|
|
||||||
];
|
|
||||||
|
|
||||||
return view('overview', [
|
|
||||||
"regions" => $regions,
|
|
||||||
"region" => $regionBase,
|
|
||||||
"diagramsOptions" => $diagramsOptions,
|
|
||||||
"geo" => $propertiesGeo,
|
|
||||||
|
|
||||||
]);
|
|
||||||
});
|
|
||||||
|
|
||||||
Route::get('/region/{id}', function (int $id) {
|
|
||||||
|
|
||||||
$regions = Api::regions()['regions'];
|
|
||||||
$regions[] = ['name' => 'Alle Regionen', 'id' => -1];
|
|
||||||
$region = $id >= 0 ? Api::regionBase($id) : ['name' => 'Alle Regionen'];
|
|
||||||
$regionPropertiesCapacities = Api::regionPropertiesCapacities($id);
|
|
||||||
$regionCapacitiesRegion = Api::regionCapacities($id);
|
|
||||||
$regionCapacitiesAll = Api::regionCapacities(-1);
|
|
||||||
$regionCapacitiesMonthly = [];
|
|
||||||
$regionCapacitiesDaily = [];
|
|
||||||
$regionPredictions = [];
|
|
||||||
|
|
||||||
$heatmapValues = [];
|
|
||||||
|
|
||||||
foreach ($regionPropertiesCapacities['values'] as $el) {
|
|
||||||
$heatmapValues[] = array_values($el);
|
|
||||||
}
|
|
||||||
|
|
||||||
foreach ($regionCapacitiesRegion['dates'] as $date) {
|
|
||||||
$regionCapacitiesMonthly[] = Api::regionCapacitiesMonthly($id, $date);
|
|
||||||
$regionCapacitiesDaily[] = Api::regionCapacitiesDaily($id, $date);
|
|
||||||
$regionPredictions[] = Api::regionMovingAverage($id, $date);
|
|
||||||
}
|
|
||||||
|
|
||||||
$diagramsOptions = [
|
|
||||||
"shared" => [
|
|
||||||
"extractionDates" => json_encode($regionPropertiesCapacities['dates']),
|
|
||||||
"colors" => Chart::colors()
|
|
||||||
],
|
|
||||||
"heatmap" => [
|
|
||||||
"xAxis" => [
|
|
||||||
"data" => json_encode($regionPropertiesCapacities['dates'])
|
|
||||||
],
|
|
||||||
"yAxis" => [
|
|
||||||
"data" => json_encode($regionPropertiesCapacities['property_ids'])
|
|
||||||
],
|
|
||||||
"series" => [
|
|
||||||
"data" => json_encode($heatmapValues)
|
|
||||||
]
|
|
||||||
],
|
|
||||||
"predictions" => [
|
|
||||||
"options" => $regionPredictions,
|
|
||||||
],
|
|
||||||
"capacityMonthly" => [
|
|
||||||
"options" => $regionCapacitiesMonthly,
|
|
||||||
],
|
|
||||||
"capacityDaily" => [
|
|
||||||
"options" => $regionCapacitiesDaily,
|
|
||||||
],
|
|
||||||
"capacity" => [
|
|
||||||
"xAxis" => [
|
|
||||||
"data" => json_encode($regionCapacitiesRegion['dates'])
|
|
||||||
],
|
|
||||||
"series" => [
|
|
||||||
"all" => [
|
|
||||||
"data" => json_encode($regionCapacitiesAll['capacities'])
|
|
||||||
],
|
|
||||||
"region" => [
|
|
||||||
"data" => json_encode($regionCapacitiesRegion['capacities'])
|
|
||||||
]
|
|
||||||
]
|
|
||||||
]
|
|
||||||
];
|
|
||||||
|
|
||||||
return view('region', [
|
|
||||||
'diagramsOptions' => $diagramsOptions,
|
|
||||||
'startDate' => $regionCapacitiesRegion['dates'][0],
|
|
||||||
'regions' => $regions,
|
|
||||||
'region' => $region,
|
|
||||||
'regionId' => $id,
|
|
||||||
'regionPropertiesCapacities' => $regionPropertiesCapacities,
|
|
||||||
'predictions' => $regionPredictions]);
|
|
||||||
|
|
||||||
|
|
||||||
});
|
|
||||||
|
|
||||||
Route::get('/property/{id}', function (int $id) {
|
|
||||||
|
|
||||||
|
|
||||||
$regions = Api::regions()['regions'];
|
|
||||||
$regions[] = ['name' => 'Alle Regionen', 'id' => -1];
|
|
||||||
|
|
||||||
$base = Api::propertyBase($id);
|
|
||||||
$calendars = Api::propertyExtractions($id)['extractions'];
|
|
||||||
$propertyCapacities = Api::propertyCapacities($id);
|
|
||||||
$propertyNeighbours = Api::propertyNeighbours($id)['neighbours'];
|
|
||||||
$regionCapacitiesRegion = Api::regionCapacities($base['region_id']);
|
|
||||||
$regionCapacitiesAll = Api::regionCapacities(-1);
|
|
||||||
$regionCapacities = [[],[]];
|
|
||||||
|
|
||||||
$propertyCapacitiesMonthly = [];
|
|
||||||
$propertyCapacitiesDaily = [];
|
|
||||||
|
|
||||||
if($propertyCapacities){
|
|
||||||
foreach ($propertyCapacities['dates'] as $date) {
|
|
||||||
$propertyCapacitiesMonthly[] = Api::propertyCapacitiesMonthly($id, $date);
|
|
||||||
$propertyCapacitiesDaily[] = Api::propertyCapacitiesDaily($id, $date);
|
|
||||||
}
|
|
||||||
|
|
||||||
// filter out all dates that were not scraped for the property
|
|
||||||
foreach ($regionCapacitiesAll['dates'] as $index => $date) {
|
|
||||||
if(in_array($date, $propertyCapacities['dates'])){
|
|
||||||
$regionCapacities[0][] = $regionCapacitiesAll['capacities'][$index];
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
foreach ($regionCapacitiesRegion['dates'] as $index => $date) {
|
|
||||||
if(in_array($date, $propertyCapacities['dates'])){
|
|
||||||
$regionCapacities[1][] = $regionCapacitiesRegion['capacities'][$index];
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}else{
|
|
||||||
return view('property-nodata', [
|
|
||||||
'base' => $base,
|
|
||||||
'regions' => $regions,
|
|
||||||
]);
|
|
||||||
}
|
|
||||||
|
|
||||||
// prepare data for calendar chart
|
|
||||||
$calendarData = [];
|
|
||||||
|
|
||||||
foreach ($calendars as $el) {
|
|
||||||
|
|
||||||
$series = [];
|
|
||||||
$calendar = json_decode($el['calendar'], 1);
|
|
||||||
|
|
||||||
foreach ($calendar as $date => $status) {
|
|
||||||
$series[] = [$date, $status];
|
|
||||||
}
|
|
||||||
|
|
||||||
$calendarData[] = $series;
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
$diagramsOptions = [
|
|
||||||
"shared" => [
|
|
||||||
"colors" => Chart::colors(),
|
|
||||||
"extractiondates" => json_encode($propertyCapacities['dates'])
|
|
||||||
],
|
|
||||||
"calendar" => [
|
|
||||||
"series" => $calendarData
|
|
||||||
],
|
|
||||||
"capacities" => [
|
|
||||||
"xAxis" => [
|
|
||||||
"data" => json_encode($propertyCapacities['dates'])
|
|
||||||
],
|
|
||||||
"series" => [
|
|
||||||
["data" => json_encode($propertyCapacities['capacities'])],
|
|
||||||
["data" => json_encode($regionCapacities[0])],
|
|
||||||
["data" => json_encode($regionCapacities[1])],
|
|
||||||
]
|
|
||||||
],
|
|
||||||
"capacityMonthly" => [
|
|
||||||
"options" => $propertyCapacitiesMonthly,
|
|
||||||
],
|
|
||||||
"capacityDaily" => [
|
|
||||||
"options" => $propertyCapacitiesDaily,
|
|
||||||
],
|
|
||||||
];
|
|
||||||
|
|
||||||
return view('property', [
|
|
||||||
'diagramsOptions' => $diagramsOptions,
|
|
||||||
'startDate' => $propertyCapacities['dates'][0],
|
|
||||||
'base' => $base,
|
|
||||||
'regions' => $regions,
|
|
||||||
'neighbours' => $propertyNeighbours
|
|
||||||
]);
|
|
||||||
|
|
||||||
});
|
|
||||||
|
|
||||||
|
|
|
@ -1,142 +0,0 @@
|
||||||
<mxfile host="app.diagrams.net" agent="Mozilla/5.0 (X11; Linux x86_64; rv:134.0) Gecko/20100101 Firefox/134.0" version="26.0.6">
|
|
||||||
<diagram name="Seite-1" id="WNMV8rePnVf-2Vz_xhjt">
|
|
||||||
<mxGraphModel dx="1937" dy="1185" grid="1" gridSize="10" guides="1" tooltips="1" connect="1" arrows="1" fold="1" page="1" pageScale="1" pageWidth="827" pageHeight="1169" math="0" shadow="0">
|
|
||||||
<root>
|
|
||||||
<mxCell id="0" />
|
|
||||||
<mxCell id="1" parent="0" />
|
|
||||||
<mxCell id="e6qn9whkbaCBCFCjUvdY-7" value="" style="rounded=0;whiteSpace=wrap;html=1;strokeColor=none;fillColor=#F5F5F5;" vertex="1" parent="1">
|
|
||||||
<mxGeometry x="10" y="420" width="1070" height="690" as="geometry" />
|
|
||||||
</mxCell>
|
|
||||||
<object placeholders="1" c4Name="ETL" c4Type="ContainerScopeBoundary" c4Application="Component" label="<font style="font-size: 16px"><b><div style="text-align: left">%c4Name%</div></b></font><div style="text-align: left">[%c4Application%]</div>" id="0Mexl9jQAquWokRCgHYt-11">
|
|
||||||
<mxCell style="rounded=1;fontSize=11;whiteSpace=wrap;html=1;dashed=1;arcSize=20;fillColor=default;strokeColor=#666666;fontColor=#333333;labelBackgroundColor=none;align=left;verticalAlign=bottom;labelBorderColor=none;spacingTop=0;spacing=10;dashPattern=8 4;metaEdit=1;rotatable=0;perimeter=rectanglePerimeter;noLabel=0;labelPadding=0;allowArrows=0;connectable=0;expand=0;recursiveResize=0;editable=1;pointerEvents=0;absoluteArcSize=1;points=[[0.25,0,0],[0.5,0,0],[0.75,0,0],[1,0.25,0],[1,0.5,0],[1,0.75,0],[0.75,1,0],[0.5,1,0],[0.25,1,0],[0,0.75,0],[0,0.5,0],[0,0.25,0]];" parent="1" vertex="1">
|
|
||||||
<mxGeometry x="30" y="440" width="1030" height="500" as="geometry" />
|
|
||||||
</mxCell>
|
|
||||||
</object>
|
|
||||||
<object placeholders="1" c4Name="Datenbank Aggregation" c4Type="Container" c4Technology="MySQL" c4Description="Datenbank welche während Aggregation verwendet wurde." label="<font style="font-size: 16px"><b>%c4Name%</b></font><div>[%c4Type%:&nbsp;%c4Technology%]</div><br><div><font style="font-size: 11px"><font color="#E6E6E6">%c4Description%</font></div>" id="0Mexl9jQAquWokRCgHYt-1">
|
|
||||||
<mxCell style="shape=cylinder3;size=15;whiteSpace=wrap;html=1;boundedLbl=1;rounded=0;labelBackgroundColor=none;fillColor=#23A2D9;fontSize=12;fontColor=#ffffff;align=center;strokeColor=#0E7DAD;metaEdit=1;points=[[0.5,0,0],[1,0.25,0],[1,0.5,0],[1,0.75,0],[0.5,1,0],[0,0.75,0],[0,0.5,0],[0,0.25,0]];resizable=0;" parent="1" vertex="1">
|
|
||||||
<mxGeometry x="40" y="100" width="240" height="120" as="geometry" />
|
|
||||||
</mxCell>
|
|
||||||
</object>
|
|
||||||
<object placeholders="1" c4Name="Datenbank Analyse" c4Type="Container" c4Technology="DuckDB" c4Description="Datenbank, welcher für die Analysen<br> verwendet wurden." label="<font style="font-size: 16px"><b>%c4Name%</b></font><div>[%c4Type%:&nbsp;%c4Technology%]</div><br><div><font style="font-size: 11px"><font color="#E6E6E6">%c4Description%</font></div>" id="0Mexl9jQAquWokRCgHYt-2">
|
|
||||||
<mxCell style="shape=cylinder3;size=15;whiteSpace=wrap;html=1;boundedLbl=1;rounded=0;labelBackgroundColor=none;fillColor=#23A2D9;fontSize=12;fontColor=#ffffff;align=center;strokeColor=#0E7DAD;metaEdit=1;points=[[0.5,0,0],[1,0.25,0],[1,0.5,0],[1,0.75,0],[0.5,1,0],[0,0.75,0],[0,0.5,0],[0,0.25,0]];resizable=0;" parent="1" vertex="1">
|
|
||||||
<mxGeometry x="790" y="100" width="240" height="120" as="geometry" />
|
|
||||||
</mxCell>
|
|
||||||
</object>
|
|
||||||
<mxCell id="0Mexl9jQAquWokRCgHYt-5" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0;exitY=0.5;exitDx=0;exitDy=0;exitPerimeter=0;dashed=1;dashPattern=8 8;" parent="1" source="0Mexl9jQAquWokRCgHYt-3" target="0Mexl9jQAquWokRCgHYt-1" edge="1">
|
|
||||||
<mxGeometry relative="1" as="geometry" />
|
|
||||||
</mxCell>
|
|
||||||
<mxCell id="0Mexl9jQAquWokRCgHYt-7" value="liest Datenbank" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" parent="0Mexl9jQAquWokRCgHYt-5" vertex="1" connectable="0">
|
|
||||||
<mxGeometry x="-0.2497" y="-1" relative="1" as="geometry">
|
|
||||||
<mxPoint x="-10" y="1" as="offset" />
|
|
||||||
</mxGeometry>
|
|
||||||
</mxCell>
|
|
||||||
<object placeholders="1" c4Name="Sling" c4Type="sling-cli" c4Description="Kommandozeilenprogramm zur Migration von Datensätzen." label="<font style="font-size: 16px"><b>%c4Name%</b></font><div>[%c4Type%]</div><br><div><font style="font-size: 11px"><font color="#cccccc">%c4Description%</font></div>" id="0Mexl9jQAquWokRCgHYt-3">
|
|
||||||
<mxCell style="rounded=1;whiteSpace=wrap;html=1;labelBackgroundColor=none;fillColor=#1061B0;fontColor=#ffffff;align=center;arcSize=10;strokeColor=#0D5091;metaEdit=1;resizable=0;points=[[0.25,0,0],[0.5,0,0],[0.75,0,0],[1,0.25,0],[1,0.5,0],[1,0.75,0],[0.75,1,0],[0.5,1,0],[0.25,1,0],[0,0.75,0],[0,0.5,0],[0,0.25,0]];" parent="1" vertex="1">
|
|
||||||
<mxGeometry x="400" y="100" width="240" height="120" as="geometry" />
|
|
||||||
</mxCell>
|
|
||||||
</object>
|
|
||||||
<mxCell id="0Mexl9jQAquWokRCgHYt-6" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;exitPerimeter=0;entryX=0;entryY=0.5;entryDx=0;entryDy=0;entryPerimeter=0;dashed=1;dashPattern=8 8;" parent="1" source="0Mexl9jQAquWokRCgHYt-3" target="0Mexl9jQAquWokRCgHYt-2" edge="1">
|
|
||||||
<mxGeometry relative="1" as="geometry" />
|
|
||||||
</mxCell>
|
|
||||||
<mxCell id="0Mexl9jQAquWokRCgHYt-8" value="schreibt in Datenbank" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" parent="0Mexl9jQAquWokRCgHYt-6" vertex="1" connectable="0">
|
|
||||||
<mxGeometry x="-0.1744" relative="1" as="geometry">
|
|
||||||
<mxPoint x="12" y="-1" as="offset" />
|
|
||||||
</mxGeometry>
|
|
||||||
</mxCell>
|
|
||||||
<object placeholders="1" c4Name="Preprocessing" c4Type="ContainerScopeBoundary" c4Application="Component" label="<font style="font-size: 16px"><b><div style="text-align: left">%c4Name%</div></b></font><div style="text-align: left">[%c4Application%]</div>" id="0Mexl9jQAquWokRCgHYt-9">
|
|
||||||
<mxCell style="rounded=1;fontSize=11;whiteSpace=wrap;html=1;dashed=1;arcSize=20;fillColor=none;strokeColor=#666666;fontColor=#333333;labelBackgroundColor=none;align=left;verticalAlign=bottom;labelBorderColor=none;spacingTop=0;spacing=10;dashPattern=8 4;metaEdit=1;rotatable=0;perimeter=rectanglePerimeter;noLabel=0;labelPadding=0;allowArrows=0;connectable=0;expand=0;recursiveResize=0;editable=1;pointerEvents=0;absoluteArcSize=1;points=[[0.25,0,0],[0.5,0,0],[0.75,0,0],[1,0.25,0],[1,0.5,0],[1,0.75,0],[0.75,1,0],[0.5,1,0],[0.25,1,0],[0,0.75,0],[0,0.5,0],[0,0.25,0]];" parent="1" vertex="1">
|
|
||||||
<mxGeometry x="20" y="40" width="1030" height="270" as="geometry" />
|
|
||||||
</mxCell>
|
|
||||||
</object>
|
|
||||||
<object placeholders="1" c4Name="Datenbank" c4Type="Container" c4Technology="DuckDB" c4Description="Datenbank, welcher für die Analysen<br> verwendet wurden." label="<font style="font-size: 16px"><b>%c4Name%</b></font><div>[%c4Type%:&nbsp;%c4Technology%]</div><br><div><font style="font-size: 11px"><font color="#E6E6E6">%c4Description%</font></div>" id="0Mexl9jQAquWokRCgHYt-10">
|
|
||||||
<mxCell style="shape=cylinder3;size=15;whiteSpace=wrap;html=1;boundedLbl=1;rounded=0;labelBackgroundColor=none;fillColor=#23A2D9;fontSize=12;fontColor=#ffffff;align=center;strokeColor=#0E7DAD;metaEdit=1;points=[[0.5,0,0],[1,0.25,0],[1,0.5,0],[1,0.75,0],[0.5,1,0],[0,0.75,0],[0,0.5,0],[0,0.25,0]];resizable=0;" parent="1" vertex="1">
|
|
||||||
<mxGeometry x="80" y="480" width="240" height="120" as="geometry" />
|
|
||||||
</mxCell>
|
|
||||||
</object>
|
|
||||||
<mxCell id="0Mexl9jQAquWokRCgHYt-23" style="rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;exitPerimeter=0;entryX=0;entryY=0.5;entryDx=0;entryDy=0;entryPerimeter=0;dashed=1;dashPattern=8 8;" parent="1" source="0Mexl9jQAquWokRCgHYt-12" target="0Mexl9jQAquWokRCgHYt-14" edge="1">
|
|
||||||
<mxGeometry relative="1" as="geometry" />
|
|
||||||
</mxCell>
|
|
||||||
<mxCell id="0Mexl9jQAquWokRCgHYt-24" value="verwendet" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];fontSize=14;" parent="0Mexl9jQAquWokRCgHYt-23" vertex="1" connectable="0">
|
|
||||||
<mxGeometry x="-0.0114" y="-2" relative="1" as="geometry">
|
|
||||||
<mxPoint y="-2" as="offset" />
|
|
||||||
</mxGeometry>
|
|
||||||
</mxCell>
|
|
||||||
<object placeholders="1" c4Name="etl_*.py" c4Type="Python (Polars)" c4Description="Diverse Python Skripts zur Aufbereitung / Zusammenstellung der Daten." label="<font style="font-size: 16px"><b>%c4Name%</b></font><div>[%c4Type%]</div><br><div><font style="font-size: 11px"><font color="#cccccc">%c4Description%</font></div>" id="0Mexl9jQAquWokRCgHYt-12">
|
|
||||||
<mxCell style="rounded=1;whiteSpace=wrap;html=1;labelBackgroundColor=none;fillColor=#1061B0;fontColor=#ffffff;align=center;arcSize=10;strokeColor=#0D5091;metaEdit=1;resizable=0;points=[[0.25,0,0],[0.5,0,0],[0.75,0,0],[1,0.25,0],[1,0.5,0],[1,0.75,0],[0.75,1,0],[0.5,1,0],[0.25,1,0],[0,0.75,0],[0,0.5,0],[0,0.25,0]];" parent="1" vertex="1">
|
|
||||||
<mxGeometry x="430" y="710" width="240" height="120" as="geometry" />
|
|
||||||
</mxCell>
|
|
||||||
</object>
|
|
||||||
<mxCell id="0Mexl9jQAquWokRCgHYt-16" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=0;exitDx=0;exitDy=0;exitPerimeter=0;dashed=1;dashPattern=8 8;" parent="1" source="0Mexl9jQAquWokRCgHYt-13" target="0Mexl9jQAquWokRCgHYt-10" edge="1">
|
|
||||||
<mxGeometry relative="1" as="geometry" />
|
|
||||||
</mxCell>
|
|
||||||
<mxCell id="0Mexl9jQAquWokRCgHYt-17" value="liest Datenbank" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];fontSize=14;" parent="0Mexl9jQAquWokRCgHYt-16" vertex="1" connectable="0">
|
|
||||||
<mxGeometry x="-0.1633" relative="1" as="geometry">
|
|
||||||
<mxPoint as="offset" />
|
|
||||||
</mxGeometry>
|
|
||||||
</mxCell>
|
|
||||||
<object placeholders="1" c4Name="database.py" c4Type="Python (DuckDB Interface)" c4Description="Wrapper Skript zum Ausführen von SQL." label="<font style="font-size: 16px"><b>%c4Name%</b></font><div>[%c4Type%]</div><br><div><font style="font-size: 11px"><font color="#cccccc">%c4Description%</font></div>" id="0Mexl9jQAquWokRCgHYt-13">
|
|
||||||
<mxCell style="rounded=1;whiteSpace=wrap;html=1;labelBackgroundColor=none;fillColor=#1061B0;fontColor=#ffffff;align=center;arcSize=10;strokeColor=#0D5091;metaEdit=1;resizable=0;points=[[0.25,0,0],[0.5,0,0],[0.75,0,0],[1,0.25,0],[1,0.5,0],[1,0.75,0],[0.75,1,0],[0.5,1,0],[0.25,1,0],[0,0.75,0],[0,0.5,0],[0,0.25,0]];" parent="1" vertex="1">
|
|
||||||
<mxGeometry x="80" y="710" width="240" height="120" as="geometry" />
|
|
||||||
</mxCell>
|
|
||||||
</object>
|
|
||||||
<mxCell id="0Mexl9jQAquWokRCgHYt-18" style="rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.25;exitY=0;exitDx=0;exitDy=0;exitPerimeter=0;dashed=1;dashPattern=8 8;entryX=0.24;entryY=0.981;entryDx=0;entryDy=0;entryPerimeter=0;" parent="1" source="0Mexl9jQAquWokRCgHYt-14" target="0Mexl9jQAquWokRCgHYt-15" edge="1">
|
|
||||||
<mxGeometry relative="1" as="geometry">
|
|
||||||
<mxPoint x="900" y="600" as="targetPoint" />
|
|
||||||
</mxGeometry>
|
|
||||||
</mxCell>
|
|
||||||
<mxCell id="0Mexl9jQAquWokRCgHYt-19" value="schreibt pickle objekt" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];fontSize=14;" parent="0Mexl9jQAquWokRCgHYt-18" vertex="1" connectable="0">
|
|
||||||
<mxGeometry x="-0.1818" y="2" relative="1" as="geometry">
|
|
||||||
<mxPoint as="offset" />
|
|
||||||
</mxGeometry>
|
|
||||||
</mxCell>
|
|
||||||
<object placeholders="1" c4Name="etl_cache.py" c4Type="Python (Pickle)" c4Description="Diverse Python Skripts zur Aufbereitung / Zusammenstellung der Daten." label="<font style="font-size: 16px"><b>%c4Name%</b></font><div>[%c4Type%]</div><br><div><font style="font-size: 11px"><font color="#cccccc">%c4Description%</font></div>" id="0Mexl9jQAquWokRCgHYt-14">
|
|
||||||
<mxCell style="rounded=1;whiteSpace=wrap;html=1;labelBackgroundColor=none;fillColor=#1061B0;fontColor=#ffffff;align=center;arcSize=10;strokeColor=#0D5091;metaEdit=1;resizable=0;points=[[0.25,0,0],[0.5,0,0],[0.75,0,0],[1,0.25,0],[1,0.5,0],[1,0.75,0],[0.75,1,0],[0.5,1,0],[0.25,1,0],[0,0.75,0],[0,0.5,0],[0,0.25,0]];" parent="1" vertex="1">
|
|
||||||
<mxGeometry x="780" y="710" width="240" height="120" as="geometry" />
|
|
||||||
</mxCell>
|
|
||||||
</object>
|
|
||||||
<object placeholders="1" c4Name="Cache" c4Type="Container" c4Technology="Filesystem" c4Description="Das Dateisystem wird als Pufferspeicher verwendet." label="<font style="font-size: 16px"><b>%c4Name%</b></font><div>[%c4Type%:&nbsp;%c4Technology%]</div><br><div><font style="font-size: 11px"><font color="#E6E6E6">%c4Description%</font></div>" id="0Mexl9jQAquWokRCgHYt-15">
|
|
||||||
<mxCell style="shape=cylinder3;size=15;whiteSpace=wrap;html=1;boundedLbl=1;rounded=0;labelBackgroundColor=none;fillColor=#23A2D9;fontSize=12;fontColor=#ffffff;align=center;strokeColor=#0E7DAD;metaEdit=1;points=[[0.5,0,0],[1,0.25,0],[1,0.5,0],[1,0.75,0],[0.5,1,0],[0,0.75,0],[0,0.5,0],[0,0.25,0]];resizable=0;" parent="1" vertex="1">
|
|
||||||
<mxGeometry x="780" y="480" width="240" height="120" as="geometry" />
|
|
||||||
</mxCell>
|
|
||||||
</object>
|
|
||||||
<mxCell id="0Mexl9jQAquWokRCgHYt-20" style="rounded=0;orthogonalLoop=1;jettySize=auto;html=1;entryX=0.746;entryY=1.002;entryDx=0;entryDy=0;entryPerimeter=0;dashed=1;dashPattern=8 8;exitX=0.75;exitY=0;exitDx=0;exitDy=0;exitPerimeter=0;" parent="1" source="0Mexl9jQAquWokRCgHYt-14" target="0Mexl9jQAquWokRCgHYt-15" edge="1">
|
|
||||||
<mxGeometry relative="1" as="geometry" />
|
|
||||||
</mxCell>
|
|
||||||
<mxCell id="0Mexl9jQAquWokRCgHYt-21" value="liest pickle objekt" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];fontSize=14;" parent="0Mexl9jQAquWokRCgHYt-20" vertex="1" connectable="0">
|
|
||||||
<mxGeometry x="-0.1076" y="1" relative="1" as="geometry">
|
|
||||||
<mxPoint x="8" y="-11" as="offset" />
|
|
||||||
</mxGeometry>
|
|
||||||
</mxCell>
|
|
||||||
<mxCell id="0Mexl9jQAquWokRCgHYt-25" style="rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0;exitY=0.5;exitDx=0;exitDy=0;exitPerimeter=0;entryX=1;entryY=0.5;entryDx=0;entryDy=0;entryPerimeter=0;dashed=1;dashPattern=8 8;" parent="1" source="0Mexl9jQAquWokRCgHYt-12" target="0Mexl9jQAquWokRCgHYt-13" edge="1">
|
|
||||||
<mxGeometry relative="1" as="geometry" />
|
|
||||||
</mxCell>
|
|
||||||
<mxCell id="0Mexl9jQAquWokRCgHYt-26" value="verwendet" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];fontSize=14;" parent="0Mexl9jQAquWokRCgHYt-25" vertex="1" connectable="0">
|
|
||||||
<mxGeometry x="0.0473" relative="1" as="geometry">
|
|
||||||
<mxPoint as="offset" />
|
|
||||||
</mxGeometry>
|
|
||||||
</mxCell>
|
|
||||||
<mxCell id="e6qn9whkbaCBCFCjUvdY-3" style="rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=0;exitDx=0;exitDy=0;exitPerimeter=0;entryX=0.5;entryY=1;entryDx=0;entryDy=0;entryPerimeter=0;dashed=1;dashPattern=8 8;" edge="1" parent="1" source="e6qn9whkbaCBCFCjUvdY-1" target="0Mexl9jQAquWokRCgHYt-13">
|
|
||||||
<mxGeometry relative="1" as="geometry" />
|
|
||||||
</mxCell>
|
|
||||||
<mxCell id="e6qn9whkbaCBCFCjUvdY-6" value="führt Funktionen aus" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];fontSize=14;" vertex="1" connectable="0" parent="e6qn9whkbaCBCFCjUvdY-3">
|
|
||||||
<mxGeometry x="0.0906" relative="1" as="geometry">
|
|
||||||
<mxPoint y="1" as="offset" />
|
|
||||||
</mxGeometry>
|
|
||||||
</mxCell>
|
|
||||||
<object placeholders="1" c4Name="FastAPI" c4Type="Python (FastAPI)" c4Description="Stellt die aufbereiteten Daten über eine JSON-Schnittstelle zur Verfügung." label="<font style="font-size: 16px"><b>%c4Name%</b></font><div>[%c4Type%]</div><br><div><font style="font-size: 11px"><font color="#cccccc">%c4Description%</font></div>" id="e6qn9whkbaCBCFCjUvdY-1">
|
|
||||||
<mxCell style="rounded=1;whiteSpace=wrap;html=1;labelBackgroundColor=none;fillColor=#1061B0;fontColor=#ffffff;align=center;arcSize=10;strokeColor=#0D5091;metaEdit=1;resizable=0;points=[[0.25,0,0],[0.5,0,0],[0.75,0,0],[1,0.25,0],[1,0.5,0],[1,0.75,0],[0.75,1,0],[0.5,1,0],[0.25,1,0],[0,0.75,0],[0,0.5,0],[0,0.25,0]];" vertex="1" parent="1">
|
|
||||||
<mxGeometry x="430" y="970" width="240" height="120" as="geometry" />
|
|
||||||
</mxCell>
|
|
||||||
</object>
|
|
||||||
<mxCell id="e6qn9whkbaCBCFCjUvdY-2" style="rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=0;exitDx=0;exitDy=0;exitPerimeter=0;entryX=0.5;entryY=1;entryDx=0;entryDy=0;entryPerimeter=0;dashed=1;dashPattern=8 8;" edge="1" parent="1" source="e6qn9whkbaCBCFCjUvdY-1" target="0Mexl9jQAquWokRCgHYt-12">
|
|
||||||
<mxGeometry relative="1" as="geometry" />
|
|
||||||
</mxCell>
|
|
||||||
<mxCell id="e6qn9whkbaCBCFCjUvdY-5" value="führt Funktionen aus" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];fontSize=14;" vertex="1" connectable="0" parent="e6qn9whkbaCBCFCjUvdY-2">
|
|
||||||
<mxGeometry x="0.0286" y="-1" relative="1" as="geometry">
|
|
||||||
<mxPoint as="offset" />
|
|
||||||
</mxGeometry>
|
|
||||||
</mxCell>
|
|
||||||
</root>
|
|
||||||
</mxGraphModel>
|
|
||||||
</diagram>
|
|
||||||
</mxfile>
|
|
|
@ -1,187 +0,0 @@
|
||||||
<mxfile host="app.diagrams.net" agent="Mozilla/5.0 (X11; Linux x86_64; rv:134.0) Gecko/20100101 Firefox/134.0" version="26.0.6" pages="2">
|
|
||||||
<diagram name="Seite-1" id="chpUGVRRn7alPJZ1I-il">
|
|
||||||
<mxGraphModel dx="1291" dy="790" grid="1" gridSize="10" guides="1" tooltips="1" connect="1" arrows="1" fold="1" page="1" pageScale="1" pageWidth="827" pageHeight="1169" math="0" shadow="0">
|
|
||||||
<root>
|
|
||||||
<mxCell id="0" />
|
|
||||||
<mxCell id="1" parent="0" />
|
|
||||||
<mxCell id="tzVNFCieMdwak3VSEkXc-1" value="" style="rounded=0;whiteSpace=wrap;html=1;strokeColor=none;fillColor=#F5F5F5;" vertex="1" parent="1">
|
|
||||||
<mxGeometry x="10" y="20" width="750" height="780" as="geometry" />
|
|
||||||
</mxCell>
|
|
||||||
<object placeholders="1" c4Name="Visual Analytics Tool" c4Type="SystemScopeBoundary" c4Application="Software System" label="<font style="font-size: 16px"><b><div style="text-align: left">%c4Name%</div></b></font><div style="text-align: left">[%c4Application%]</div>" id="_wAeSdXpbb6KPP4DEc36-23">
|
|
||||||
<mxCell style="rounded=1;fontSize=11;whiteSpace=wrap;html=1;dashed=1;arcSize=20;fillColor=default;strokeColor=#666666;fontColor=#333333;labelBackgroundColor=none;align=left;verticalAlign=bottom;labelBorderColor=none;spacingTop=0;spacing=10;dashPattern=8 4;metaEdit=1;rotatable=0;perimeter=rectanglePerimeter;noLabel=0;labelPadding=0;allowArrows=0;connectable=0;expand=0;recursiveResize=0;editable=1;pointerEvents=0;absoluteArcSize=1;points=[[0.25,0,0],[0.5,0,0],[0.75,0,0],[1,0.25,0],[1,0.5,0],[1,0.75,0],[0.75,1,0],[0.5,1,0],[0.25,1,0],[0,0.75,0],[0,0.5,0],[0,0.25,0]];" parent="1" vertex="1">
|
|
||||||
<mxGeometry x="30" y="40" width="710" height="540" as="geometry" />
|
|
||||||
</mxCell>
|
|
||||||
</object>
|
|
||||||
<object placeholders="1" c4Name="Datenbank" c4Type="Container" c4Technology="DuckDB" c4Description="Aggregierte Daten." label="<font style="font-size: 16px"><b>%c4Name%</b></font><div>[%c4Type%:&nbsp;%c4Technology%]</div><br><div><font style="font-size: 11px"><font color="#E6E6E6">%c4Description%</font></div>" id="_wAeSdXpbb6KPP4DEc36-2">
|
|
||||||
<mxCell style="shape=cylinder3;size=15;whiteSpace=wrap;html=1;boundedLbl=1;rounded=0;labelBackgroundColor=none;fillColor=#23A2D9;fontSize=12;fontColor=#ffffff;align=center;strokeColor=#0E7DAD;metaEdit=1;points=[[0.5,0,0],[1,0.25,0],[1,0.5,0],[1,0.75,0],[0.5,1,0],[0,0.75,0],[0,0.5,0],[0,0.25,0]];resizable=0;" parent="1" vertex="1">
|
|
||||||
<mxGeometry x="50" y="60" width="240" height="120" as="geometry" />
|
|
||||||
</mxCell>
|
|
||||||
</object>
|
|
||||||
<object placeholders="1" c4Name="ETL" c4Type="SQL, Python (Polars)" c4Description="Bereitet Daten mittels algorithmischer<br> Verfahren auf." label="<font style="font-size: 16px"><b>%c4Name%</b></font><div>[%c4Type%]</div><br><div><font style="font-size: 11px"><font color="#cccccc">%c4Description%</font></div>" id="_wAeSdXpbb6KPP4DEc36-3">
|
|
||||||
<mxCell style="rounded=1;whiteSpace=wrap;html=1;labelBackgroundColor=none;fillColor=#1061B0;fontColor=#ffffff;align=center;arcSize=10;strokeColor=#0D5091;metaEdit=1;resizable=0;points=[[0.25,0,0],[0.5,0,0],[0.75,0,0],[1,0.25,0],[1,0.5,0],[1,0.75,0],[0.75,1,0],[0.5,1,0],[0.25,1,0],[0,0.75,0],[0,0.5,0],[0,0.25,0]];" parent="1" vertex="1">
|
|
||||||
<mxGeometry x="480" y="60" width="240" height="120" as="geometry" />
|
|
||||||
</mxCell>
|
|
||||||
</object>
|
|
||||||
<mxCell id="_wAeSdXpbb6KPP4DEc36-4" style="rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0;exitY=0.5;exitDx=0;exitDy=0;exitPerimeter=0;entryX=1;entryY=0.5;entryDx=0;entryDy=0;entryPerimeter=0;dashed=1;dashPattern=8 8;" parent="1" source="_wAeSdXpbb6KPP4DEc36-3" target="_wAeSdXpbb6KPP4DEc36-2" edge="1">
|
|
||||||
<mxGeometry relative="1" as="geometry" />
|
|
||||||
</mxCell>
|
|
||||||
<mxCell id="_wAeSdXpbb6KPP4DEc36-5" value="Liest Datenbank" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" parent="_wAeSdXpbb6KPP4DEc36-4" vertex="1" connectable="0">
|
|
||||||
<mxGeometry x="0.0412" y="1" relative="1" as="geometry">
|
|
||||||
<mxPoint x="-1" y="-1" as="offset" />
|
|
||||||
</mxGeometry>
|
|
||||||
</mxCell>
|
|
||||||
<mxCell id="_wAeSdXpbb6KPP4DEc36-15" style="rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;exitPerimeter=0;dashed=1;dashPattern=8 8;entryX=0;entryY=0.5;entryDx=0;entryDy=0;entryPerimeter=0;" parent="1" source="_wAeSdXpbb6KPP4DEc36-6" target="_wAeSdXpbb6KPP4DEc36-13" edge="1">
|
|
||||||
<mxGeometry relative="1" as="geometry" />
|
|
||||||
</mxCell>
|
|
||||||
<mxCell id="_wAeSdXpbb6KPP4DEc36-21" value="<div>Führt Abfragen aus</div><div>[JSON/HTTPS]<br></div>" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" parent="_wAeSdXpbb6KPP4DEc36-15" vertex="1" connectable="0">
|
|
||||||
<mxGeometry x="-0.0541" y="-1" relative="1" as="geometry">
|
|
||||||
<mxPoint as="offset" />
|
|
||||||
</mxGeometry>
|
|
||||||
</mxCell>
|
|
||||||
<object placeholders="1" c4Name="Webapplikation" c4Type="PHP (Laravel)" c4Description="Verarbeitet Anfragen von Benutzer:innen" label="<font style="font-size: 16px"><b>%c4Name%</b></font><div>[%c4Type%]</div><br><div><font style="font-size: 11px"><font color="#cccccc">%c4Description%</font></div>" id="_wAeSdXpbb6KPP4DEc36-6">
|
|
||||||
<mxCell style="rounded=1;whiteSpace=wrap;html=1;labelBackgroundColor=none;fillColor=#1061B0;fontColor=#ffffff;align=center;arcSize=10;strokeColor=#0D5091;metaEdit=1;resizable=0;points=[[0.25,0,0],[0.5,0,0],[0.75,0,0],[1,0.25,0],[1,0.5,0],[1,0.75,0],[0.75,1,0],[0.5,1,0],[0.25,1,0],[0,0.75,0],[0,0.5,0],[0,0.25,0]];" parent="1" vertex="1">
|
|
||||||
<mxGeometry x="50" y="230" width="240" height="120" as="geometry" />
|
|
||||||
</mxCell>
|
|
||||||
</object>
|
|
||||||
<object placeholders="1" c4Name="Dashboard" c4Type="Container" c4Technology="Apache Echarts" c4Description="Stellt Benutzer:innen Auswertungs-<br>möglichkeiten zur Verfügbarkeit von Kurzzeitmietobjekten." label="<font style="font-size: 16px"><b>%c4Name%</b></font><div>[%c4Type%:&nbsp;%c4Technology%]</div><br><div><font style="font-size: 11px"><font color="#E6E6E6">%c4Description%</font></div>" id="_wAeSdXpbb6KPP4DEc36-8">
|
|
||||||
<mxCell style="shape=mxgraph.c4.webBrowserContainer2;whiteSpace=wrap;html=1;boundedLbl=1;rounded=0;labelBackgroundColor=none;strokeColor=#118ACD;fillColor=#23A2D9;strokeColor=#118ACD;strokeColor2=#0E7DAD;fontSize=12;fontColor=#ffffff;align=center;metaEdit=1;points=[[0.5,0,0],[1,0.25,0],[1,0.5,0],[1,0.75,0],[0.5,1,0],[0,0.75,0],[0,0.5,0],[0,0.25,0]];resizable=0;" parent="1" vertex="1">
|
|
||||||
<mxGeometry x="480" y="370" width="240" height="160" as="geometry" />
|
|
||||||
</mxCell>
|
|
||||||
</object>
|
|
||||||
<mxCell id="_wAeSdXpbb6KPP4DEc36-10" style="rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=0;exitDx=0;exitDy=0;exitPerimeter=0;dashed=1;dashPattern=8 8;" parent="1" source="_wAeSdXpbb6KPP4DEc36-9" target="_wAeSdXpbb6KPP4DEc36-6" edge="1">
|
|
||||||
<mxGeometry relative="1" as="geometry" />
|
|
||||||
</mxCell>
|
|
||||||
<mxCell id="_wAeSdXpbb6KPP4DEc36-16" value="<div>Besucht Webapplikation</div><div>[HTTPS]<br></div>" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" parent="_wAeSdXpbb6KPP4DEc36-10" vertex="1" connectable="0">
|
|
||||||
<mxGeometry x="0.1247" y="-2" relative="1" as="geometry">
|
|
||||||
<mxPoint as="offset" />
|
|
||||||
</mxGeometry>
|
|
||||||
</mxCell>
|
|
||||||
<mxCell id="_wAeSdXpbb6KPP4DEc36-11" style="rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=0;exitDx=0;exitDy=0;exitPerimeter=0;dashed=1;dashPattern=8 8;" parent="1" source="_wAeSdXpbb6KPP4DEc36-9" target="_wAeSdXpbb6KPP4DEc36-8" edge="1">
|
|
||||||
<mxGeometry relative="1" as="geometry" />
|
|
||||||
</mxCell>
|
|
||||||
<mxCell id="_wAeSdXpbb6KPP4DEc36-17" value="Betrachtet Auswertungen" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" parent="_wAeSdXpbb6KPP4DEc36-11" vertex="1" connectable="0">
|
|
||||||
<mxGeometry x="0.2151" y="-1" relative="1" as="geometry">
|
|
||||||
<mxPoint x="2" as="offset" />
|
|
||||||
</mxGeometry>
|
|
||||||
</mxCell>
|
|
||||||
<object placeholders="1" c4Name="Benutzer:in" c4Type="Person" c4Description="Person welche Auswertungen zur Verfügbarkeit von Kurzzeitmietobjekten in Ferienregionen durchführt." label="<font style="font-size: 16px"><b>%c4Name%</b></font><div>[%c4Type%]</div><br><div><font style="font-size: 11px"><font color="#cccccc">%c4Description%</font></div>" id="_wAeSdXpbb6KPP4DEc36-9">
|
|
||||||
<mxCell style="html=1;fontSize=11;dashed=0;whiteSpace=wrap;fillColor=#083F75;strokeColor=#06315C;fontColor=#ffffff;shape=mxgraph.c4.person2;align=center;metaEdit=1;points=[[0.5,0,0],[1,0.5,0],[1,0.75,0],[0.75,1,0],[0.5,1,0],[0.25,1,0],[0,0.75,0],[0,0.5,0]];resizable=0;" parent="1" vertex="1">
|
|
||||||
<mxGeometry x="314" y="600" width="200" height="180" as="geometry" />
|
|
||||||
</mxCell>
|
|
||||||
</object>
|
|
||||||
<mxCell id="_wAeSdXpbb6KPP4DEc36-14" style="rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=0;exitDx=0;exitDy=0;exitPerimeter=0;entryX=0.5;entryY=1;entryDx=0;entryDy=0;entryPerimeter=0;dashed=1;dashPattern=8 8;" parent="1" source="_wAeSdXpbb6KPP4DEc36-13" target="_wAeSdXpbb6KPP4DEc36-3" edge="1">
|
|
||||||
<mxGeometry relative="1" as="geometry" />
|
|
||||||
</mxCell>
|
|
||||||
<mxCell id="_wAeSdXpbb6KPP4DEc36-20" value="Ruft ETL Verfahren auf" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" parent="_wAeSdXpbb6KPP4DEc36-14" vertex="1" connectable="0">
|
|
||||||
<mxGeometry x="-0.0667" y="-1" relative="1" as="geometry">
|
|
||||||
<mxPoint as="offset" />
|
|
||||||
</mxGeometry>
|
|
||||||
</mxCell>
|
|
||||||
<object placeholders="1" c4Name="FastAPI" c4Type="Python (FastAPI)" c4Description="Stellt aufbereitete Daten via <br>JSON/HTTPS API zur Verfügung." label="<font style="font-size: 16px"><b>%c4Name%</b></font><div>[%c4Type%]</div><br><div><font style="font-size: 11px"><font color="#cccccc">%c4Description%</font></div>" id="_wAeSdXpbb6KPP4DEc36-13">
|
|
||||||
<mxCell style="rounded=1;whiteSpace=wrap;html=1;labelBackgroundColor=none;fillColor=#1061B0;fontColor=#ffffff;align=center;arcSize=10;strokeColor=#0D5091;metaEdit=1;resizable=0;points=[[0.25,0,0],[0.5,0,0],[0.75,0,0],[1,0.25,0],[1,0.5,0],[1,0.75,0],[0.75,1,0],[0.5,1,0],[0.25,1,0],[0,0.75,0],[0,0.5,0],[0,0.25,0]];" parent="1" vertex="1">
|
|
||||||
<mxGeometry x="480" y="230" width="240" height="120" as="geometry" />
|
|
||||||
</mxCell>
|
|
||||||
</object>
|
|
||||||
<mxCell id="_wAeSdXpbb6KPP4DEc36-18" style="rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;exitPerimeter=0;entryX=0;entryY=0.5;entryDx=0;entryDy=0;entryPerimeter=0;dashed=1;dashPattern=8 8;" parent="1" source="_wAeSdXpbb6KPP4DEc36-6" target="_wAeSdXpbb6KPP4DEc36-8" edge="1">
|
|
||||||
<mxGeometry relative="1" as="geometry" />
|
|
||||||
</mxCell>
|
|
||||||
<mxCell id="_wAeSdXpbb6KPP4DEc36-19" value="<div>Liefert Inhalte zum Webbrowser&nbsp;</div><div>von Benutzer:innen</div>" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" parent="_wAeSdXpbb6KPP4DEc36-18" vertex="1" connectable="0">
|
|
||||||
<mxGeometry x="-0.0888" y="-2" relative="1" as="geometry">
|
|
||||||
<mxPoint x="5" y="7" as="offset" />
|
|
||||||
</mxGeometry>
|
|
||||||
</mxCell>
|
|
||||||
</root>
|
|
||||||
</mxGraphModel>
|
|
||||||
</diagram>
|
|
||||||
<diagram id="2goo0GJ--Dnj9rEJibSb" name="Seite-2">
|
|
||||||
<mxGraphModel dx="2285" dy="1267" grid="1" gridSize="10" guides="1" tooltips="1" connect="1" arrows="1" fold="1" page="1" pageScale="1" pageWidth="827" pageHeight="1169" math="0" shadow="0">
|
|
||||||
<root>
|
|
||||||
<mxCell id="0" />
|
|
||||||
<mxCell id="1" parent="0" />
|
|
||||||
<object placeholders="1" c4Name="RDBMS" c4Type="Container" c4Technology="DuckDB" c4Description="Aggregierte Daten." label="<font style="font-size: 16px"><b>%c4Name%</b></font><div>[%c4Type%:&nbsp;%c4Technology%]</div><br><div><font style="font-size: 11px"><font color="#E6E6E6">%c4Description%</font></div>" id="Xmw1x83A06H2_JC6hK8s-1">
|
|
||||||
<mxCell style="shape=cylinder3;size=15;whiteSpace=wrap;html=1;boundedLbl=1;rounded=0;labelBackgroundColor=none;fillColor=#23A2D9;fontSize=12;fontColor=#ffffff;align=center;strokeColor=#0E7DAD;metaEdit=1;points=[[0.5,0,0],[1,0.25,0],[1,0.5,0],[1,0.75,0],[0.5,1,0],[0,0.75,0],[0,0.5,0],[0,0.25,0]];resizable=0;" vertex="1" parent="1">
|
|
||||||
<mxGeometry x="40" y="230" width="240" height="120" as="geometry" />
|
|
||||||
</mxCell>
|
|
||||||
</object>
|
|
||||||
<object placeholders="1" c4Name="ETL" c4Type="SQL, Python (Polars)" c4Description="Bereitet Daten mittels algorithmischer<br> Verfahren auf." label="<font style="font-size: 16px"><b>%c4Name%</b></font><div>[%c4Type%]</div><br><div><font style="font-size: 11px"><font color="#cccccc">%c4Description%</font></div>" id="Xmw1x83A06H2_JC6hK8s-2">
|
|
||||||
<mxCell style="rounded=1;whiteSpace=wrap;html=1;labelBackgroundColor=none;fillColor=#1061B0;fontColor=#ffffff;align=center;arcSize=10;strokeColor=#0D5091;metaEdit=1;resizable=0;points=[[0.25,0,0],[0.5,0,0],[0.75,0,0],[1,0.25,0],[1,0.5,0],[1,0.75,0],[0.75,1,0],[0.5,1,0],[0.25,1,0],[0,0.75,0],[0,0.5,0],[0,0.25,0]];" vertex="1" parent="1">
|
|
||||||
<mxGeometry x="40" y="464" width="240" height="120" as="geometry" />
|
|
||||||
</mxCell>
|
|
||||||
</object>
|
|
||||||
<mxCell id="Xmw1x83A06H2_JC6hK8s-3" style="rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=0;exitDx=0;exitDy=0;exitPerimeter=0;entryX=0.5;entryY=1;entryDx=0;entryDy=0;entryPerimeter=0;dashed=1;dashPattern=8 8;" edge="1" parent="1" source="Xmw1x83A06H2_JC6hK8s-2" target="Xmw1x83A06H2_JC6hK8s-1">
|
|
||||||
<mxGeometry relative="1" as="geometry" />
|
|
||||||
</mxCell>
|
|
||||||
<mxCell id="Xmw1x83A06H2_JC6hK8s-4" value="Liest Datenbank" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="Xmw1x83A06H2_JC6hK8s-3">
|
|
||||||
<mxGeometry x="0.0412" y="1" relative="1" as="geometry">
|
|
||||||
<mxPoint x="-1" y="-1" as="offset" />
|
|
||||||
</mxGeometry>
|
|
||||||
</mxCell>
|
|
||||||
<mxCell id="Xmw1x83A06H2_JC6hK8s-5" style="rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0;exitY=0.5;exitDx=0;exitDy=0;exitPerimeter=0;dashed=1;dashPattern=8 8;entryX=1;entryY=0.5;entryDx=0;entryDy=0;entryPerimeter=0;" edge="1" parent="1" source="Xmw1x83A06H2_JC6hK8s-7" target="Xmw1x83A06H2_JC6hK8s-16">
|
|
||||||
<mxGeometry relative="1" as="geometry" />
|
|
||||||
</mxCell>
|
|
||||||
<mxCell id="Xmw1x83A06H2_JC6hK8s-6" value="<div>Führt Abfragen aus</div><div>[JSON/HTTPS]<br></div>" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="Xmw1x83A06H2_JC6hK8s-5">
|
|
||||||
<mxGeometry x="-0.0541" y="-1" relative="1" as="geometry">
|
|
||||||
<mxPoint as="offset" />
|
|
||||||
</mxGeometry>
|
|
||||||
</mxCell>
|
|
||||||
<object placeholders="1" c4Name="Webapplikation" c4Type="PHP (Laravel)" c4Description="Verarbeitet Anfragen von Benutzer:innen" label="<font style="font-size: 16px"><b>%c4Name%</b></font><div>[%c4Type%]</div><br><div><font style="font-size: 11px"><font color="#cccccc">%c4Description%</font></div>" id="Xmw1x83A06H2_JC6hK8s-7">
|
|
||||||
<mxCell style="rounded=1;whiteSpace=wrap;html=1;labelBackgroundColor=none;fillColor=#1061B0;fontColor=#ffffff;align=center;arcSize=10;strokeColor=#0D5091;metaEdit=1;resizable=0;points=[[0.25,0,0],[0.5,0,0],[0.75,0,0],[1,0.25,0],[1,0.5,0],[1,0.75,0],[0.75,1,0],[0.5,1,0],[0.25,1,0],[0,0.75,0],[0,0.5,0],[0,0.25,0]];" vertex="1" parent="1">
|
|
||||||
<mxGeometry x="710" y="240" width="240" height="120" as="geometry" />
|
|
||||||
</mxCell>
|
|
||||||
</object>
|
|
||||||
<object placeholders="1" c4Name="Dashboard" c4Type="Container" c4Technology="Apache Echarts" c4Description="Stellt Benutzer:innen Auswertungs-<br>möglichkeiten zur Verfügbarkeit von Kurzzeitmietobjekten." label="<font style="font-size: 16px"><b>%c4Name%</b></font><div>[%c4Type%:&nbsp;%c4Technology%]</div><br><div><font style="font-size: 11px"><font color="#E6E6E6">%c4Description%</font></div>" id="Xmw1x83A06H2_JC6hK8s-8">
|
|
||||||
<mxCell style="shape=mxgraph.c4.webBrowserContainer2;whiteSpace=wrap;html=1;boundedLbl=1;rounded=0;labelBackgroundColor=none;strokeColor=#118ACD;fillColor=#23A2D9;strokeColor=#118ACD;strokeColor2=#0E7DAD;fontSize=12;fontColor=#ffffff;align=center;metaEdit=1;points=[[0.5,0,0],[1,0.25,0],[1,0.5,0],[1,0.75,0],[0.5,1,0],[0,0.75,0],[0,0.5,0],[0,0.25,0]];resizable=0;" vertex="1" parent="1">
|
|
||||||
<mxGeometry x="710" y="470" width="240" height="160" as="geometry" />
|
|
||||||
</mxCell>
|
|
||||||
</object>
|
|
||||||
<mxCell id="Xmw1x83A06H2_JC6hK8s-9" style="rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0;exitY=0.5;exitDx=0;exitDy=0;exitPerimeter=0;dashed=1;dashPattern=8 8;entryX=1;entryY=0.5;entryDx=0;entryDy=0;entryPerimeter=0;" edge="1" parent="1" source="Xmw1x83A06H2_JC6hK8s-13" target="Xmw1x83A06H2_JC6hK8s-7">
|
|
||||||
<mxGeometry relative="1" as="geometry" />
|
|
||||||
</mxCell>
|
|
||||||
<mxCell id="Xmw1x83A06H2_JC6hK8s-10" value="<div>Besucht Webapplikation</div><div>[HTTPS]<br></div>" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="Xmw1x83A06H2_JC6hK8s-9">
|
|
||||||
<mxGeometry x="0.1247" y="-2" relative="1" as="geometry">
|
|
||||||
<mxPoint x="4" y="4" as="offset" />
|
|
||||||
</mxGeometry>
|
|
||||||
</mxCell>
|
|
||||||
<mxCell id="Xmw1x83A06H2_JC6hK8s-11" style="rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0;exitY=0.5;exitDx=0;exitDy=0;exitPerimeter=0;dashed=1;dashPattern=8 8;entryX=1;entryY=0.5;entryDx=0;entryDy=0;entryPerimeter=0;" edge="1" parent="1" source="Xmw1x83A06H2_JC6hK8s-13" target="Xmw1x83A06H2_JC6hK8s-8">
|
|
||||||
<mxGeometry relative="1" as="geometry" />
|
|
||||||
</mxCell>
|
|
||||||
<mxCell id="Xmw1x83A06H2_JC6hK8s-12" value="Betrachtet Auswertungen" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="Xmw1x83A06H2_JC6hK8s-11">
|
|
||||||
<mxGeometry x="0.2151" y="-1" relative="1" as="geometry">
|
|
||||||
<mxPoint x="13" as="offset" />
|
|
||||||
</mxGeometry>
|
|
||||||
</mxCell>
|
|
||||||
<object placeholders="1" c4Name="Benutzer:in" c4Type="Person" c4Description="Person welche Auswertungen zur Verfügbarkeit von Kurzzeitmietobjekten in Ferienregionen durchführt." label="<font style="font-size: 16px"><b>%c4Name%</b></font><div>[%c4Type%]</div><br><div><font style="font-size: 11px"><font color="#cccccc">%c4Description%</font></div>" id="Xmw1x83A06H2_JC6hK8s-13">
|
|
||||||
<mxCell style="html=1;fontSize=11;dashed=0;whiteSpace=wrap;fillColor=#083F75;strokeColor=#06315C;fontColor=#ffffff;shape=mxgraph.c4.person2;align=center;metaEdit=1;points=[[0.5,0,0],[1,0.5,0],[1,0.75,0],[0.75,1,0],[0.5,1,0],[0.25,1,0],[0,0.75,0],[0,0.5,0]];resizable=0;" vertex="1" parent="1">
|
|
||||||
<mxGeometry x="1120" y="320" width="200" height="180" as="geometry" />
|
|
||||||
</mxCell>
|
|
||||||
</object>
|
|
||||||
<mxCell id="Xmw1x83A06H2_JC6hK8s-14" style="rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0;exitY=0.5;exitDx=0;exitDy=0;exitPerimeter=0;entryX=1;entryY=0.5;entryDx=0;entryDy=0;entryPerimeter=0;dashed=1;dashPattern=8 8;" edge="1" parent="1" source="Xmw1x83A06H2_JC6hK8s-16" target="Xmw1x83A06H2_JC6hK8s-2">
|
|
||||||
<mxGeometry relative="1" as="geometry" />
|
|
||||||
</mxCell>
|
|
||||||
<mxCell id="Xmw1x83A06H2_JC6hK8s-15" value="Ruft ETL Verfahren auf" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="Xmw1x83A06H2_JC6hK8s-14">
|
|
||||||
<mxGeometry x="-0.0667" y="-1" relative="1" as="geometry">
|
|
||||||
<mxPoint as="offset" />
|
|
||||||
</mxGeometry>
|
|
||||||
</mxCell>
|
|
||||||
<object placeholders="1" c4Name="FastAPI" c4Type="Python (FastAPI)" c4Description="Stellt aufbereitete Daten via <br>JSON/HTTPS API zur Verfügung." label="<font style="font-size: 16px"><b>%c4Name%</b></font><div>[%c4Type%]</div><br><div><font style="font-size: 11px"><font color="#cccccc">%c4Description%</font></div>" id="Xmw1x83A06H2_JC6hK8s-16">
|
|
||||||
<mxCell style="rounded=1;whiteSpace=wrap;html=1;labelBackgroundColor=none;fillColor=#1061B0;fontColor=#ffffff;align=center;arcSize=10;strokeColor=#0D5091;metaEdit=1;resizable=0;points=[[0.25,0,0],[0.5,0,0],[0.75,0,0],[1,0.25,0],[1,0.5,0],[1,0.75,0],[0.75,1,0],[0.5,1,0],[0.25,1,0],[0,0.75,0],[0,0.5,0],[0,0.25,0]];" vertex="1" parent="1">
|
|
||||||
<mxGeometry x="330" y="240" width="240" height="120" as="geometry" />
|
|
||||||
</mxCell>
|
|
||||||
</object>
|
|
||||||
<mxCell id="Xmw1x83A06H2_JC6hK8s-17" style="rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=1;exitDx=0;exitDy=0;exitPerimeter=0;entryX=0.5;entryY=0;entryDx=0;entryDy=0;entryPerimeter=0;dashed=1;dashPattern=8 8;" edge="1" parent="1" source="Xmw1x83A06H2_JC6hK8s-7" target="Xmw1x83A06H2_JC6hK8s-8">
|
|
||||||
<mxGeometry relative="1" as="geometry" />
|
|
||||||
</mxCell>
|
|
||||||
<mxCell id="Xmw1x83A06H2_JC6hK8s-18" value="<div>Liefert Inhalte zum Webbrowser&nbsp;</div><div>von Benutzer:innen</div>" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="Xmw1x83A06H2_JC6hK8s-17">
|
|
||||||
<mxGeometry x="-0.0888" y="-2" relative="1" as="geometry">
|
|
||||||
<mxPoint x="5" as="offset" />
|
|
||||||
</mxGeometry>
|
|
||||||
</mxCell>
|
|
||||||
<object placeholders="1" c4Name="Visual Analytics Tool" c4Type="SystemScopeBoundary" c4Application="Software System" label="<font style="font-size: 16px"><b><div style="text-align: left">%c4Name%</div></b></font><div style="text-align: left">[%c4Application%]</div>" id="Xmw1x83A06H2_JC6hK8s-19">
|
|
||||||
<mxCell style="rounded=1;fontSize=11;whiteSpace=wrap;html=1;dashed=1;arcSize=20;fillColor=none;strokeColor=#666666;fontColor=#333333;labelBackgroundColor=none;align=left;verticalAlign=bottom;labelBorderColor=none;spacingTop=0;spacing=10;dashPattern=8 4;metaEdit=1;rotatable=0;perimeter=rectanglePerimeter;noLabel=0;labelPadding=0;allowArrows=0;connectable=0;expand=0;recursiveResize=0;editable=1;pointerEvents=0;absoluteArcSize=1;points=[[0.25,0,0],[0.5,0,0],[0.75,0,0],[1,0.25,0],[1,0.5,0],[1,0.75,0],[0.75,1,0],[0.5,1,0],[0.25,1,0],[0,0.75,0],[0,0.5,0],[0,0.25,0]];" vertex="1" parent="1">
|
|
||||||
<mxGeometry x="20" y="210" width="1080" height="460" as="geometry" />
|
|
||||||
</mxCell>
|
|
||||||
</object>
|
|
||||||
</root>
|
|
||||||
</mxGraphModel>
|
|
||||||
</diagram>
|
|
||||||
</mxfile>
|
|
|
@ -1,25 +0,0 @@
|
||||||
## Installation
Perform the following steps to install the project.

### Install dependencies
[pixi](https://pixi.sh/) is used to manage the dependencies.
```bash
pixi install
```

### Configure the database connection
Create the environment file:
```bash
cp src/.env.example .env
```

In the newly created .env file, set the path to the database:
```
DATABASE="/path/to/db.duckdb"
```
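
At runtime the ETL code reads this value from the environment; the sketch below shows one way the setting can be consumed, assuming the `python-dotenv` and `duckdb` packages declared in pyproject.toml. The helper name `open_database` is illustrative and not necessarily the interface of the project's own wrapper (database.py).

```python
# Illustrative sketch (assumption): load DATABASE from .env and open DuckDB.
# The project's actual wrapper (database.py) may expose a different interface.
import os

import duckdb
from dotenv import load_dotenv


def open_database() -> duckdb.DuckDBPyConnection:
    load_dotenv()                     # picks up the .env file created above
    db_path = os.environ["DATABASE"]  # e.g. "/path/to/db.duckdb"
    return duckdb.connect(db_path, read_only=True)


if __name__ == "__main__":
    con = open_database()
    print(con.execute("SELECT 1").fetchone())
```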

# Start FastAPI
Run FastAPI on a different port than the dashboard.
```bash
fastapi dev api/main.py --port 8080
```
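
Once the server is running, the JSON endpoints can be smoke-tested directly; below is a minimal example against the `/regions` route defined in api/main.py, assuming the host and port from the command above.

```python
# Minimal smoke test (assumes the dev server from the command above on port 8080).
import json
from urllib.request import urlopen

with urlopen("http://127.0.0.1:8080/regions") as resp:
    payload = json.load(resp)

# Each entry carries name, id and count_properties (see the Regions response model).
for region in payload["regions"]:
    print(region["id"], region["name"], region["count_properties"])
```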
|
|
File diff suppressed because it is too large
|
@ -1,7 +1,8 @@
|
||||||
[project]
|
[project]
|
||||||
authors = [{name = "Giò Diani", email = "mail@gionathandiani.name"}, {name = "Mauro Stoffel", email = "mauro.stoffel@stud.fhgr.ch"}, {name = "Colin Bolli", email = "colin.bolli@stud.fhgr.ch"}, {name = "Charles Winkler", email = "charles.winkler@stud.fhgr.ch"}]
|
authors = [{name = "Giò Diani", email = "mail@gionathandiani.name"}]
|
||||||
description = "Datenauferbeitung"
|
dependencies = []
|
||||||
name = "ETL"
|
description = "Add a short description here"
|
||||||
|
name = "consultancy_2"
|
||||||
requires-python = ">= 3.11"
|
requires-python = ">= 3.11"
|
||||||
version = "0.1.0"
|
version = "0.1.0"
|
||||||
|
|
||||||
|
@ -14,7 +15,7 @@ channels = ["conda-forge"]
|
||||||
platforms = ["win-64", "linux-64", "osx-64", "osx-arm64"]
|
platforms = ["win-64", "linux-64", "osx-64", "osx-arm64"]
|
||||||
|
|
||||||
[tool.pixi.pypi-dependencies]
|
[tool.pixi.pypi-dependencies]
|
||||||
etl = { path = ".", editable = true }
|
consultancy_2 = { path = ".", editable = true }
|
||||||
|
|
||||||
[tool.pixi.tasks]
|
[tool.pixi.tasks]
|
||||||
|
|
||||||
|
@ -24,6 +25,5 @@ pandas = ">=2.2.3,<3"
|
||||||
plotly = ">=5.24.1,<6"
|
plotly = ">=5.24.1,<6"
|
||||||
duckdb = ">=1.1.2,<2"
|
duckdb = ">=1.1.2,<2"
|
||||||
python-dotenv = ">=1.0.1,<2"
|
python-dotenv = ">=1.0.1,<2"
|
||||||
fastapi = ">=0.115.4,<0.116"
|
|
||||||
polars = ">=0.20.26,<2"
|
polars = ">=0.20.26,<2"
|
||||||
pyarrow = ">=18.0.0,<19"
|
pyarrow = ">=18.0.0,<19"
|
||||||
|
|
|
@@ -1,268 +0,0 @@
import datetime
from typing import List, Union

import data
import polars as pl
from data import etl_property_capacities as etl_pc
from data import etl_property_capacities_daily as etl_pcd
from data import etl_property_capacities_monthly as etl_pcm
from data import etl_property_neighbours as etl_pn
from data import etl_region_capacities as etl_rc
from data import etl_region_capacities_daily as etl_rcd
from data import etl_region_capacities_monthly as etl_rcm
from data import etl_region_movAverage as etl_rmA
from data import etl_region_properties_capacities as etl_rpc
from fastapi import FastAPI
from fastapi.responses import JSONResponse
from pydantic import BaseModel


class RegionsItems(BaseModel):
    name: str
    id: str
    count_properties: int

class Regions(BaseModel):
    regions: List[RegionsItems]

class RegionBase(BaseModel):
    name: str
    id: str

class RegionPropertiesCapacitiesValues(BaseModel):
    date: str
    property_id: str
    capacity: float

class RegionCapacities(BaseModel):
    capacities: List[float]
    dates: List

class RegionCapacitiesMonthly(BaseModel):
    months: List[str]
    capacities: List[float]

class RegionCapacitiesDaily(BaseModel):
    weekdays: List[str]
    capacities: List[float]

class RegionPropertiesCapacities(BaseModel):
    dates: List
    property_ids: List
    values: List[RegionPropertiesCapacitiesValues]

class RegionMovingAverage(BaseModel):
    dates: List
    capacities_timeframe_before: List[Union[float, None]]
    capacities_timeframe_after: List[Union[float, None]]
    capacities_moving_average: List[Union[float, None]]

class PropertiesGrowth(BaseModel):
    dates: List
    total_all: List[Union[int, None]]
    total_heidiland: List[Union[int, None]]
    total_engadin: List[Union[int, None]]
    total_stmoritz: List[Union[int, None]]
    total_davos: List[Union[int, None]]

class PropertiesGeoList(BaseModel):
    property_id: str
    latlng: str
    region_id: str

class PropertiesGeo(BaseModel):
    properties: List[PropertiesGeoList]

class PropertyNeighboursList(BaseModel):
    id: str
    lat: float
    lon: float

class PropertyNeighbours(BaseModel):
    neighbours: List[PropertyNeighboursList]

class PropertyExtractionsList(BaseModel):
    calendar: str
    date: str

class PropertyExtractions(BaseModel):
    extractions: List[PropertyExtractionsList]

class PropertyCapacities(BaseModel):
    capacities: List[float]
    dates: List[str]

class PropertyCapacitiesMonthly(BaseModel):
    months: List[str]
    capacities: List[float]

class PropertyCapacitiesDaily(BaseModel):
    weekdays: List[str]
    capacities: List[float]

class PropertyBaseDetail(BaseModel):
    property_platform_id: str
    first_found: str
    last_found: str
    latlng: str
    region_id: str
    region_name: str

class PropertyBase(BaseModel):
    property_platform_id: str
    first_found: str
    last_found: str
    latlng: str
    region_id: str
    region_name: str


d = data.load()

tags_metadata = [
    {
        "name": "regions",
        "description": "Get data for regions.",
    },
    {
        "name": "properties",
        "description": "Get data for properties",
    },
]

app = FastAPI(openapi_tags=tags_metadata)

@app.get("/")
def read_root():
    return {"Hi there!"}

@app.get("/regions", response_model=Regions, tags=['region'])
def regions():
    """
    Returns a list of all available regions.
    """
    return {"regions" : d.properties_per_region().pl().to_dicts()}

@app.get("/regions/{id}/base", response_model=RegionBase, tags=['region'])
def region_base(id: int):
    """
    Returns basic information about a region.
    """
    base = d.region_base_data(id).pl().to_dicts()
    return {"id": base[0]["id"], "name": base[0]["name"]}

@app.get("/regions/{id}/capacities", response_model=RegionCapacities, tags=['region'])
def region_capacities(id: int):
    """
    Returns the capacities of a region, for every scraping. Set id to -1 to obtain data for all regions.
    """
    capacities = etl_rc.region_capacities(id)
    return capacities

@app.get("/regions/{id}/capacities/monthly/{date}", response_model=RegionCapacitiesMonthly, tags=['region'])
def region_capacities_monthly(id: int, date: datetime.date):
    """
    Returns the capacities of a region for the specified date by months. Set id to -1 to obtain data for all regions.
    """
    capacities = etl_rcm.region_capacities_monthly(id, date)
    return capacities

@app.get("/regions/{id}/capacities/daily/{date}", response_model=RegionCapacitiesDaily, tags=['region'])
def region_capacities_daily(id: int, date: datetime.date):
    """
    Returns the capacities of a region for the specified date by days. Set id to -1 to obtain data for all regions.
    """
    capacities = etl_rcd.region_capacities_daily(id, date)
    return capacities

@app.get("/regions/{id}/moving-average/{date}", response_model=RegionMovingAverage, tags=['region'])
def region_capacities_data(id: int, date: datetime.date):
    """
    Returns the moving average of a region for the specified date. Set id to -1 to obtain data for all regions.
    """
    result = etl_rmA.region_movingAverage(id, date)
    return result

@app.get("/regions/{id}/properties/capacities", response_model=RegionPropertiesCapacities, tags=['region'])
def region_property_capacities(id: int):
    """
    Returns the capacities of properties in a region, for every scraping. Set id to -1 to obtain data for all regions.
    """
    capacities = etl_rpc.region_properties_capacities(id)
    return capacities

@app.get("/properties/growth", response_model=PropertiesGrowth, tags=['property'])
def properties_growth():
    """
    Returns the growth rate of found properties.
    """
    options = {"dates" : d.properties_growth().pl()['date'].to_list(), "total_all" : d.properties_growth().pl()['total_all'].to_list(), "total_heidiland" : d.properties_growth().pl()['total_heidiland'].to_list(), "total_engadin" : d.properties_growth().pl()['total_engadin'].to_list(), "total_davos" : d.properties_growth().pl()['total_davos'].to_list(), "total_stmoritz" : d.properties_growth().pl()['total_stmoritz'].to_list()}
    return options

@app.get("/properties/geo", response_model=PropertiesGeo, tags=['property'])
def properties_geo():
    """
    Returns the geocoordinates of properties.
    """
    return {"properties": d.properties_geo().pl().to_dicts()}

@app.get("/properties/{id}/base", response_model=PropertyBase, tags=['property'])
def property_base_data(id: int):
    """
    Returns basic information about a property.
    """
    base = d.property_base_data(id).pl().to_dicts()
    return {
        "property_platform_id": base[0]['property_platform_id'],
        "first_found": str(base[0]['first_found']),
        "last_found": str(base[0]['last_found']),
        "latlng": base[0]['latlng'],
        "region_id": base[0]['region_id'],
        "region_name": base[0]['region_name']}

@app.get("/properties/{id}/neighbours", response_model=PropertyNeighbours, tags=['property'])
def property_neighbours(id: int):
    """
    Returns the 10 nearest properties to the given property.
    """
    return {"neighbours" : etl_pn.property_neighbours(id)}

@app.get("/properties/{id}/extractions", response_model=PropertyExtractions, tags=['property'])
def property_extractions(id: int):
    """
    Returns extracted data for the given property.
    """
    return {"extractions" : d.extractions_for(property_id = id).pl().cast({"date": pl.String}).to_dicts()}

@app.get("/properties/{id}/capacities", response_model=PropertyCapacities, tags=['property'])
def property_capacities_data(id: int):
    """
    Returns capacities for the given property.
    """
    capacities = etl_pc.property_capacities(id)
    return capacities

@app.get("/properties/{id}/capacities/monthly/{date}", response_model=PropertyCapacitiesMonthly, tags=['property'])
def property_capacities_data_monthly(id: int, date: datetime.date):
    """
    Returns capacities for the given property and date by month.
    """
    capacities = etl_pcm.property_capacities_monthly(id, date)
    return capacities

@app.get("/properties/{id}/capacities/daily/{date}", response_model=PropertyCapacitiesDaily, tags=['property'])
def property_capacities_data_daily(id: int, date: datetime.date):
    """
    Returns capacities for the given property and date by day.
    """
    capacities = etl_pcd.property_capacities_daily(id, date)
    return capacities
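For orientation, the removed endpoints could be smoke-tested with FastAPI's `TestClient`; this is only a sketch and assumes the deleted module (imported here as `main`) and its `data` backend are still importable, which this diff no longer guarantees:

```python
# Hypothetical smoke test against the removed API; `main` stands for the deleted
# module shown above and is an assumption, not part of this diff.
from fastapi.testclient import TestClient
import main

client = TestClient(main.app)

assert client.get("/").status_code == 200   # root endpoint
resp = client.get("/regions")               # validated against the Regions model
print(resp.json()["regions"][:3])
```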
@@ -0,0 +1,22 @@
from typing import Union

import polars as pl
from fastapi import FastAPI, Response

import data

d = data.load()

app = FastAPI()


@app.get("/")
def read_root():
    return {"Hello": "World"}


@app.get("/items/{item_id}")
def read_item(item_id: int):
    ext = d.extractions_for(item_id).pl()
    out = ext.with_columns(pl.col("calendar").str.extract_all(r"([0-9]{4}-[0-9]{2}-[0-9]{2})|[0-2]").alias("calendar_data"))
    out = out.drop(['calendar', 'property_id'])
    return Response(content=out.write_json(), media_type="application/json")
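The replacement app exposes a single `/items/{item_id}` route that returns the polars frame serialised with `write_json()`. A minimal client sketch, assuming the server runs locally on port 8000 and that a property with id 42 exists:

```python
# Sketch only: the URL, port and property id are assumptions for illustration.
import httpx

r = httpx.get("http://127.0.0.1:8000/items/42", timeout=30)
r.raise_for_status()
rows = r.json()   # JSON layout depends on the polars version (row- or column-oriented)
print(rows)
```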
@@ -28,6 +28,8 @@ class Database:
         if(spatial_installed and not spatial_installed[0]):
             self.connection.sql("INSTALL spatial")

     def db_overview(self):
         return self.connection.sql("DESCRIBE;").show()

@@ -44,100 +46,19 @@ class Database:
     def properties_growth(self):
         return self.connection.sql("""
-            WITH PropertiesALL AS (
             SELECT
                 strftime(created_at, '%Y-%m-%d') AS date,
-                COUNT(*) as properties_count,
-                SUM(properties_count) OVER (ORDER BY date) AS total
+                COUNT(*) as properties_count
             FROM
-                consultancy_d.properties p
+                consultancy_d.properties
             GROUP BY
-                date
-            ORDER BY
-                date
-            ),
-            PropertiesR1 AS (
-            SELECT
-                strftime(created_at, '%Y-%m-%d') AS date,
-                COUNT(*) as properties_count,
-                SUM(properties_count) OVER (ORDER BY date) AS total
-            FROM
-                consultancy_d.properties p
-            WHERE
-                p.seed_id = 1
-            GROUP BY
-                date
-            ORDER BY
-                date
-            ),
-            PropertiesR2 AS (
-            SELECT
-                strftime(created_at, '%Y-%m-%d') AS date,
-                COUNT(*) as properties_count,
-                SUM(properties_count) OVER (ORDER BY date) AS total
-            FROM
-                consultancy_d.properties p
-            WHERE
-                p.seed_id = 2
-            GROUP BY
-                date
-            ORDER BY
-                date
-            ),
-            PropertiesR3 AS (
-            SELECT
-                strftime(created_at, '%Y-%m-%d') AS date,
-                COUNT(*) as properties_count,
-                SUM(properties_count) OVER (ORDER BY date) AS total
-            FROM
-                consultancy_d.properties p
-            WHERE
-                p.seed_id = 3
-            GROUP BY
-                date
-            ORDER BY
-                date
-            ),
-            PropertiesR4 AS (
-            SELECT
-                strftime(created_at, '%Y-%m-%d') AS date,
-                COUNT(*) as properties_count,
-                SUM(properties_count) OVER (ORDER BY date) AS total
-            FROM
-                consultancy_d.properties p
-            WHERE
-                p.seed_id = 4
-            GROUP BY
-                date
-            ORDER BY
-                date
-            )
-            SELECT
-                p.date,
-                p.total AS total_all,
-                pR1.total as total_heidiland,
-                pR2.total AS total_davos,
-                pR3.total AS total_engadin,
-                pR4.total AS total_stmoritz
-            FROM
-                PropertiesAll p
-            LEFT JOIN
-                PropertiesR1 pR1 ON p.date = pR1.date
-            LEFT JOIN
-                PropertiesR2 pR2 ON p.date = pR2.date
-            LEFT JOIN
-                PropertiesR3 pR3 ON p.date = pR3.date
-            LEFT JOIN
-                PropertiesR4 pR4 ON p.date = pR4.date
-            ORDER BY
-                p.date
+                date;
         """)

     def properties_per_region(self):
         return self.connection.sql("""
             SELECT
                 regions.name,
-                regions.id,
                 COUNT(*) AS count_properties
             FROM
                 consultancy_d.properties

@@ -147,22 +68,7 @@ class Database:
                 consultancy_d.regions ON regions.id = seeds.region_id
             GROUP BY
                 properties.seed_id,
-                regions.name,
-                regions.id
-            ORDER BY
-                count_properties ASC
-        """)
-
-    def propIds_with_region(self):
-        return self.connection.sql("""
-            SELECT
-                properties.id, seed_id, regions.name
-            FROM
-                consultancy_d.properties
-            LEFT JOIN
-                consultancy_d.seeds ON seeds.id = properties.seed_id
-            LEFT JOIN
-                consultancy_d.regions ON regions.id = seeds.region_id
+                regions.name
         """)

     def properties_unreachable(self):

@@ -290,7 +196,21 @@ class Database:
         """)

     def extractions(self):
-        return self.connection.sql("""
+        return self.connection.sql(f"""
+            SELECT
+                JSON_EXTRACT(body, '$.content.days') as calendar,
+                property_id,
+                created_at
+            FROM
+                consultancy_d.extractions
+            WHERE
+                type == 'calendar'
+            ORDER BY
+                property_id
+        """)
+
+    def extractions_for(self, property_id):
+        return self.connection.sql(f"""
             SELECT
                 JSON_EXTRACT(body, '$.content.days') as calendar,
                 property_id,

@@ -299,63 +219,11 @@ class Database:
                 consultancy_d.extractions
             WHERE
                 type == 'calendar' AND
-                calendar NOT NULL
+                property_id = {property_id}
             ORDER BY
                 property_id
         """)

-    def extractions_with_region(self):
-        return self.connection.sql("""
-            SELECT
-                JSON_EXTRACT(body, '$.content.days') as calendar,
-                extractions.property_id,
-                extractions.created_at,
-                properties.seed_id,
-                regions.name
-            FROM
-                consultancy_d.extractions
-            LEFT JOIN
-                consultancy_d.properties ON properties.id = extractions.property_id
-            LEFT JOIN
-                consultancy_d.seeds ON seeds.id = properties.seed_id
-            LEFT JOIN
-                consultancy_d.regions ON regions.id = seeds.region_id
-            WHERE
-                calendar NOT NULL
-        """)
-
-    def extractions_for(self, property_id):
-        return self.connection.sql(f"""
-            SELECT
-                JSON_EXTRACT(body, '$.content.days') as calendar,
-                created_at as date
-            FROM
-                consultancy_d.extractions
-            WHERE
-                type == 'calendar' AND
-                property_id = {property_id} AND
-                calendar NOT NULL
-            ORDER BY
-                created_at
-        """)
-
-    def extractions_propId_scrapeDate(self, property_id: int, scrape_date: str):
-        return self.connection.sql(f"""
-            SELECT
-                JSON_EXTRACT(body, '$.content.days') as calendar,
-                created_at
-            FROM
-                consultancy_d.extractions
-            WHERE
-                type == 'calendar' AND
-                property_id = {property_id} AND
-                calendar NOT NULL AND
-                created_at >= '{scrape_date}'
-            ORDER BY
-                created_at
-            LIMIT 1
-        """)
-
     # Number of extracted properties per extraction run
     def properties_per_extraction(self, property_id):
         return self.connection.sql("""

@@ -399,180 +267,3 @@ class Database:
             ORDER BY property_id
         """)
-
-    def property_base_data(self, id):
-        return self.connection.sql(f"""
-            SELECT
-                p.property_platform_id,
-                p.created_at as first_found,
-                p.last_found,
-                p.check_data as latlng,
-                r.id as region_id,
-                r.name as region_name
-            FROM
-                consultancy_d.properties p
-            INNER JOIN consultancy_d.seeds s ON s.id = p.seed_id
-            INNER JOIN consultancy_d.regions r ON s.region_id = r.id
-            WHERE
-                p.id = {id}
-        """)
-
-    def region_base_data(self, id):
-        if id == -1:
-            where = ''
-        else:
-            where = f"WHERE r.id = {id}"
-        return self.connection.sql(f"""
-            SELECT
-                r.id as id,
-                r.name as name
-            FROM
-                consultancy_d.regions r
-            {where}
-        """)
-
-    def properties_geo(self):
-        return self.connection.sql("""
-            SELECT
-                p.id as property_id,
-                p.check_data as latlng,
-                r.id as region_id
-            FROM
-                consultancy_d.properties p
-            LEFT JOIN
-                consultancy_d.seeds s ON s.id = p.seed_id
-            LEFT JOIN
-                consultancy_d.regions r ON r.id = s.region_id
-        """)
-
-    def properties_geo_seeds(self):
-        return self.connection.sql("""
-            SELECT
-                p.id,
-                p.seed_id,
-                p.check_data as coordinates
-            FROM
-                consultancy_d.properties p
-        """)
-
-    def capacity_of_region(self, region_id):
-        return self.connection.sql(f"""
-            SELECT
-                JSON_EXTRACT(body, '$.content.days') as calendarBody,
-                strftime(extractions.created_at, '%Y-%m-%d') AS ScrapeDate,
-                extractions.property_id,
-            FROM
-                consultancy_d.extractions
-            LEFT JOIN
-                consultancy_d.properties ON properties.id = extractions.property_id
-            WHERE
-                type == 'calendar' AND
-                properties.seed_id = {region_id} AND
-                calendarBody NOT NULL
-        """)
-
-    def singleScrape_of_region(self, region_id: int, scrape_date_min: str, scrape_date_max: str):
-        return self.connection.sql(f"""
-            SELECT
-                JSON_EXTRACT(body, '$.content.days') as calendarBody,
-            FROM
-                consultancy_d.extractions
-            LEFT JOIN
-                consultancy_d.properties ON properties.id = extractions.property_id
-            WHERE
-                type == 'calendar' AND
-                properties.seed_id = {region_id} AND
-                extractions.created_at >= '{scrape_date_min}' AND
-                extractions.created_at < '{scrape_date_max}' AND
-                calendarBody NOT NULL
-        """)
-
-    def singleScrape_of_global(self, scrape_date_min: str, scrape_date_max: str):
-        return self.connection.sql(f"""
-            SELECT
-                JSON_EXTRACT(body, '$.content.days') as calendarBody,
-            FROM
-                consultancy_d.extractions
-            LEFT JOIN
-                consultancy_d.properties ON properties.id = extractions.property_id
-            WHERE
-                type == 'calendar' AND
-                extractions.created_at >= '{scrape_date_min}' AND
-                extractions.created_at < '{scrape_date_max}' AND
-                calendarBody NOT NULL
-        """)
-
-    def singleScrape_of_region_scrapDate(self, region_id: int, scrape_date_min: str, scrape_date_max: str):
-        return self.connection.sql(f"""
-            SELECT
-                JSON_EXTRACT(body, '$.content.days') as calendarBody,
-                extractions.created_at
-            FROM
-                consultancy_d.extractions
-            LEFT JOIN
-                consultancy_d.properties ON properties.id = extractions.property_id
-            WHERE
-                type == 'calendar' AND
-                properties.seed_id = {region_id} AND
-                extractions.created_at >= '{scrape_date_min}' AND
-                extractions.created_at < '{scrape_date_max}' AND
-                calendarBody NOT NULL
-        """)
-
-    def singleScrape_of_global_scrapDate(self, scrape_date_min: str, scrape_date_max: str):
-        return self.connection.sql(f"""
-            SELECT
-                JSON_EXTRACT(body, '$.content.days') as calendarBody,
-                extractions.created_at
-            FROM
-                consultancy_d.extractions
-            LEFT JOIN
-                consultancy_d.properties ON properties.id = extractions.property_id
-            WHERE
-                type == 'calendar' AND
-                extractions.created_at >= '{scrape_date_min}' AND
-                extractions.created_at < '{scrape_date_max}' AND
-                calendarBody NOT NULL
-        """)
-
-    def capacity_global(self):
-        return self.connection.sql(f"""
-            SELECT
-                JSON_EXTRACT(body, '$.content.days') as calendarBody,
-                strftime(extractions.created_at, '%Y-%m-%d') AS ScrapeDate,
-                extractions.property_id,
-            FROM
-                consultancy_d.extractions
-            LEFT JOIN
-                consultancy_d.properties ON properties.id = extractions.property_id
-            WHERE
-                type == 'calendar'
-            AND
-                calendarBody NOT NULL
-        """)
-
-    def capacity_comparison_of_region(self, region_id_1, region_id_2):
-        return self.connection.sql(f"""
-            SELECT
-                JSON_EXTRACT(body, '$.content.days') as calendarBody,
-                strftime(extractions.created_at, '%Y-%m-%d') AS ScrapeDate,
-                extractions.property_id,
-                properties.seed_id
-            FROM
-                consultancy_d.extractions
-            LEFT JOIN
-                consultancy_d.properties ON properties.id = extractions.property_id
-            WHERE
-                type == 'calendar' AND
-                (properties.seed_id = {region_id_1} OR
-                properties.seed_id = {region_id_2}) AND
-                calendarBody NOT NULL
-        """)
-
-    def unique_scrapeDates(self):
-        return self.connection.sql(f"""
-            SELECT DISTINCT
-                strftime(extractions.created_at, '%Y-%m-%d') AS ScrapeDate,
-            FROM
-                consultancy_d.extractions
-        """)
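The reworked `extractions`/`extractions_for` queries are consumed the same way as before: the DuckDB relation is materialised with `.pl()` and processed in polars. A usage sketch follows (the property id is an arbitrary example). Interpolating the id through an f-string works because the value is an `int` coming from the route, although a bound parameter would be the more defensive choice:

```python
# Sketch of consuming the reworked query; assumes data.load() returns the
# Database wrapper with the consultancy_d schema attached, as elsewhere in the repo.
import data

d = data.load()
rel = d.extractions_for(property_id=123)   # DuckDB relation
df = rel.pl()                              # materialise as a polars DataFrame
print(df.columns)                          # expected: calendar, property_id, created_at
```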
@@ -1,18 +0,0 @@
from pathlib import Path
from pickle import dump, load

Path('cache').mkdir(parents=True, exist_ok=True)

# load pickle obj
def openObj(file):
    filepath = Path(f"cache/{file}")
    if filepath.is_file():
        with open(filepath, 'rb') as f:
            return load(f)
    return False

# save pickle obj
def saveObj(file, result):
    filepath = Path(f"cache/{file}")
    with open(filepath, 'wb') as f:
        dump(result, f)
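The deleted cache module was a thin pickle wrapper: `saveObj()` writes to `cache/<name>` and `openObj()` returns `False` on a miss. A round-trip sketch (the file name is an arbitrary example):

```python
# Round trip through the removed helpers shown above.
result = {"capacities": [50.0, 75.0], "dates": ["2024-01-01", "2024-02-01"]}
saveObj("etl_property_capacities_42.obj", result)

assert openObj("etl_property_capacities_42.obj") == result
assert openObj("does_not_exist.obj") is False
```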
@@ -0,0 +1,47 @@
import polars as pl
import json
from datetime import datetime, timedelta

def expansion_Pipeline(df):
    '''
    Rearranges a given extractions Dataframe into an expanded Dataframe.
    New columns: property_id, created_at, calendar_date, calendar_value
    :param df: Inputs from database.py/extractions or database.py/extractions_for functions
    :return: expanded dataframe
    '''
    data = []

    for row in df.iter_rows():
        propId = row[1]
        createdAt = row[2]
        if row[0]:
            temp = json.loads(row[0])
            keys = temp.keys()
            for key in keys:
                out = [propId, createdAt.date(), datetime.strptime(key, '%Y-%m-%d').date(), temp[key]]
                data.append(out)

    df = pl.DataFrame(data, schema=["property_id", "created_at", "calendar_date", "calendar_value"])
    return df


def liveDates_Pipeline(df):
    '''
    Returns the expanded Dataframe with only the live data and no future data
    :param df: Inputs from database.py/extractions or database.py/extractions_for functions
    :return: expanded and filtered dataframe
    '''
    df = expansion_Pipeline(df)
    print(df)
    df = df.filter(pl.col("calendar_date") == pl.col("created_at")+timedelta(days=2))
    return df

def liveDates_PipelineFromExpanded(df):
    '''
    Filters an already expanded df and returns only the live data and no future data
    NOTE: The actual live date and the next day are always 0. The reason is most likely that it is forbidden to
    book on the current or next day. Workaround: compare with the day after tomorrow.
    :param df: Inputs from expansion_Pipeline
    :return: expanded and filtered dataframe
    '''
    df = df.filter(pl.col("calendar_date") == pl.col("created_at")+timedelta(days=2))
    return df
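A usage sketch for `expansion_Pipeline()` with a hand-built frame shaped like the output of `Database.extractions()` (columns `calendar`, `property_id`, `created_at`); the sample values are assumptions for illustration:

```python
from datetime import datetime
import polars as pl

sample = pl.DataFrame({
    "calendar": ['{"2024-06-01": 2, "2024-06-02": 0}'],
    "property_id": ["123"],
    "created_at": [datetime(2024, 5, 30)],
})

expanded = expansion_Pipeline(sample)
print(expanded)  # 2 rows: property_id, created_at, calendar_date, calendar_value
```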
@@ -1,46 +0,0 @@
from io import StringIO

import polars as pl

import data
from data import etl_cache

d = data.load()

def property_capacities(id: int):

    file = f"etl_property_capacities_{id}.obj"
    obj = etl_cache.openObj(file)
    if obj:
        return obj

    extractions = d.extractions_for(id).pl()
    df_dates = pl.DataFrame()

    for row in extractions.rows(named=True):
        df_calendar = pl.read_json(StringIO(row['calendar']))
        #df_calendar.insert_column(0, pl.Series("created_at", [row['created_at']]))
        df_dates = pl.concat([df_calendar, df_dates], how="diagonal")

    # order = sorted(df_dates.columns)
    # df_dates = df_dates.select(order)
    sum_hor = df_dates.sum_horizontal()
    #print(sum_hor)
    # Get the available dates per extraction
    count_days = []
    for dates in df_dates.rows():
        # Remove all None values
        liste = [x for x in dates if x is not None]
        count_days.append(len(liste))

    counts = pl.DataFrame({"count_days" : count_days, "sum" : sum_hor})
    result = {"capacities": [], "dates": extractions['date'].cast(pl.Date).cast(pl.String).to_list() }

    for row in counts.rows(named=True):
        max_capacity = row['count_days'] * 2
        max_capacity_perc = 100 / max_capacity
        result['capacities'].append(round(max_capacity_perc * row['sum'], 2))
    result['capacities'].reverse()

    etl_cache.saveObj(file, result)
    return result
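A worked example of the capacity formula used above: each known calendar day contributes at most 2, so an extraction with 30 known days has a maximum of 60, and a horizontal sum of 45 maps to 75%:

```python
count_days = 30
max_capacity = count_days * 2            # 60
max_capacity_perc = 100 / max_capacity   # percent per availability unit
print(round(max_capacity_perc * 45, 2))  # 75.0
```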
@@ -1,41 +0,0 @@
from io import StringIO

import polars as pl

import data
from data import etl_cache

d = data.load()

def property_capacities_daily(id: int, scrapeDate: str):

    file = f"etl_property_capacities_weekdays_{id}_{scrapeDate}.obj"
    obj = etl_cache.openObj(file)
    if obj:
        return obj

    extractions = d.extractions_propId_scrapeDate(id, scrapeDate).pl()
    weekdays = ['Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday', 'Sunday']
    df_calendar = pl.DataFrame()
    numWeeks = 0

    for row in extractions.rows(named=True):
        scrapeDate = row['created_at']
        df_calendar = pl.read_json(StringIO(row['calendar']))
        columnTitles = df_calendar.columns
        df_calendar = df_calendar.transpose()
        df_calendar = df_calendar.with_columns(pl.Series(name="dates", values=columnTitles))
        df_calendar = df_calendar.with_columns((pl.col("dates").str.to_date()))
        numWeeks = round((df_calendar.get_column("dates").max() - df_calendar.get_column("dates").min()).days / 7, 0)
        df_calendar = df_calendar.with_columns(pl.col("dates").dt.weekday().alias("weekday_num"))
        df_calendar = df_calendar.with_columns(pl.col("dates").dt.strftime("%A").alias("weekday"))
        df_calendar = df_calendar.drop("dates")

    df_calendar = df_calendar.group_by(["weekday", "weekday_num"]).agg(pl.col("column_0").sum())
    df_calendar = df_calendar.with_columns((pl.col("column_0") / numWeeks / 2 * 100).alias("column_0"))
    df_calendar = df_calendar.sort('weekday_num')
    df_calendar = df_calendar.drop('weekday_num')

    result = {"date": scrapeDate, "weekdays": df_calendar['weekday'].to_list(), 'capacities': df_calendar['column_0'].to_list()}
    etl_cache.saveObj(file, result)
    return result
@@ -1,38 +0,0 @@
from io import StringIO

import polars as pl

import data
from data import etl_cache

d = data.load()

def property_capacities_monthly(id: int, scrapeDate: str):

    file = f"etl_property_capacities_monthly_{id}_{scrapeDate}.obj"
    obj = etl_cache.openObj(file)
    if obj:
        return obj

    extractions = d.extractions_propId_scrapeDate(id, scrapeDate).pl()
    df_calendar = pl.DataFrame()

    for row in extractions.rows(named=True):
        scrapeDate = row['created_at']
        df_calendar = pl.read_json(StringIO(row['calendar']))
        columnTitles = df_calendar.columns
        df_calendar = df_calendar.transpose()
        df_calendar = df_calendar.with_columns(pl.Series(name="dates", values=columnTitles))
        df_calendar = df_calendar.with_columns((pl.col("dates").str.to_date()))
        df_calendar = df_calendar.with_columns((pl.col("dates").dt.month_end().dt.day().alias('numDays')))
        df_calendar = df_calendar.with_columns((pl.col("dates").dt.strftime("%b") + " " + (pl.col("dates").dt.strftime("%Y"))).alias('date_short'))
        df_calendar = df_calendar.with_columns((pl.col("dates").dt.strftime("%Y") + " " + (pl.col("dates").dt.strftime("%m"))).alias('dates'))

    df_calendar = df_calendar.group_by(['dates', 'date_short', 'numDays']).agg(pl.col("column_0").sum())
    df_calendar = df_calendar.with_columns((pl.col("column_0") / pl.col("numDays") / 2 * 100).alias("column_0"))
    df_calendar = df_calendar.sort('dates')
    result = {"months": df_calendar['date_short'].to_list(), 'capacities': df_calendar['column_0'].to_list()}
    etl_cache.saveObj(file, result)
    return result
@@ -1,73 +0,0 @@
from math import asin, atan2, cos, degrees, radians, sin, sqrt

import polars as pl

import data
from data import etl_cache

d = data.load()


def calcHaversinDistance(latMain, lonMain, lat, lon):
    R = 6371

    # convert decimal degrees to radians
    latMain, lonMain, lat, lon = map(radians, [latMain, lonMain, lat, lon])

    # haversine formula
    dlon = lonMain - lon
    dlat = latMain - lat

    a = sin(dlat / 2) ** 2 + cos(lat) * cos(latMain) * sin(dlon / 2) ** 2
    c = 2 * atan2(sqrt(a), sqrt(1-a))
    d = R * c

    return d


def property_neighbours(id: int):

    file = f"etl_property_neighbours_{id}.obj"
    obj = etl_cache.openObj(file)
    if obj:
        return obj

    extractions = d.properties_geo_seeds().pl()

    # Get lat, long and region from main property
    latMain, lonMain = extractions.filter(pl.col('id') == str(id))['coordinates'][0].split(',')
    latMain, lonMain = map(float, [latMain, lonMain])
    region = extractions.filter(pl.col('id') == str(id))['seed_id'][0]

    # Prefilter the dataframe to only the correct region
    extractions = extractions.filter(pl.col('seed_id') == str(region))
    extractions = extractions.drop('seed_id')

    # Remove main property from DF
    extractions = extractions.filter(pl.col('id') != str(id))

    # Split coordinate into lat and lon
    extractions = extractions.with_columns(pl.col("coordinates").str.split_exact(",", 1).struct.rename_fields(["lat", "lon"]).alias("lat/lon")).unnest("lat/lon")
    extractions = extractions.drop('coordinates')
    extractions = extractions.with_columns(pl.col("lat").cast(pl.Float32))
    extractions = extractions.with_columns(pl.col("lon").cast(pl.Float32))

    # Calculate distances
    distances = []
    for row in extractions.rows(named=True):
        lat = row['lat']
        lon = row['lon']
        dist = calcHaversinDistance(latMain, lonMain, lat, lon)
        distances.append(dist)

    # Add distance to DF
    extractions = extractions.with_columns(pl.Series(name="distances", values=distances))

    # Sort for distance and give only first 10
    extractions = extractions.sort("distances").head(10)
    extractions = extractions.drop('distances')

    result = extractions.to_dicts()
    etl_cache.saveObj(file, result)

    return result
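A quick sanity check for `calcHaversinDistance()`; the coordinates are approximate values for Davos and St. Moritz, which lie roughly 35 km apart as the crow flies:

```python
# Approximate coordinates, chosen only to illustrate the expected magnitude.
print(round(calcHaversinDistance(46.8027, 9.8360, 46.4908, 9.8355), 1))  # ~34.7 km
```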
@@ -1,58 +0,0 @@
from datetime import date
from io import StringIO

import polars as pl

import data
from data import etl_cache

d = data.load()

def region_capacities(id: int):

    file = f"etl_region_capacities_{id}.obj"
    obj = etl_cache.openObj(file)
    if obj:
        return obj

    # Get Data
    if id == -1:
        extractions = d.capacity_global().pl()
    else:
        extractions = d.capacity_of_region(id).pl()
    # turn PropertyIDs to ints for sorting
    extractions = extractions.cast({"property_id": int})

    extractions.drop('property_id')
    df_dates = pl.DataFrame()

    # Get Data from JSON
    gridData = pl.DataFrame(schema=[("scrape_date", pl.String), ("sum_hor", pl.Int64), ("calendar_width", pl.Int64)])
    dayCounts = []
    for row in extractions.rows(named=True):
        # Return 0 for sum if calendar is null
        if row['calendarBody']:
            calDF = pl.read_json(StringIO(row['calendarBody']))
            sum_hor = calDF.sum_horizontal()[0]
        else:
            sum_hor = 0
        gridData = gridData.vstack(pl.DataFrame({"scrape_date" : row['ScrapeDate'], "sum_hor": sum_hor, "calendar_width": calDF.width}))

    # Create Aggregates of values
    df_count = gridData.group_by("scrape_date").agg(pl.col("sum_hor").count())
    df_sum = gridData.group_by("scrape_date").agg(pl.col("sum_hor").sum())
    df_numDays = gridData.group_by("scrape_date").agg(pl.col("calendar_width").max())

    # Join and rename DF's
    df = df_sum.join(df_count, on= 'scrape_date').join(df_numDays, on= 'scrape_date')

    # Calculate normed capacities for each scrapeDate
    df = df.with_columns((pl.col("sum_hor") / pl.col("sum_hor_right") / (pl.col("calendar_width")*2) * 100).alias("capacity"))

    # Sort the date column
    df = df.cast({"scrape_date": date}).sort('scrape_date')

    result = {"capacities": df['capacity'].to_list(), "dates": df['scrape_date'].to_list()}

    etl_cache.saveObj(file, result)
    return result
@@ -1,64 +0,0 @@
from datetime import datetime, timedelta
from io import StringIO

import polars as pl

import data
from data import etl_cache

d = data.load()

def region_capacities_daily(id: int, scrapeDate_start: str):

    file = f"etl_region_capacities_weekdays_{id}_{scrapeDate_start}.obj"
    obj = etl_cache.openObj(file)
    if obj:
        return obj

    # Get end date of start search-window
    scrapeDate_end = scrapeDate_start + timedelta(days=1)

    # Get Data
    if id == -1:
        extractions = d.singleScrape_of_global_scrapDate(scrapeDate_start, scrapeDate_end).pl()
    else:
        extractions = d.singleScrape_of_region_scrapDate(id, scrapeDate_start, scrapeDate_end).pl()

    df_calendar = pl.DataFrame()
    numWeeks = 0
    firstExe = True
    counter = 0
    for row in extractions.rows(named=True):
        scrapeDate = row['created_at']
        if row['calendarBody']:
            counter += 1
            df_calendar = pl.read_json(StringIO(row['calendarBody']))
            columnTitles = df_calendar.columns
            df_calendar = df_calendar.transpose()
            df_calendar = df_calendar.with_columns(pl.Series(name="dates", values=columnTitles))
            df_calendar = df_calendar.with_columns((pl.col("dates").str.to_date()))
            numWeeks = round((df_calendar.get_column("dates").max() - df_calendar.get_column("dates").min()).days / 7, 0)
            df_calendar = df_calendar.with_columns(pl.col("dates").dt.weekday().alias("weekday_num"))
            df_calendar = df_calendar.with_columns(pl.col("dates").dt.strftime("%A").alias("weekday"))
            df_calendar = df_calendar.drop("dates")

            df_calendar = df_calendar.group_by(["weekday", "weekday_num"]).agg(pl.col("column_0").sum())
            df_calendar = df_calendar.with_columns((pl.col("column_0") / numWeeks / 2 * 100).alias("column_0"))
            df_calendar = df_calendar.sort('weekday_num')
            df_calendar = df_calendar.drop('weekday_num')
            df_calendar = df_calendar.rename({'column_0': str(counter)})

            if firstExe:
                outDf = df_calendar
                firstExe = False
            else:
                outDf = outDf.join(df_calendar, on='weekday')

    # Calculate horizontal Mean
    means = outDf.mean_horizontal()
    outDf = outDf.insert_column(1, means)
    outDf = outDf[['weekday', 'mean']]

    result = {"weekdays": outDf['weekday'].to_list(),'capacities': outDf['mean'].to_list()}
    etl_cache.saveObj(file, result)
    return result
@@ -1,65 +0,0 @@
from datetime import datetime, timedelta
from io import StringIO

import polars as pl

import data
from data import etl_cache

d = data.load()


def region_capacities_monthly(id: int, scrapeDate_start: str):

    file = f"etl_region_capacities_monthly_{id}_{scrapeDate_start}.obj"
    obj = etl_cache.openObj(file)
    if obj:
        return obj

    # Get end date of start search-window
    scrapeDate_end = scrapeDate_start + timedelta(days=1)

    # Get Data
    if id == -1:
        extractions = d.singleScrape_of_global_scrapDate(scrapeDate_start, scrapeDate_end).pl()
    else:
        extractions = d.singleScrape_of_region_scrapDate(id, scrapeDate_start, scrapeDate_end).pl()

    df_calendar = pl.DataFrame()
    numWeeks = 0
    firstExe = True
    counter = 0
    for row in extractions.rows(named=True):
        scrapeDate = row['created_at']
        if row['calendarBody']:
            counter += 1
            df_calendar = pl.read_json(StringIO(row['calendarBody']))
            columnTitles = df_calendar.columns
            df_calendar = df_calendar.transpose()
            df_calendar = df_calendar.with_columns(pl.Series(name="dates", values=columnTitles))
            df_calendar = df_calendar.with_columns((pl.col("dates").str.to_date()))
            df_calendar = df_calendar.with_columns((pl.col("dates").dt.month_end().dt.day().alias('numDays')))
            df_calendar = df_calendar.with_columns((pl.col("dates").dt.strftime("%b") + " " + (pl.col("dates").dt.strftime("%Y"))).alias('date_short'))
            df_calendar = df_calendar.with_columns((pl.col("dates").dt.strftime("%Y") + " " + (pl.col("dates").dt.strftime("%m"))).alias('dates'))
            df_calendar = df_calendar.group_by(['dates', 'date_short','numDays']).agg(pl.col("column_0").sum())
            df_calendar = df_calendar.with_columns((pl.col("column_0") / pl.col("numDays") / 2 * 100).alias("column_0"))
            df_calendar = df_calendar.sort('dates')
            df_calendar = df_calendar.drop('dates')
            df_calendar = df_calendar.drop('numDays')

            df_calendar = df_calendar.rename({'column_0': str(counter)})

            if firstExe:
                outDf = df_calendar
                firstExe = False
            else:
                outDf = outDf.join(df_calendar, on='date_short')

    # Calculate horizontal Mean
    means = outDf.mean_horizontal()
    outDf = outDf.insert_column(1, means)
    outDf = outDf[['date_short', 'mean']]

    result = {"date": scrapeDate, "months": outDf['date_short'].to_list(),'capacities': outDf['mean'].to_list()}
    etl_cache.saveObj(file, result)
    return result
@@ -1,136 +0,0 @@
from datetime import date, datetime, timedelta
from io import StringIO

import polars as pl

import data
from data import etl_cache

d = data.load()

def region_movingAverage(id: int, scrape_date_start_min: datetime.date):

    file = f"etl_region_movingAverage_{id}_{scrape_date_start_min}.obj"
    obj = etl_cache.openObj(file)
    if obj:
        return obj

    # Settings
    # Offset between actual and predict ScrapeDate
    timeOffset = 30

    # Calculation Frame
    calcFrame = 180

    # Filter Setting
    windowSize = 7


    # Get unique ScrapeDates
    uniqueScrapeDates = d.unique_scrapeDates().pl()
    uniqueScrapeDates = uniqueScrapeDates.get_column('ScrapeDate').str.to_date()
    uniqueScrapeDates = uniqueScrapeDates.sort().to_list()

    # Get end date of start search-window
    scrape_date_start_max = scrape_date_start_min + timedelta(days=1)

    # Get start and end date of End search-window
    scrape_date_end_min = scrape_date_start_min + timedelta(days=timeOffset)

    # Get closest ScrapeDate
    scrape_date_end_min = min(uniqueScrapeDates, key=lambda x: abs(x - scrape_date_end_min))
    scrape_date_end_max = scrape_date_end_min + timedelta(days=1)

    final_end_date = scrape_date_end_min + timedelta(days=calcFrame)

    # Get Data
    if id == -1:
        ex_start = d.singleScrape_of_global(scrape_date_start_min, scrape_date_start_max)
        ex_start_count = ex_start.shape[0]

        ex_end = d.singleScrape_of_global(scrape_date_end_min, scrape_date_end_max)
        ex_end_count = ex_end.shape[0]
    else:
        ex_start = d.singleScrape_of_region(id, scrape_date_start_min, scrape_date_start_max)
        ex_start_count = ex_start.shape[0]

        ex_end = d.singleScrape_of_region(id, scrape_date_end_min, scrape_date_end_max)
        ex_end_count = ex_end.shape[0]

    num_properties = [ex_start_count, ex_end_count]
    start_end = [ex_start, ex_end]
    outDFList = []

    for df in start_end:
        df = df.pl()
        firstExe = True
        counter = 1
        outDF = pl.DataFrame(schema={"0": int, "dates": date})
        for row in df.rows(named=True):
            if row['calendarBody']:
                calDF = pl.read_json(StringIO(row['calendarBody']))
                columnTitles = calDF.columns
                calDF = calDF.transpose()
                calDF = calDF.with_columns(pl.Series(name="dates", values=columnTitles))
                calDF = calDF.with_columns((pl.col("dates").str.to_date()))

                # Filter out all Data that's in the calculation frame
                calDF = calDF.filter((pl.col("dates") >= (scrape_date_start_min + timedelta(days=1))))
                calDF = calDF.filter((pl.col("dates") < final_end_date))

                # Join all information into one Dataframe
                if firstExe:
                    outDF = calDF
                    firstExe = False
                else:
                    outDF = outDF.join(calDF, on='dates')
                    outDF = outDF.rename({'column_0': str(counter)})
                    counter += 1

        outDF = outDF.sort('dates')
        outDFList.append(outDF)


    # Calculate the horizontal Sum for all Dates
    arrayCunter = 0
    tempDFList = []
    for df in outDFList:
        dates = df.select(pl.col("dates"))
        values = df.select(pl.exclude("dates"))
        sum_hor = values.sum_horizontal()

        sum_hor = sum_hor / num_properties[arrayCunter] / 2 * 100
        arrayCunter += 1

        newDF = dates.with_columns(sum_hor=pl.Series(sum_hor))
        tempDFList.append(newDF)

    # Join actual and predict Values
    outDF = tempDFList[0].join(tempDFList[1], on='dates', how='outer')

    # Rename Columns for clarity
    outDF = outDF.drop('dates_right')

    # sum_hor_predict is the data from the earlier ScrapeDate
    outDF = outDF.rename({'sum_hor_right': 'sum_hor_actual', 'sum_hor': 'sum_hor_predict'})

    # Calculate Moving average from Start
    baseValues = outDF.get_column('sum_hor_predict').to_list()
    i = 0
    moving_averages = []
    while i < len(baseValues) - windowSize + 1:
        window = baseValues[i: i + windowSize]
        window_average = sum(window) / windowSize
        moving_averages.append(window_average)
        i += 1

    # Add empty values back to the front and end of moving_averages
    num_empty = int(windowSize / 2)
    moving_averages = [None] * num_empty + moving_averages + [None] * num_empty

    # Add moving_averages to df
    outDF = outDF.with_columns(moving_averages=pl.Series(moving_averages))

    result = {'dates': outDF.get_column('dates').to_list(), 'capacities_timeframe_before': outDF.get_column('sum_hor_predict').to_list(), 'capacities_timeframe_after': outDF.get_column('sum_hor_actual').to_list(), 'capacities_moving_average': outDF.get_column('moving_averages').to_list(),}
    etl_cache.saveObj(file, result)
    return result
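A small-scale sketch of the moving-average step above: a window of 7 yields `len(values) - 6` averages, which are then padded with `None` on both ends so the series keeps its original length and stays centred on each date:

```python
values = [10, 20, 30, 40, 50, 60, 70, 80, 90]
windowSize = 7
averages = [sum(values[i:i + windowSize]) / windowSize
            for i in range(len(values) - windowSize + 1)]   # [40.0, 50.0, 60.0]
pad = [None] * (windowSize // 2)
print(pad + averages + pad)   # 9 entries again, centred on the middle dates
```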
@@ -1,64 +0,0 @@
from io import StringIO

import polars as pl

import data
from data import etl_cache

d = data.load()

def region_properties_capacities(id: int):

    file = f"etl_region_properties_capacities_{id}.obj"
    obj = etl_cache.openObj(file)
    if obj:
        return obj

    # Get Data
    if id == -1:
        df = d.capacity_global().pl()
    else:
        df = d.capacity_of_region(id).pl()
    # turn PropertyIDs to ints for sorting
    df = df.cast({"property_id": int})

    # Get uniques for dates and propIDs and sort them
    listOfDates = df.get_column("ScrapeDate").unique().sort()
    listOfPropertyIDs = df.get_column("property_id").unique().sort()

    # Create DFs from lists to merge later
    datesDF = pl.DataFrame(listOfDates).with_row_index("date_index")
    propIdDF = pl.DataFrame(listOfPropertyIDs).with_row_index("prop_index")

    # Merge Dataframe to generate indices
    df = df.join(datesDF, on='ScrapeDate')
    df = df.join(propIdDF, on='property_id')

    # Calculate grid values
    gridData = pl.DataFrame(schema=[("scrape_date", pl.String), ("property_id", pl.String), ("sum_hor", pl.Int64)])
    for row in df.rows(named=True):
        # Return 0 for sum if calendar is null
        if row['calendarBody']:
            calDF = pl.read_json(StringIO(row['calendarBody']))
            sum_hor = calDF.sum_horizontal()[0]
        else:
            sum_hor = 0

        gridData = gridData.vstack(pl.DataFrame({"scrape_date" : row['ScrapeDate'], "property_id": str(row['property_id']), "sum_hor": sum_hor}))

    # get the overall maximum sum
    maxValue = gridData['sum_hor'].max()
    values = []

    for row in gridData.rows(named=True):
        capacity = (row['sum_hor']*100)/maxValue
        values.append({"date" : row['scrape_date'], "property_id": row['property_id'], "capacity": capacity})

    # Cast listOfDates to datetime
    listOfDates = listOfDates.cast(pl.Date).to_list()
    listOfPropertyIDs = listOfPropertyIDs.cast(pl.String).to_list()

    # Create JSON
    outDict = {'dates': listOfDates, 'property_ids': listOfPropertyIDs, 'values': values}
    etl_cache.saveObj(file, outDict)
    return outDict
File diff suppressed because one or more lines are too long
@@ -0,0 +1,22 @@
import polars as pl

import data

inst = data.load()
test = inst.extractions_for(1).pl()

out = test.with_columns(
    pl.col("calendar").str.extract_all(r"([0-9]{4}-[0-9]{2}-[0-9]{2})|[0-2]").alias("extracted_nrs"),
)
out.drop(['calendar', 'property_id'])
ll = out.get_column("extracted_nrs").explode().gather_every(2)
llo = out.get_column("extracted_nrs").explode().gather_every(2, offset=1)
lli = ll.list.concat(llo)


print(ll)
print(lli)
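The pattern used above pulls alternating ISO dates and single availability digits (0, 1 or 2) out of the calendar JSON, which is why the exploded column is then split with `gather_every(2)` and `offset=1`. A plain-`re` illustration (without the capturing group, since polars' `extract_all` always returns the full match); the sample string is an assumption:

```python
import re

calendar = '{"2024-06-01": 2, "2024-06-02": 0}'
print(re.findall(r"[0-9]{4}-[0-9]{2}-[0-9]{2}|[0-2]", calendar))
# ['2024-06-01', '2', '2024-06-02', '0']
```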
@ -0,0 +1,199 @@
|
||||||
|
import MySQLdb
|
||||||
|
import json
|
||||||
|
from datetime import datetime, timedelta
|
||||||
|
import numpy as np
|
||||||
|
|
||||||
|
|
||||||
|
def getPropertyDataFromDB():
|
||||||
|
db = MySQLdb.connect(host="localhost",user="root",passwd="admin",db="consultancy")
|
||||||
|
cur = db.cursor()
|
||||||
|
|
||||||
|
cur.execute("SELECT id, seed_id, check_data "
|
||||||
|
"FROM properties ")
|
||||||
|
propData = cur.fetchall()
|
||||||
|
db.close()
|
||||||
|
|
||||||
|
return propData
|
||||||
|
|
||||||
|
def getDataFromDB(propId):
|
||||||
|
'''
|
||||||
|
Function to get data from MySQL database filter with the given propId
|
||||||
|
:return: scrapeDates and calendarData
|
||||||
|
'''
|
||||||
|
db = MySQLdb.connect(host="localhost",user="root",passwd="admin",db="consultancy")
|
||||||
|
cur = db.cursor()
|
||||||
|
|
||||||
|
cur.execute("SELECT JSON_EXTRACT(header, '$.Date') "
|
||||||
|
"FROM extractions "
|
||||||
|
f"WHERE type='calendar' AND property_id = {propId};")
|
||||||
|
scrapeDates = cur.fetchall()
|
||||||
|
|
||||||
|
cur.execute("SELECT JSON_EXTRACT(body, '$.content.days') "
|
||||||
|
"FROM extractions "
|
||||||
|
f"WHERE type='calendar' AND property_id = {propId};")
|
||||||
|
calendarData = cur.fetchall()
|
||||||
|
db.close()
|
||||||
|
|
||||||
|
return scrapeDates, calendarData
|
||||||
|
|
||||||
|
def getUniqueScrapeDates():
|
||||||
|
db = MySQLdb.connect(host="localhost",user="root",passwd="admin",db="consultancy")
|
||||||
|
cur = db.cursor()
|
||||||
|
|
||||||
|
cur.execute("SELECT JSON_EXTRACT(header, '$.Date') "
|
||||||
|
"FROM extractions "
|
||||||
|
f"WHERE type='calendar'")
|
    uniqueScrapeDates = cur.fetchall()
    db.close()

    return uniqueScrapeDates


def getPropsPerScrape(scrapeDate):
    '''
    Gets all property_ids that have a calendar extraction on the given scrape date
    :param scrapeDate: date string in the format '%Y-%m-%d'
    :return: list of single-element tuples containing the property_ids
    '''
    date = datetime.strptime(scrapeDate, '%Y-%m-%d')
    end_date = date + timedelta(days=1)

    db = MySQLdb.connect(host="localhost", user="root", passwd="admin", db="consultancy")
    cur = db.cursor()

    cur.execute("SELECT property_id "
                "FROM extractions "
                f"WHERE type='calendar' AND created_at > '{scrapeDate}' AND created_at < '{str(end_date)}'")
    uniqueScrapeDates = cur.fetchall()
    db.close()

    return uniqueScrapeDates


def getuniquePropIdFromDB():
    '''
    Function to get the unique property_ids from the MySQL database
    :return: propList
    '''
    db = MySQLdb.connect(host="localhost", user="root", passwd="admin", db="consultancy")
    cur = db.cursor()
    cur.execute("SELECT DISTINCT property_id "
                "FROM extractions;")
    propIds = cur.fetchall()
    db.close()

    propList = []
    for propId in propIds:
        propList.append(propId[0])

    return propList


def reformatScrapeDates(scrapeDatesIn):
    '''
    Reformats the scrape dates column to a shortened datetime format ('%Y-%m-%d')
    :param scrapeDatesIn: rows containing the JSON-encoded 'Date' header of each scrape
    :return: list of reformatted date strings
    '''
    scrapeDates = []
    for row in scrapeDatesIn:
        date = datetime.strptime(json.loads(row[0])[0], '%a, %d %b %Y %H:%M:%S %Z').date()
        dateStr = date.strftime('%Y-%m-%d')
        scrapeDates.append(dateStr)

    return scrapeDates


def checkForLostProprty(calendarData):
    '''
    Checks if there are "None" entries in the calendarData, meaning the property was no longer found
    :param calendarData: calendar rows of one property
    :return: Boolean indicating if there are "None" entries in the calendarData
    '''
    for row in calendarData:
        if None in row:
            return True
    return False


def getMinMaxDate(calendarData):
    '''
    Gets the min and max date from the calendar data
    :param calendarData: all calendar data returned by the query
    :return: list of all dates between the minimal and the maximal date
    '''
    # determine the minimal and maximal date
    fullDateList = []
    for row in calendarData:
        tempJson = json.loads(row[0]).keys()
        for key in tempJson:
            #print(key)
            fullDateList.append(datetime.strptime(key, '%Y-%m-%d').date())

    end_dt = max(fullDateList)
    start_dt = min(fullDateList)
    delta = timedelta(days=1)
    HeaderDates = []

    while start_dt <= end_dt:
        HeaderDates.append(start_dt)
        start_dt += delta

    return HeaderDates


def creatDataMatrix(HeaderDates, calendarData):
    '''
    Creates the data matrix from the calendar data
    :param HeaderDates: the list of all possible dates in the dataset, used as the headers
    :param calendarData: the main information from the SQL query
    :return: data matrix with one row per scrape and one column per date (-1 = date not present)
    '''
    data = []
    for row in calendarData:
        tempList = [-1] * len(HeaderDates)
        tempJson = json.loads(row[0])
        for key in tempJson:
            date = datetime.strptime(key, '%Y-%m-%d').date()
            content = tempJson[key]
            index = [i for i, x in enumerate(HeaderDates) if x == date]
            tempList[index[0]] = content
        data.append(tempList)

    return data


def getAccuracy(df, baseLine, compLine):
    '''
    Calculates the accuracy of a given dataframe for a given baseLine and compLine row
    :param df: dataframe with one row per scrape (first column is the scrape date)
    :param baseLine: index of the baseline row
    :param compLine: index of the comparison row
    :return: Accuracy: the percentage of dates that had the same information in both baseLine and compLine
    '''
    try:
        df = df.iloc[[baseLine, compLine]]
    except IndexError:
        return -1
    total = 0
    noChange = 0
    first = True  # skip the first column (scrape date)
    for series_name, series in df.items():
        if first:
            first = False
        else:
            total += 1
            #print(series_name)
            if series[baseLine] != -1:
                if series[compLine] != -1:
                    if series[baseLine] == series[compLine]:
                        noChange += 1

    accuracy = noChange / total
    return accuracy


def getMeanAccuracy(accList):
    '''
    Get the mean accuracy over the entire time delay of one property
    :param accList: list of accuracy values of a comparison
    :return: average of the accuracy values while ignoring the '-1' values
    '''
    out = []
    for row in accList:
        row = [x for x in row if x != -1]
        out.append(np.average(row))
    return out
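# --- Hypothetical usage sketch (not part of the original module): a tiny worked example
# of getAccuracy() / getMeanAccuracy(). The demo frame below is made up; it only assumes
# the layout the functions expect -- the first column is the scrape date (skipped by
# getAccuracy) and -1 marks a date that was missing in a scrape.
if __name__ == "__main__":
    import pandas as pd

    demo = pd.DataFrame({
        'ScrapeDate': ['2024-01-01', '2024-01-04', '2024-01-07'],
        '2024-02-01': [0, 0, 2],    # unchanged between rows 0 and 1
        '2024-02-02': [1, 2, 2],    # changed between rows 0 and 1
        '2024-02-03': [-1, 1, 1],   # -1 in the baseline row -> never counted as a match
    })
    # 1 matching column out of 3 compared columns (the -1 column still counts in the
    # denominator), so the accuracy of row 0 vs. row 1 is 1/3.
    print(getAccuracy(demo, 0, 1))
    # Out-of-range comparisons return -1, and getMeanAccuracy() ignores that sentinel:
    print(getAccuracy(demo, 2, 3))            # -> -1
    print(getMeanAccuracy([[0.5, -1, 1.0]]))  # -> [0.75]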
@ -0,0 +1,83 @@
from datetime import datetime, timedelta
import json

import MySQLdb               # Version 2.2.4
import pandas as pd          # Version 2.2.3
import plotly.express as px  # Version 5.24.1

db = MySQLdb.connect(host="localhost", user="root", passwd="admin", db="consultancy")
cur = db.cursor()

cur.execute("SELECT JSON_EXTRACT(header, '$.Date') "
            "FROM extractions "
            "WHERE type='calendar' AND property_id = 200;")
dateoutput = cur.fetchall()

cur.execute("SELECT JSON_EXTRACT(body, '$.content.days') "
            "FROM extractions "
            "WHERE type='calendar' AND property_id = 200;")
output = cur.fetchall()
db.close()

# Create the list of scrape dates
ytickVals = list(range(0, 30, 5))
scrapeDates = []
#print(dateoutput)
for row in dateoutput:
    date = datetime.strptime(json.loads(row[0])[0], '%a, %d %b %Y %H:%M:%S %Z').date()
    dateStr = date.strftime('%d/%m/%Y')
    scrapeDates.append(dateStr)

# Determine the minimal and maximal date
fullDateList = []
for row in output:
    tempJson = json.loads(row[0]).keys()
    for key in tempJson:
        #print(key)
        fullDateList.append(datetime.strptime(key, '%Y-%m-%d').date())

end_dt = max(fullDateList)
start_dt = min(fullDateList)
delta = timedelta(days=1)
HeaderDates = []

while start_dt <= end_dt:
    HeaderDates.append(start_dt)
    start_dt += delta

# Create the data matrix
data = []
for row in output:
    tempList = [-1] * len(HeaderDates)
    tempJson = json.loads(row[0])
    for key in tempJson:
        date = datetime.strptime(key, '%Y-%m-%d').date()
        content = tempJson[key]
        index = [i for i, x in enumerate(HeaderDates) if x == date]
        tempList[index[0]] = content
    data.append(tempList)

# Transform to DataFrame for Plotly
df = pd.DataFrame(data, columns=HeaderDates)

# Generate the Plotly diagram
colScale = [[0, 'rgb(0, 0, 0)'], [0.33, 'rgb(204, 16, 16)'], [0.66, 'rgb(10, 102, 15)'], [1, 'rgb(17, 184, 26)']]
fig = px.imshow(df, color_continuous_scale=colScale)
lines = list(range(0, 30, 1))
for i in lines:
    #fig.add_hline(y=i+0.5, line_color="white")
    fig.add_hline(y=i + 0.5)

fig.update_layout(yaxis=dict(tickfont=dict(size=50)))
fig.update_layout(xaxis=dict(tickfont=dict(size=50)))
fig.update_layout(xaxis_title="Verfügbarkeitsdaten Mietobjekt", yaxis_title="Scrapingvorgang")
fig.update_xaxes(title_font_size=100, title_font_weight="bold")
fig.update_yaxes(title_font_size=100, title_font_weight="bold")
fig.update_layout(yaxis=dict(tickmode='array', tickvals=ytickVals, ticktext=scrapeDates))
fig.update_xaxes(title_standoff=80)
fig.update_yaxes(title_standoff=80)
fig.update_layout(xaxis={'side': 'top'})
fig.show()
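# --- Hypothetical minimal sketch (not part of the original script): the same px.imshow()
# call on a tiny made-up matrix, to show what the script feeds Plotly -- one row per
# scrape run, one column per calendar day, -1 where a day was missing from that scrape,
# and the remaining values taken straight from the scraped calendar JSON.
import pandas as pd
import plotly.express as px

demoColScale = [[0, 'rgb(0, 0, 0)'], [0.33, 'rgb(204, 16, 16)'],
                [0.66, 'rgb(10, 102, 15)'], [1, 'rgb(17, 184, 26)']]
demoDf = pd.DataFrame([[0, 1, 2],
                       [-1, 1, 2],
                       [0, 0, 2]],
                      columns=['2024-02-01', '2024-02-02', '2024-02-03'])
demoFig = px.imshow(demoDf, color_continuous_scale=demoColScale)
demoFig.show()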
@ -0,0 +1,58 @@
import Data_Analysis as DA
import pandas as pd

accuracy = pd.read_csv('results/accMeanDf.csv')

propData = DA.getPropertyDataFromDB()
propData = pd.DataFrame(propData, columns=['property_id', 'region', 'geoLocation'])
propData = propData.drop(columns=['geoLocation'])
#print(propData)

merge = pd.merge(propData, accuracy, on="property_id")
#print(merge)

# 1 = Heidiland, 2 = Davos, 3 = Engadin, 4 = St. Moritz
heidiAcc = merge[merge['region'] == 1]
davosAcc = merge[merge['region'] == 2]
EngadAcc = merge[merge['region'] == 3]
StMorAcc = merge[merge['region'] == 4]

heidiMean = heidiAcc.mean(axis=0)
davosMean = davosAcc.mean(axis=0)
EngadMean = EngadAcc.mean(axis=0)
StMorMean = StMorAcc.mean(axis=0)

heidiSDev = heidiAcc.std(axis=0)
davosSDev = davosAcc.std(axis=0)
EngadSDev = EngadAcc.std(axis=0)
StMorSDev = StMorAcc.std(axis=0)

accuracyOverview = pd.DataFrame()

accuracyOverview.insert(0, "St. Moritz StdDev", StMorSDev, True)
accuracyOverview.insert(0, "St. Moritz Mean", StMorMean, True)
accuracyOverview.insert(0, "Engadin StdDev", EngadSDev, True)
accuracyOverview.insert(0, "Engadin Mean", EngadMean, True)
accuracyOverview.insert(0, "Davos StdDev", davosSDev, True)
accuracyOverview.insert(0, "Davos Mean", davosMean, True)
accuracyOverview.insert(0, "Heidi StdDev", heidiSDev, True)
accuracyOverview.insert(0, "Heidi Mean", heidiMean, True)

# Drop the 'property_id' and 'region' rows; only the timedelay rows are of interest
accuracyOverview.drop(index=accuracyOverview.index[0], axis=0, inplace=True)
accuracyOverview.drop(index=accuracyOverview.index[0], axis=0, inplace=True)
accuracyOverview.to_csv('results/accuracyOverview.csv', index=True)

# Delete unused DataFrames
del merge, accuracy, propData
del heidiAcc, davosAcc, EngadAcc, StMorAcc
del heidiMean, davosMean, EngadMean, StMorMean
del heidiSDev, davosSDev, EngadSDev, StMorSDev

print(accuracyOverview)
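# --- Hypothetical alternative sketch (an assumption, not the original approach): the same
# per-region mean/std statistics via a single groupby aggregation. It re-reads the inputs
# so it can run on its own; the region code mapping (1=Heidiland, 2=Davos, 3=Engadin,
# 4=St. Moritz) is taken from the comment above. The result has regions as rows and a
# (timedelay, statistic) column MultiIndex instead of the wide layout built above.
import Data_Analysis as DA
import pandas as pd

acc = pd.read_csv('results/accMeanDf.csv')
props = pd.DataFrame(DA.getPropertyDataFromDB(),
                     columns=['property_id', 'region', 'geoLocation'])
merged = pd.merge(props[['property_id', 'region']], acc, on='property_id')
regionNames = {1: 'Heidiland', 2: 'Davos', 3: 'Engadin', 4: 'St. Moritz'}
overview = (merged.drop(columns=['property_id'])
                  .groupby('region')
                  .agg(['mean', 'std'])
                  .rename(index=regionNames))
print(overview)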
@ -0,0 +1,73 @@
import pandas as pd
import os
import re
import numpy as np


def getAccuracy(df, baseLine, compLine):
    try:
        df = df.iloc[[baseLine, compLine]]
    except IndexError:
        return -1
    total = 0
    noChange = 0
    first = True  # skip the first column (scrape date)
    for series_name, series in df.items():
        if first:
            first = False
        else:
            total += 1
            #print(series_name)
            if series[baseLine] != -1:
                if series[compLine] != -1:
                    if series[baseLine] == series[compLine]:
                        noChange += 1

    accuracy = noChange / total
    return accuracy


def getMeanAccuracy(accList):
    out = []
    for row in accList:
        row = [x for x in row if x != -1]
        out.append(np.average(row))
    return out


deltaList = [1, 2, 10, 20]
# 1  = 1 scrape interval
# 2  = approx. 1 week
# 10 = 1 month (30 days)
# 20 = 2 months

directory = os.fsencode("dok")

columnNames = ['property_id', 'timedelay_1', 'timedelay_2', 'timedelay_10', 'timedelay_20']
accListDf = pd.DataFrame(columns=columnNames)
accMeanDf = pd.DataFrame(columns=columnNames)

for file in os.listdir(directory):
    filename = os.fsdecode(file)
    if filename.endswith(".csv"):
        propId = re.findall(r"\d+", filename)[0]
        print(propId)
        df = pd.read_csv(f'dok/{filename}')
        fullList = []
        accList = []
        # Loop through all deltas in the deltaList
        for delta in deltaList:
            accList = []
            # Loop through all dates as the baseline date
            for i in range(df.shape[0]):
                acc = getAccuracy(df, i, i + delta)
                accList.append(acc)
            fullList.append(accList)

        meanList = getMeanAccuracy(fullList)
        accListDf = accListDf._append({'property_id': propId, 'timedelay_1': fullList[0],
                                       'timedelay_2': fullList[1], 'timedelay_10': fullList[2],
                                       'timedelay_20': fullList[3]}, ignore_index=True)
        accMeanDf = accMeanDf._append({'property_id': propId, 'timedelay_1': meanList[0],
                                       'timedelay_2': meanList[1], 'timedelay_10': meanList[2],
                                       'timedelay_20': meanList[3]}, ignore_index=True)

accListDf.to_csv('results/accListDf.csv', index=False)
accMeanDf.to_csv('results/accMeanDf.csv', index=False)
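# --- Hypothetical note and sketch (an assumption, not part of the original file):
# DataFrame._append() is a private pandas method. An equivalent pattern with the public
# API is to collect plain dicts in a list and build each DataFrame once after the loop,
# e.g. (dummy values, for illustration only):
import pandas as pd

meanRows = []
meanRows.append({'property_id': '200', 'timedelay_1': 1.0, 'timedelay_2': 1.0,
                 'timedelay_10': 0.5, 'timedelay_20': 0.5})
accMeanAlt = pd.DataFrame(meanRows,
                          columns=['property_id', 'timedelay_1', 'timedelay_2',
                                   'timedelay_10', 'timedelay_20'])
print(accMeanAlt)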
@ -0,0 +1,20 @@
import Data_Analysis as DA
import csv

propIds = DA.getuniquePropIdFromDB()

lostProperties = []

for propId in propIds:
    print(propId)
    scrapeDates, calendarData = DA.getDataFromDB(propId)
    if DA.checkForLostProprty(calendarData):
        lostProperties.append(propId)

print(f"{len(lostProperties)} of {len(propIds)} properties are lost")

with open('results/allLostProperties', 'w') as f:
    write = csv.writer(f)
    write.writerow(lostProperties)

# Output: 221 of 1552 properties were lost at some point
@ -0,0 +1,28 @@
import Data_Analysis as DA
import pandas as pd
import os


propIds = DA.getuniquePropIdFromDB()

for propId in propIds:
    name = f"dok/calendarData_prop{propId}.csv"
    if not os.path.exists(name):
        print(propId)
        scrapeDates, calendarData = DA.getDataFromDB(propId)
        if DA.checkForLostProprty(calendarData):
            print(f"Lost Property: {propId}")
        else:
            scrapeDates = DA.reformatScrapeDates(scrapeDates)
            HeaderDates = DA.getMinMaxDate(calendarData)
            data = DA.creatDataMatrix(HeaderDates, calendarData)

            # Transform to DataFrame for Plotly
            df = pd.DataFrame(data, columns=HeaderDates)
            df.insert(0, "ScrapeDate", scrapeDates, True)

            df = df.drop(index=0)  # irregular gap in the scraping times (only 2 days)
            df = df.drop(df.columns[[1, 2]], axis=1)
            df.to_csv(name, index=False)
@ -0,0 +1,32 @@
import Data_Analysis as DA
import pandas as pd

# Read all scrape dates, reformat them and remove duplicates
uniqueScrapeDates = DA.getUniqueScrapeDates()
uniqueScrapeDates = DA.reformatScrapeDates(uniqueScrapeDates)
uniqueScrapeDates = list(dict.fromkeys(uniqueScrapeDates))
#print(uniqueScrapeDates)

# Build the list of property lists per scrape date
fullPropList = []
for date in uniqueScrapeDates:
    propList = []
    strDate = date
    properties = DA.getPropsPerScrape(strDate)
    for prop in properties:
        propList.append(prop[0])
    propList = list(dict.fromkeys(propList))
    fullPropList.append(propList)
    #print(propList)
print(fullPropList)

# Convert to a DataFrame with the property IDs as column names (one-hot encoding)
all_property_ids = sorted(set([item for sublist in fullPropList for item in sublist]))
print(all_property_ids)
df = pd.DataFrame(0, index=range(len(fullPropList)), columns=all_property_ids)
for i, property_list in enumerate(fullPropList):
    df.loc[i, property_list] = 1

df.to_csv('results/PropertiesPerScrape.csv', index=True)

print(df)
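# --- Hypothetical worked example (not part of the original file): what the one-hot
# encoding above produces for a made-up fullPropList. Each row is one scrape date,
# each column one property_id, and a 1 means that property appeared in that scrape.
import pandas as pd

toyPropList = [[101, 102], [102], [101, 103]]
toyIds = sorted(set(pid for sub in toyPropList for pid in sub))
toyDf = pd.DataFrame(0, index=range(len(toyPropList)), columns=toyIds)
for i, plist in enumerate(toyPropList):
    toyDf.loc[i, plist] = 1
print(toyDf)
#    101  102  103
# 0    1    1    0
# 1    0    1    0
# 2    1    0    1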
File diff suppressed because one or more lines are too long
Some files were not shown because too many files have changed in this diff