1
+ import math
2
+ import pandas as pd
1
3
import requests , time
2
4
import datetime
5
+ from tzlocal import get_localzone
6
+ from asgiref .sync import sync_to_async
7
+ from pytz import NonExistentTimeError
8
+ from bikes .models import Bikes
9
+ from greykite .framework .templates .autogen .forecast_config import ForecastConfig , MetadataParam , ModelComponentsParam
10
+ from greykite .framework .templates .forecaster import Forecaster
3
11
4
12
def get_data_from_open_data (date = None , id_compteur = '100057445-103057445' ):
5
13
url = "https://opendata.paris.fr/api/explore/v2.1/catalog/datasets/comptage-velo-donnees-compteurs/records"
@@ -26,3 +34,115 @@ def get_data_from_open_data(date=None, id_compteur='100057445-103057445'):
26
34
return data
27
35
else :
28
36
return "Waiting..."
37
+
38
@sync_to_async
def saveResultsToDB(result):
    """Persist forecast rows to the Bikes table.

    Each entry of *result* is a dict carrying "actual", "forecast",
    "forecast_upper", "forecast_lower" and "DateTime" (a string pandas can
    parse). A NaN "actual" is stored as NULL.
    """
    local_tz = get_localzone()  # fallback zone if Paris localization fails
    for data in result:
        # NaN marks "no observation yet" -> store None instead of NaN.
        actual_data = None
        if not math.isnan(data['actual']):
            actual_data = data["actual"]
        try:
            # Naive timestamp -> aware timestamp in the counters' timezone.
            date_time = pd.to_datetime(data["DateTime"]).tz_localize('Europe/Paris')
        except NonExistentTimeError:
            # The wall-clock time falls in a DST "spring forward" gap.
            # BUG FIX: the original retry passed ambiguous='NaT', which only
            # covers AmbiguousTimeError (fall-back overlaps); the same
            # NonExistentTimeError was raised again whenever local_tz also
            # skipped this time. nonexistent='NaT' handles the actual case.
            date_time = pd.to_datetime(data["DateTime"]).tz_localize(local_tz, nonexistent='NaT')
        result_to_save = Bikes(
            actual=actual_data,
            forecast=data["forecast"],
            upper=data["forecast_upper"],
            lower=data["forecast_lower"],
            date_time=date_time,
        )
        print(result_to_save.__dict__)
        result_to_save.save_result()
60
+
61
def merge_data(data, treated_data):
    """Merge fresh open-data rows with already-treated (stored) rows.

    Treated entries that carry an observed value ('actual') take precedence
    over raw entries sharing the same 'DateTime'; their value is re-exposed
    under the 'sum_counts' key. The merged list comes back sorted by
    'DateTime'.
    """
    # Keep only treated rows that have a real observation.
    observed = [row for row in treated_data if row['actual'] is not None]
    known_stamps = {row['DateTime'] for row in observed}

    # Reduce each observed row to the two fields the model consumes.
    condensed = [
        {'DateTime': row['DateTime'], 'sum_counts': row['actual']}
        for row in observed
    ]

    # Raw rows whose timestamp is not already covered by an observation.
    fresh_only = []
    for row in data:
        if row['DateTime'] not in known_stamps:
            fresh_only.append(row)

    combined = condensed + fresh_only
    combined.sort(key=lambda row: row['DateTime'])
    return combined
70
+
71
def runMLModel(data):
    """Fit a Greykite SILVERKITE model on hourly bike counts and return the
    last 100 forecast rows as a list of row dicts.

    Args:
        data: list of dicts with at least 'DateTime' and 'sum_counts' keys
              (as produced by merge_data).

    Returns:
        list[dict] with keys 'DateTime' (string "%Y-%m-%d %X"), 'actual',
        'forecast', 'forecast_upper', 'forecast_lower'.
    """
    forecast_config = ForecastConfig(
        model_template="SILVERKITE",
        forecast_horizon=24,     # predict the next 24 hours
        coverage=0.85,           # 85% prediction intervals
        metadata_param=MetadataParam(
            time_col="DateTimeCol",
            value_col="sum_counts",
            freq="H",            # hourly series
            train_end_date=datetime.datetime.today()
        )
    )
    df = pd.DataFrame(data)
    # Coerce to UTC-aware datetimes; unparseable stamps become NaT.
    df['DateTime'] = pd.to_datetime(df['DateTime'], utc=True, errors='coerce')
    # Assigning .values strips the timezone (naive numpy datetime64) —
    # presumably so Greykite receives naive timestamps; TODO confirm.
    df['DateTime'] = df['DateTime'].values
    df.sort_values(by='DateTime', inplace=True)
    # Duplicate under the column name declared in MetadataParam.time_col.
    df['DateTimeCol'] = df['DateTime']
    df.set_index('DateTime', inplace=True)

    forecaster = Forecaster()
    silverkite = forecaster.run_forecast_config(
        df=df,
        config=forecast_config
    )

    # Column-oriented dict: {column_name -> {row_index -> value}}.
    forecast_data_json = silverkite.forecast.df.to_dict()

    # Re-pivot to row-oriented dicts, keeping only the last 100 rows of
    # each column. NOTE(review): the name says "last_24" but the slice
    # takes 100 entries — keep in mind when reading.
    last_24_entries = {}
    for forecast_key in forecast_data_json:
        if forecast_key == "DateTime":
            # Format timestamps as "YYYY-MM-DD HH:MM:SS" strings first.
            for y in forecast_data_json[forecast_key]:
                forecast_data_json[forecast_key][y] = forecast_data_json[forecast_key][y].strftime("%Y-%m-%d %X")
            forecast_value = dict(list(forecast_data_json[forecast_key].items())[-100:])
        else:
            forecast_value = dict(list(forecast_data_json[forecast_key].items())[-100:])
        # Fold this column's tail into the per-row dicts, keyed by row index.
        for index_key in forecast_value:
            if index_key in last_24_entries:
                last_24_entries[index_key][forecast_key] = forecast_value[index_key]
            else:
                last_24_entries[index_key] = {forecast_key: forecast_value[index_key]}

    result = list(last_24_entries.values())
    # The time column was fed to the model as 'DateTimeCol'; rename it back
    # to 'DateTime' for downstream consumers.
    for item in result:
        if 'DateTimeCol' in item:
            item['DateTime'] = item.pop('DateTimeCol')

    return result
118
+
119
@sync_to_async
def get_treated_data():
    """Load every stored Bikes row as a list of plain dicts.

    Model field values are converted to float (or None for a missing
    'actual') so the result is comparable with the open-data payload.

    Returns:
        list[dict] with keys 'actual', 'forecast', 'forecast_upper',
        'forecast_lower' and 'DateTime'.
    """
    data = []
    for bikes_data in Bikes.objects.all():
        # Idiom fix: `is not None` instead of `!= None` (PEP 8); behavior
        # is identical for model field values.
        actual_data = float(bikes_data.actual) if bikes_data.actual is not None else None
        data.append({
            "actual": actual_data,
            "forecast": float(bikes_data.forecast),
            "forecast_upper": float(bikes_data.upper),
            "forecast_lower": float(bikes_data.lower),
            "DateTime": bikes_data.date_time,
        })

    return data
136
+
137
def change_date_string_to_datetime(date_string):
    """Parse an ISO-8601 date string into a datetime.datetime object."""
    parsed = datetime.datetime.fromisoformat(date_string)
    return parsed
139
+
140
@sync_to_async
def check_is_entry_today(today):
    """Return True if a Bikes row exists for *today* with its 'actual'
    field already filled in, False otherwise.

    Improvements over the original: the leftover debug prints (one with a
    typo'd label) are removed, and the queryset is evaluated once via
    .first() instead of truthiness + .exists() + repeated indexing, each of
    which can hit the database separately.
    """
    todays_entry = Bikes.objects.filter(date_time=today).first()
    return todays_entry is not None and todays_entry.actual is not None
0 commit comments