11
11
import socket
12
12
import json
13
13
from datetime import datetime , timezone
14
+ import yaml
14
15
15
16
app = Flask (__name__ )
16
17
port = int (os .getenv ("CONTROLLER_FLASK_PORT" , 5000 ))
@@ -29,7 +30,7 @@ def submit_config():
29
30
except Exception as e :
30
31
return jsonify ({"error" : str (e )}), 400
31
32
elif "schema" in request .form :
32
- content = request .form ["schema" ].encode (' utf-8' )
33
+ content = request .form ["schema" ].encode (" utf-8" )
33
34
else :
34
35
return jsonify ({"error" : "No file part or config string in the request" }), 400
35
36
@@ -60,7 +61,7 @@ def submit_pgql_config():
60
61
except Exception as e :
61
62
return jsonify ({"error" : str (e )}), 400
62
63
elif "schema" in request .form :
63
- content = request .form ["schema" ].encode (' utf-8' )
64
+ content = request .form ["schema" ].encode (" utf-8" )
64
65
else :
65
66
return jsonify ({"error" : "No file part or config string in the request" }), 400
66
67
@@ -90,7 +91,186 @@ def submit_pgql_config():
90
91
except Exception as e :
91
92
time .sleep (5 )
92
93
return "PGQL config submitted" , 200
93
-
94
+
95
+
96
@app.route("/submit-graph-schema", methods=["POST"])
def submit_graph_schema():
    """Accept a JSON graph schema and persist it in etcd.

    Expects a JSON request body with a "schema" field (the schema document
    as a string). The schema is also dumped to /tmp/graph_schema.json for
    debugging, then stored under <ETCD_PREFIX>gart_graph_schema_json.

    Returns:
        ("Graph schema submitted", 200) on success,
        400 if the request carries no schema,
        500 if the dump or the etcd write fails.
    """
    payload = request.json or {}
    schema = payload.get("schema")
    if schema is None:
        # Previously this crashed with an AttributeError (HTTP 500); a
        # missing field is a client error, so report 400 explicitly.
        return "No graph schema in the request", 400
    graph_schema = schema.encode("utf-8")

    etcd_server = os.getenv("ETCD_SERVICE", "etcd")
    if not etcd_server.startswith(("http://", "https://")):
        etcd_server = f"http://{etcd_server}"
    etcd_prefix = os.getenv("ETCD_PREFIX", "gart_meta_")
    # host[:port] portion after the scheme; fall back to the default etcd
    # client port when the address carries none (the old split(":")[2]
    # raised IndexError for a port-less address such as the default "etcd").
    netloc = etcd_server.split("://", 1)[1]
    etcd_host, _, port_str = netloc.partition(":")
    etcd_client = etcd3.client(host=etcd_host, port=port_str or "2379")

    try:
        # Debug copy on local disk; kept inside the try so an I/O failure
        # is reported instead of surfacing as an unhandled 500.
        with open("/tmp/graph_schema.json", "wb") as f:
            f.write(graph_schema)
        etcd_client.put(etcd_prefix + "gart_graph_schema_json", graph_schema)
        return "Graph schema submitted", 200
    except Exception as e:
        return "Failed to submit graph schema: " + str(e), 500
114
+
115
+
116
@app.route("/submit-data-source", methods=["POST"])
def submit_data_source():
    """Accept a JSON data-source config and persist it in etcd.

    Expects a JSON request body with a "schema" field (the data-source
    document as a string), stored under <ETCD_PREFIX>gart_data_source_json.

    Returns:
        ("Data source submitted", 200) on success,
        400 if the request carries no config,
        500 if the etcd write fails.
    """
    payload = request.json or {}
    config = payload.get("schema")
    if config is None:
        # A missing field used to raise AttributeError (HTTP 500); it is a
        # client error, so answer 400 instead.
        return "No data source config in the request", 400
    data_source_config = config.encode("utf-8")

    etcd_server = os.getenv("ETCD_SERVICE", "etcd")
    if not etcd_server.startswith(("http://", "https://")):
        etcd_server = f"http://{etcd_server}"
    etcd_prefix = os.getenv("ETCD_PREFIX", "gart_meta_")
    # host[:port] after the scheme; default etcd client port when absent
    # (the old split(":")[2] raised IndexError for a port-less address).
    netloc = etcd_server.split("://", 1)[1]
    etcd_host, _, port_str = netloc.partition(":")
    etcd_client = etcd3.client(host=etcd_host, port=port_str or "2379")

    try:
        etcd_client.put(etcd_prefix + "gart_data_source_json", data_source_config)
        return "Data source submitted", 200
    except Exception as e:
        return "Failed to submit data source: " + str(e), 500
133
+
134
+
135
@app.route("/submit-data-loading", methods=["POST"])
def submit_data_loading():
    """Combine the stored graph schema and data-source config into an
    RGMapping document and persist it in etcd as YAML.

    Reads <ETCD_PREFIX>gart_graph_schema_json and
    <ETCD_PREFIX>gart_data_source_json (both written by the companion
    submit endpoints), joins vertex/edge types with their table mappings
    by type name, and writes the result to
    <ETCD_PREFIX>gart_rg_mapping_yaml.

    Returns:
        ("Data loading config submitted", 200) on success,
        500 if any etcd read/write fails.
    """
    etcd_server = os.getenv("ETCD_SERVICE", "etcd")
    if not etcd_server.startswith(("http://", "https://")):
        etcd_server = f"http://{etcd_server}"
    etcd_prefix = os.getenv("ETCD_PREFIX", "gart_meta_")
    # host[:port] after the scheme; default etcd client port when absent
    # (the old split(":")[2] raised IndexError for a port-less address).
    netloc = etcd_server.split("://", 1)[1]
    etcd_host, _, port_str = netloc.partition(":")
    etcd_client = etcd3.client(host=etcd_host, port=port_str or "2379")

    try:
        graph_schema, _ = etcd_client.get(etcd_prefix + "gart_graph_schema_json")
    except Exception as e:
        return "Failed to get graph schema: " + str(e), 500

    try:
        data_source_config, _ = etcd_client.get(etcd_prefix + "gart_data_source_json")
    except Exception as e:
        return "Failed to get data source: " + str(e), 500

    graph_schema = json.loads(graph_schema.decode("utf-8"))
    data_source_config = json.loads(data_source_config.decode("utf-8"))

    result_dict = {
        "graph": graph_schema["name"],
        "loadingConfig": {
            "dataSource": "rdbms",
            "method": "append",
            "enableRowStore": False,
            "database": os.getenv("DB_NAME", "rdbms"),
        },
    }
    graph_schema = graph_schema["schema"]

    # --- vertex types: join each schema vertex type with its table mapping ---
    vertex_types_list = []
    for vertex_info in graph_schema["vertex_types"]:
        vertex_element = {"type_name": vertex_info["type_name"]}
        mappings_list = []
        # BUG FIX: iterate the data-source vertex mappings themselves.
        # The original indexed data_source_config["vertex_mappings"] with
        # range(len(vertex_types)) — an IndexError when the mapping list is
        # shorter and silently missed matches when it is longer.
        for table_mapping in data_source_config["vertex_mappings"]:
            if vertex_element["type_name"] != table_mapping["type_name"]:
                continue
            vertex_element["dataSourceName"] = table_mapping["inputs"][0]
            # Single-column primary key assumed (first entry), as before.
            pk_prop_name = vertex_info["primary_keys"][0]
            for column_mapping in table_mapping["column_mappings"]:
                mappings_list.append(
                    {
                        "property": column_mapping["property"],
                        "dataField": {"name": column_mapping["column"]["name"]},
                    }
                )
                if pk_prop_name == column_mapping["property"]:
                    vertex_element["idFieldName"] = column_mapping["column"]["name"]
            break
        vertex_element["mappings"] = mappings_list
        vertex_types_list.append(vertex_element)

    # --- edge types: join each schema edge type with its table mapping ---
    edge_types_list = []
    for edge_info in graph_schema["edge_types"]:
        # Only the first vertex-type pair relation is used, as before.
        relation = edge_info["vertex_type_pair_relations"][0]
        edge_element = {
            "type_pair": {
                "edge": edge_info["type_name"],
                "source_vertex": relation["source_vertex"],
                "destination_vertex": relation["destination_vertex"],
            }
        }
        # BUG FIX: same loop-bound issue as the vertex loop above.
        for table_mapping in data_source_config["edge_mappings"]:
            if edge_element["type_pair"]["edge"] != table_mapping["type_triplet"]["edge"]:
                continue
            edge_element["dataSourceName"] = table_mapping["inputs"][0]
            edge_element["sourceVertexMappings"] = [
                {
                    "dataField": {
                        "name": table_mapping["source_vertex_mappings"][0]["column"]["name"]
                    }
                }
            ]
            edge_element["destinationVertexMappings"] = [
                {
                    "dataField": {
                        "name": table_mapping["destination_vertex_mappings"][0]["column"]["name"]
                    }
                }
            ]
            edge_element["dataFieldMappings"] = [
                {
                    "property": column_mapping["property"],
                    "dataField": {"name": column_mapping["column"]["name"]},
                }
                for column_mapping in table_mapping["column_mappings"]
            ]
            break
        edge_types_list.append(edge_element)

    result_dict["vertexMappings"] = {"vertex_types": vertex_types_list}
    result_dict["edgeMappings"] = {"edge_types": edge_types_list}

    try:
        etcd_client.put(etcd_prefix + "gart_rg_mapping_yaml", yaml.dump(result_dict))
        return "Data loading config submitted", 200
    except Exception as e:
        return "Failed to submit data loading config: " + str(e), 500
94
274
95
275
96
276
@app .route ("/control/pause" , methods = ["POST" ])
@@ -155,13 +335,22 @@ def get_read_epoch_by_timestamp():
155
335
unix_time = int (dt .timestamp ())
156
336
epoch_unix_time_pairs = get_all_available_read_epochs_internal ()[1 ]
157
337
# iterate through the list of epoch_unix_time pairs from end to start
158
- for epoch , unix_time_epoch , num_vertices , num_edges in reversed (epoch_unix_time_pairs ):
338
+ for epoch , unix_time_epoch , num_vertices , num_edges in reversed (
339
+ epoch_unix_time_pairs
340
+ ):
159
341
if unix_time_epoch <= unix_time :
160
342
converted_time = datetime .fromtimestamp (unix_time_epoch )
161
343
# convert time into local time zone
162
- converted_time = converted_time .replace (tzinfo = timezone .utc ).astimezone (tz = None )
344
+ converted_time = converted_time .replace (tzinfo = timezone .utc ).astimezone (
345
+ tz = None
346
+ )
163
347
formatted_time = converted_time .strftime ("%Y-%m-%d %H:%M:%S" )
164
- result = {"version_id" : str (epoch ), "creation_time" : formatted_time , "num_vertices" : num_vertices , "num_edges" : num_edges }
348
+ result = {
349
+ "version_id" : str (epoch ),
350
+ "creation_time" : formatted_time ,
351
+ "num_vertices" : num_vertices ,
352
+ "num_edges" : num_edges ,
353
+ }
165
354
return json .dumps (result ), 200
166
355
return "No read epoch found" , 200
167
356
@@ -385,7 +574,9 @@ def get_all_available_read_epochs_internal():
385
574
converted_time = converted_time .replace (tzinfo = timezone .utc ).astimezone (tz = None )
386
575
formatted_time = converted_time .strftime ("%Y-%m-%d %H:%M:%S" )
387
576
available_epochs .append ([epoch , formatted_time , num_vertices , num_edges ])
388
- available_epochs_internal .append ([epoch , latest_timestamp , num_vertices , num_edges ])
577
+ available_epochs_internal .append (
578
+ [epoch , latest_timestamp , num_vertices , num_edges ]
579
+ )
389
580
return [available_epochs , available_epochs_internal ]
390
581
391
582
0 commit comments