4
4
# License MIT (https://opensource.org/licenses/MIT).
5
5
6
6
import base64
7
+ import csv
8
+ import io
7
9
import logging
8
10
import os
9
11
from datetime import datetime
12
+ from hashlib import sha256
13
+ from itertools import groupby
14
+ from operator import itemgetter
10
15
11
16
import urllib3
12
17
from pytz import timezone
@@ -94,6 +99,8 @@ class SyncProject(models.Model):
94
99
95
100
task_ids = fields .One2many ("sync.task" , "project_id" , copy = True )
96
101
task_count = fields .Integer (compute = "_compute_task_count" )
102
+ task_description = fields .Html (readonly = True )
103
+
97
104
trigger_cron_count = fields .Integer (
98
105
compute = "_compute_triggers" , help = "Enabled Crons"
99
106
)
@@ -103,13 +110,18 @@ class SyncProject(models.Model):
103
110
trigger_webhook_count = fields .Integer (
104
111
compute = "_compute_triggers" , help = "Enabled Webhooks"
105
112
)
113
+ sync_order_ids = fields .One2many (
114
+ "sync.order" , "sync_project_id" , string = "Sync Orders" , copy = True
115
+ )
116
+ sync_order_count = fields .Integer (compute = "_compute_sync_order_count" )
106
117
job_ids = fields .One2many ("sync.job" , "project_id" )
107
118
job_count = fields .Integer (compute = "_compute_job_count" )
108
119
log_ids = fields .One2many ("ir.logging" , "sync_project_id" )
109
120
log_count = fields .Integer (compute = "_compute_log_count" )
110
121
link_ids = fields .One2many ("sync.link" , "project_id" )
111
122
link_count = fields .Integer (compute = "_compute_link_count" )
112
123
data_ids = fields .One2many ("sync.data" , "project_id" )
124
+ data_description = fields .Html (readonly = True )
113
125
114
126
def copy (self , default = None ):
115
127
default = dict (default or {})
@@ -129,6 +141,11 @@ def _compute_task_count(self):
129
141
for r in self :
130
142
r .task_count = len (r .with_context (active_test = False ).task_ids )
131
143
144
@api.depends("sync_order_ids")
def _compute_sync_order_count(self):
    """Store the number of linked sync orders on each project."""
    for project in self:
        project.sync_order_count = len(project.sync_order_ids)
148
+
132
149
@api .depends ("job_ids" )
133
150
def _compute_job_count (self ):
134
151
for r in self :
@@ -259,6 +276,43 @@ def record2image(record, fname="image_1920"):
259
276
)
260
277
)
261
278
279
def group_by_lang(partners, default_lang="en_US"):
    """
    Yield groups of partners grouped by their language.

    :param partners: recordset of res.partner
    :param default_lang: language code substituted for partners with no lang set
    :return: generator yielding tuples of (lang, partners)
    """
    if not partners:
        return

    # A partner's lang may be unset (False). Substitute the default up front
    # so the sort never compares bool with str (TypeError in Python 3) and
    # unset-lang partners end up in the default_lang group.
    def lang_key(partner):
        return partner.lang or default_lang

    # groupby only merges *adjacent* records, so sort by the very same key.
    for lang, group in groupby(partners.sorted(key=lang_key), key=lang_key):
        partner_group = partners.browse([partner.id for partner in group])
        yield lang, partner_group
296
+
297
def gen2csv(generator):
    """
    Serialize rows yielded by *generator* into a CSV-formatted string.

    Every field is quoted (csv.QUOTE_ALL); rows are terminated with the
    csv module's default "\r\n".

    :param generator: iterable of row sequences
    :return: str with the CSV content ("" for an empty iterable)
    """
    output = io.StringIO()
    try:
        writer = csv.writer(output, quoting=csv.QUOTE_ALL)
        # writerows consumes the iterable at C speed instead of a
        # per-row Python loop.
        writer.writerows(generator)
        return output.getvalue()
    finally:
        # Release the buffer even if the generator raises mid-iteration.
        output.close()
315
+
262
316
context = dict (self .env .context , log_function = log , sync_project_id = self .id )
263
317
env = self .env (context = context )
264
318
link_functions = env ["sync.link" ]._get_eval_context ()
@@ -294,8 +348,11 @@ def record2image(record, fname="image_1920"):
294
348
"timezone" : timezone ,
295
349
"b64encode" : base64 .b64encode ,
296
350
"b64decode" : base64 .b64decode ,
351
+ "sha256" : sha256 ,
297
352
"type2str" : type2str ,
298
353
"record2image" : record2image ,
354
+ "gen2csv" : gen2csv ,
355
+ "group_by_lang" : group_by_lang ,
299
356
"DEFAULT_SERVER_DATETIME_FORMAT" : DEFAULT_SERVER_DATETIME_FORMAT ,
300
357
"AttrDict" : AttrDict ,
301
358
},
@@ -467,11 +524,16 @@ def magic_upgrade(self):
467
524
)
468
525
469
526
# [Documentation]
470
- vals ["description" ] = (
471
- compile_markdown_to_html (gist_files .get ("README.md" ))
472
- if gist_files .get ("README.md" )
473
- else "<h1>Please add README.md file to place some documentation here</h1>"
474
- )
527
+ for field_name , file_name in (
528
+ ("description" , "README.md" ),
529
+ ("task_description" , "tasks.markdown" ),
530
+ ("data_description" , "datas.markdown" ),
531
+ ):
532
+ vals [field_name ] = (
533
+ compile_markdown_to_html (gist_files .get (file_name ))
534
+ if gist_files .get (file_name )
535
+ else f"<h1>Please add { file_name } file to place some documentation here</h1>"
536
+ )
475
537
476
538
# [PARAMS] and [SECRETS]
477
539
for model , field_name , file_name in (
@@ -512,7 +574,7 @@ def magic_upgrade(self):
512
574
for file_info in gist_content ["files" ].values ():
513
575
# e.g. "data.emoji.csv"
514
576
file_name = file_info ["filename" ]
515
- if not file_name .startswith ("data." ):
577
+ if not ( file_name .startswith ("data." ) and file_name != "data.markdown " ):
516
578
continue
517
579
raw_url = file_info ["raw_url" ]
518
580
response = http .request ("GET" , raw_url )
@@ -574,6 +636,20 @@ def magic_upgrade(self):
574
636
else None ,
575
637
"project_id" : self .id ,
576
638
}
639
+ # Sync Order Model
640
+ if meta .get ("SYNC_ORDER_MODEL" ):
641
+ model = self ._get_model (meta .get ("SYNC_ORDER_MODEL" ))
642
+ task_vals ["sync_order_model_id" ] = model .id
643
+
644
+ # Parse docs
645
+ sync_order_description = gist_files .get (
646
+ file_name [: - len (".py" )] + ".markdown"
647
+ )
648
+ if sync_order_description :
649
+ task_vals ["sync_order_description" ] = compile_markdown_to_html (
650
+ sync_order_description
651
+ )
652
+
577
653
task = self .env ["sync.task" ]._create_or_update_by_xmlid (
578
654
task_vals , task_technical_name , namespace = self .id
579
655
)
@@ -585,7 +661,7 @@ def create_trigger(model, data):
585
661
trigger_name = data ["name" ],
586
662
)
587
663
return self .env [model ]._create_or_update_by_xmlid (
588
- vals , data ["name" ], namespace = self .id
664
+ vals , data ["name" ], namespace = f"p { self .id } t { task . id } "
589
665
)
590
666
591
667
# Create/Update triggers
@@ -596,20 +672,37 @@ def create_trigger(model, data):
596
672
create_trigger ("sync.trigger.webhook" , data )
597
673
598
674
for data in meta .get ("DB_TRIGGERS" , []):
599
- model_id = self .env ["ir.model" ]._get (data ["model" ]).id
600
- if not model_id :
601
- raise ValidationError (
602
- _ (
603
- "Model %s is not available. Check if you need to install an extra module first."
675
+ model = self ._get_model (data ["model" ])
676
+ if data .get ("trigger_fields" ):
677
+ trigger_field_ids = []
678
+ for f in data .pop ("trigger_fields" ).split ("," ):
679
+ ff = self .env ["ir.model.fields" ]._get (model .model , f )
680
+ trigger_field_ids .append (ff .id )
681
+ data ["trigger_field_ids" ] = [(6 , 0 , trigger_field_ids )]
682
+
683
+ for field_name in ("filter_pre_domain" , "filter_domain" ):
684
+ if data .get (field_name ):
685
+ data [field_name ] = data [field_name ].replace (
686
+ "{TASK_ID}" , str (task .id )
604
687
)
605
- % data ["model" ]
606
- )
688
+
607
689
create_trigger (
608
- "sync.trigger.automation" , dict (data , model_id = model_id , model = None )
690
+ "sync.trigger.automation" , dict (data , model_id = model . id , model = None )
609
691
)
610
692
611
693
self .update (vals )
612
694
695
def _get_model(self, model_name):
    """Return the ir.model record for *model_name*; raise if it is absent."""
    model = self.env["ir.model"]._get(model_name)
    if model:
        return model
    # Guard clause: a missing model usually means a module is not installed.
    raise ValidationError(
        _(
            "Model %s is not available. Check if you need to install an extra module first."
        )
        % model_name
    )
705
+
613
706
614
707
class SyncProjectParamMixin (models .AbstractModel ):
615
708
0 commit comments