@@ -155,7 +155,7 @@ def get_dataset_data(
        breadbox_response = get_dataset_data_client.sync_detailed(
            dataset_id=dataset_id,
            client=self.client,
-            json_body=request_params,
+            body=request_params,
        )
        response = self._parse_client_response(breadbox_response)
        try:
@@ -196,7 +196,7 @@ def upload_file(self, file_handle: io.BytesIO, mime_type="text/csv", chunk_size=

        breadbox_response = upload_file.sync_detailed(
            client=self.client,
-            multipart_data=BodyUploadFile(
+            body=BodyUploadFile(
                file=File(
                    payload=io.BytesIO(chunk),
                    file_name="unnamed",
@@ -250,7 +250,7 @@ def add_table_dataset(
        )
        breadbox_response = add_dataset_uploads_client.sync_detailed(
            client=self.client,
-            json_body=params,
+            body=params,
        )
        breadbox_response_ = typing.cast(AddDatasetResponse, self._parse_client_response(breadbox_response))
        result = self.await_task_result(breadbox_response_.id, timeout=timeout)
@@ -294,7 +294,7 @@ def add_matrix_dataset(
        )
        breadbox_response = add_dataset_uploads_client.sync_detailed(
            client=self.client,
-            json_body=params,
+            body=params,
        )
        breadbox_response_ = typing.cast(AddDatasetResponse, self._parse_client_response(breadbox_response))
        result = self.await_task_result(breadbox_response_.id, timeout=timeout)
@@ -335,13 +335,13 @@ def add_dimension_type(self, name: str, id_column: str, axis: Union[AddDimension

        params = AddDimensionType(axis=axis, id_column=id_column, name=name)

-        breadbox_response = add_dimension_type_client.sync_detailed(client=self.client, json_body=params)
+        breadbox_response = add_dimension_type_client.sync_detailed(client=self.client, body=params)
        return self._parse_client_response(breadbox_response)

    def update_dimension_type(self, name: str, metadata_dataset_id: str, properties_to_index: List[str]):
        params = UpdateDimensionType(metadata_dataset_id, properties_to_index)

-        breadbox_response = update_dimension_type_client.sync_detailed(name=name, client=self.client, json_body=params)
+        breadbox_response = update_dimension_type_client.sync_detailed(name=name, client=self.client, body=params)
        return self._parse_client_response(breadbox_response)

    def get_dimension_types(self):
@@ -552,7 +552,7 @@ def compute_univariate_associations(
            )
        )
        # Convert the breadbox task status response into the similar format used by the legacy portal
-        breadbox_response = compute_univariate_associations_client.sync_detailed(client=self.client, json_body=params)
+        breadbox_response = compute_univariate_associations_client.sync_detailed(client=self.client, body=params)
        return self._parse_client_response(breadbox_response)

    # OTHER
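
Every hunk above makes the same mechanical change: calls into the generated breadbox client modules now pass the request payload as `body=` rather than the older `json_body=` / `multipart_data=` keyword arguments. A minimal sketch of the resulting calling convention, assuming the generated `sync_detailed` endpoint function and the `AddDatasetResponse` model that appear in the diff; the wrapper method itself is illustrative, not part of the change:

```python
import typing

class BreadboxClientWrapper:
    # Illustrative wrapper only; `add_dataset_uploads_client`, `AddDatasetResponse`,
    # `self.client`, and `self._parse_client_response` are the names used in the
    # diff above, everything else is a sketch.
    def add_dataset(self, params) -> "AddDatasetResponse":
        # The regenerated client takes the request model under the single
        # `body=` keyword (previously `json_body=` for JSON payloads and
        # `multipart_data=` for multipart uploads).
        breadbox_response = add_dataset_uploads_client.sync_detailed(
            client=self.client,
            body=params,
        )
        return typing.cast(
            AddDatasetResponse, self._parse_client_response(breadbox_response)
        )
```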