Commit 8a70e83

DarhkVoyd, abidlabs, and gradio-pr-bot authored
switch from black to ruff formatter (#6543)
* migrate from black to ruff
* fix script and dependencies
* applying ruff
* add changeset
* add changeset
* address ruff feedback
* replace linter
* fixed typing
* fix typing

Co-authored-by: Abubakar Abid <[email protected]>
Co-authored-by: gradio-pr-bot <[email protected]>
1 parent 5d5ab8c commit 8a70e83

38 files changed: +120, -124 lines changed

.changeset/wise-feet-fold.md

Lines changed: 6 additions & 0 deletions
@@ -0,0 +1,6 @@
+---
+"gradio": patch
+"gradio_client": patch
+---
+
+feat:switch from black to ruff formatter

client/python/gradio_client/client.py

Lines changed: 4 additions & 1 deletion
@@ -780,7 +780,10 @@ def deploy_discord(
             )
         if is_private:
             huggingface_hub.add_space_secret(
-                space_id, "HF_TOKEN", hf_token, token=hf_token  # type: ignore
+                space_id,
+                "HF_TOKEN",
+                hf_token,  # type: ignore
+                token=hf_token,
             )
 
         url = f"https://huggingface.co/spaces/{space_id}"

client/python/gradio_client/documentation.py

Lines changed: 1 addition & 1 deletion
@@ -134,7 +134,7 @@ def document_fn(fn: Callable, cls) -> tuple[str, list[dict], dict, str | None]:
                 del parameters[param_name]
             if param.default != inspect.Parameter.empty:
                 default = param.default
-                if type(default) == str:
+                if isinstance(default, str):
                     default = '"' + default + '"'
                 if default.__class__.__module__ != "builtins":
                     default = f"{default.__class__.__name__}()"
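The type(default) == str comparison is the kind of check ruff's linter can flag (E721 discourages exact type comparisons), and isinstance is the idiomatic replacement; unlike an exact type comparison it also matches subclasses. A minimal sketch of the behavioral difference, using a hypothetical Token subclass for illustration only:

# Sketch: isinstance() vs. an exact type() comparison.
# Token is a made-up str subclass, not part of the Gradio codebase.
class Token(str):
    pass

default = Token("hello")

print(type(default) == str)      # False: exact type comparison rejects subclasses
print(isinstance(default, str))  # True: isinstance() accepts str and its subclasses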

client/python/scripts/format.sh

Lines changed: 2 additions & 2 deletions
@@ -2,9 +2,9 @@
 
 cd "$(dirname ${0})/.."
 
-echo "Formatting the client library.. Our style follows the Black code style."
+echo "Formatting the client library.. Our style follows the ruff code style."
 python -m ruff --fix .
-python -m black .
+python -m ruff format .
 
 echo "Type checking the client library with pyright"
 python -m pyright gradio_client/*.py

client/python/scripts/lint.sh

Lines changed: 1 addition & 1 deletion
@@ -4,7 +4,7 @@ cd "$(dirname ${0})/.."
 
 echo "Linting..."
 python -m ruff test gradio_client
-python -m black --check test gradio_client
+python -m ruff format --check test gradio_client
 
 echo "Type checking the client library with pyright"
 python -m pyright gradio_client/*.py

client/python/test/requirements.txt

Lines changed: 1 addition & 2 deletions
@@ -1,7 +1,6 @@
-black==23.3.0
 pytest-asyncio
 pytest==7.1.2
-ruff==0.0.264
+ruff==0.1.7
 pyright==1.1.327
 gradio
 pydub==0.25.1

client/python/test/test_utils.py

Lines changed: 1 addition & 1 deletion
@@ -109,7 +109,7 @@ def test_strip_invalid_filename_characters(orig_filename, new_filename):
 
 class AsyncMock(MagicMock):
     async def __call__(self, *args, **kwargs):
-        return super(AsyncMock, self).__call__(*args, **kwargs)
+        return super().__call__(*args, **kwargs)
 
 
 @pytest.mark.asyncio
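Dropping the explicit class and instance from super(AsyncMock, self) is the Python 3 zero-argument form that ruff's pyupgrade-derived rules can rewrite automatically; both spellings resolve to the same parent call inside a method body. A small illustrative sketch with throwaway classes (not Gradio code):

# Sketch: zero-argument super() is equivalent to the explicit two-argument form
# when called inside a method body on Python 3.
class Base:
    def greet(self) -> str:
        return "hello"


class Child(Base):
    def greet(self) -> str:
        # Same result as super(Child, self).greet()
        return super().greet() + " from Child"


print(Child().greet())  # prints "hello from Child"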

gradio/blocks.py

Lines changed: 7 additions & 3 deletions
@@ -1311,7 +1311,7 @@ def validate_outputs(self, fn_index: int, predictions: Any | list[Any]):
 
         dep_outputs = dependency["outputs"]
 
-        if type(predictions) is not list and type(predictions) is not tuple:
+        if not isinstance(predictions, (list, tuple)):
             predictions = [predictions]
 
         if len(predictions) < len(dep_outputs):
@@ -1349,7 +1349,7 @@ def postprocess_data(
         dependency = self.dependencies[fn_index]
         batch = dependency["batch"]
 
-        if type(predictions) is dict and len(predictions) > 0:
+        if isinstance(predictions, dict) and len(predictions) > 0:
             predictions = convert_component_dict_to_list(
                 dependency["outputs"], predictions
             )
@@ -1418,7 +1418,11 @@ def postprocess_data(
                     f"{block.__class__} Component with id {output_id} not a valid output component."
                 )
                 prediction_value = block.postprocess(prediction_value)
-                outputs_cached = processing_utils.move_files_to_cache(prediction_value, block, postprocess=True)  # type: ignore
+                outputs_cached = processing_utils.move_files_to_cache(
+                    prediction_value,
+                    block,  # type: ignore
+                    postprocess=True,
+                )
                 output.append(outputs_cached)
 
         return output
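The rewritten check relies on isinstance accepting a tuple of types, which collapses the chained type(...) is not list and type(...) is not tuple test into one call and also covers subclasses of either type. A quick standalone sketch of the wrapping behavior (illustrative values, not Gradio code):

# Sketch: isinstance() with a tuple of types matches any of them.
def ensure_sequence(predictions):
    # Wrap a single prediction so callers can always iterate over the result.
    if not isinstance(predictions, (list, tuple)):
        predictions = [predictions]
    return predictions


print(ensure_sequence("cat"))           # ['cat']
print(ensure_sequence(["cat", "dog"]))  # ['cat', 'dog']
print(ensure_sequence(("cat", "dog")))  # ('cat', 'dog') -- already a sequence, unchanged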

gradio/chat_interface.py

Lines changed: 2 additions & 1 deletion
@@ -126,7 +126,8 @@ def __init__(
             if not isinstance(additional_inputs, list):
                 additional_inputs = [additional_inputs]
             self.additional_inputs = [
-                get_component_instance(i) for i in additional_inputs  # type: ignore
+                get_component_instance(i)
+                for i in additional_inputs  # type: ignore
             ]
         else:
             self.additional_inputs = []

gradio/cli/commands/components/build.py

Lines changed: 3 additions & 1 deletion
@@ -37,7 +37,9 @@ def _build(
     pyproject_toml = parse((path / "pyproject.toml").read_text())
     if bump_version:
         pyproject_toml = parse((path / "pyproject.toml").read_text())
-        version = semantic_version.Version(pyproject_toml["project"]["version"]).next_patch()  # type: ignore
+        version = semantic_version.Version(
+            pyproject_toml["project"]["version"]  # type: ignore
+        ).next_patch()
         live.update(
             f":1234: Using version [bold][magenta]{version}[/][/]. "
             "Set [bold][magenta]--no-bump-version[/][/] to use the version in pyproject.toml file."
