Commit d4b8e92

Merge pull request #1061 from PowerGridModel/feature/more-ruff-checks
Linting and Formatting: more ruff checks
2 parents a70feb0 + 2b17479 commit d4b8e92

16 files changed: 102 additions, 93 deletions

code_generation/code_gen.py

Lines changed: 5 additions & 6 deletions
@@ -18,12 +18,11 @@
 def _data_type_nan(data_type: str):
     if data_type == "ID":
         return "na_IntID"
-    elif data_type == "double" or "RealValue" in data_type:
+    if data_type == "double" or "RealValue" in data_type:
         return "nan"
-    elif data_type == "IntS":
+    if data_type == "IntS":
         return "na_IntS"
-    else:
-        return f"static_cast<{data_type}>(na_IntS)"
+    return f"static_cast<{data_type}>(na_IntS)"


 class CodeGenerator:
@@ -45,7 +44,7 @@ def render_template(self, template_path: Path, output_path: Path, **data):
         output_file.write(output)

     def render_attribute_classes(self, template_path: Path, data_path: Path, output_path: Path):
-        with open(data_path) as data_file:
+        with data_path.open() as data_file:
             json_data = data_file.read()
         dataset_meta_data: DatasetMetaData = DatasetMetaData.schema().loads(json_data)
         # flatten attribute list
@@ -95,7 +94,7 @@ def render_attribute_classes(self, template_path: Path, data_path: Path, output_
         )

     def render_dataset_class_maps(self, template_path: Path, data_path: Path, output_path: Path):
-        with open(data_path) as data_file:
+        with data_path.open() as data_file:
             json_data = data_file.read()
         dataset_meta_data: list[DatasetMapData] = AllDatasetMapData.schema().loads(json_data).all_datasets
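
The first hunk follows ruff's flake8-return checks (RET505 flags `elif`/`else` branches that follow a `return`), collapsing the chain into early returns with identical behaviour; the other two hunks are `Path.open()` fixes. The rewritten helper as a standalone sketch, with illustrative calls that are not part of the commit (the type names in the calls are assumptions):

```python
# The rewritten helper from code_generation/code_gen.py, reproduced as a runnable
# sketch; the example type names in the calls below are illustrative assumptions.
def _data_type_nan(data_type: str):
    if data_type == "ID":
        return "na_IntID"
    if data_type == "double" or "RealValue" in data_type:
        return "nan"
    if data_type == "IntS":
        return "na_IntS"
    # Fallback for any other C++ type: cast the IntS NaN sentinel.
    return f"static_cast<{data_type}>(na_IntS)"


if __name__ == "__main__":
    print(_data_type_nan("ID"))      # na_IntID
    print(_data_type_nan("double"))  # nan
    print(_data_type_nan("IntS"))    # na_IntS
    print(_data_type_nan("Idx"))     # static_cast<Idx>(na_IntS)
```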

docs/examples/Generic Branch Example.ipynb

Lines changed: 4 additions & 4 deletions
@@ -39,7 +39,7 @@
 " | |\n",
 " 0 Br9,T2 0 Br10, T3\n",
 " 0 0\n",
-" | | \n",
+" | |\n",
 " ---- -----> 7, Load\n",
 " |\n",
 " |\n",
@@ -48,7 +48,7 @@
 " |\n",
 " ----- 11, Station1\n",
 " |\n",
-" ----> 6, Load \n",
+" ----> 6, Load\n",
 "\n",
 "\n",
 "========================================\n",
@@ -162,7 +162,7 @@
 " | |\n",
 " 0 Br9,T2 0 Br10, T3\n",
 " 0 0\n",
-" | | \n",
+" | |\n",
 " ---- -----> 7, Load\n",
 " |\n",
 " |\n",
@@ -171,7 +171,7 @@
 " |\n",
 " ----- 11, Station1\n",
 " |\n",
-" ----> 6, Load \n",
+" ----> 6, Load\n",
 "\n",
 "\"\"\"\n",
 "# Voltage levels of the transformer\n",

docs/examples/Make Test Dataset.ipynb

Lines changed: 3 additions & 4 deletions
@@ -278,8 +278,7 @@
 ],
 "source": [
 "# we can display the json file\n",
-"\n",
-"with open(temp_path / \"input.json\", \"r\") as f:\n",
+"with (temp_path / \"input.json\").open(\"r\") as f:\n",
 " print(f.read())"
 ]
 },
@@ -395,7 +394,7 @@
 "\n",
 "json_serialize_to_file(temp_path / \"update_batch.json\", time_series_mutation)\n",
 "\n",
-"with open(temp_path / \"update_batch.json\", \"r\") as f:\n",
+"with (temp_path / \"update_batch.json\").open(\"r\") as f:\n",
 " print(f.read())"
 ]
 },
@@ -531,7 +530,7 @@
 "name": "python",
 "nbconvert_exporter": "python",
 "pygments_lexer": "ipython3",
-"version": "3.13.5"
+"version": "3.13.3"
 }
 },
 "nbformat": 4,

docs/examples/Serialization Example.ipynb

Lines changed: 56 additions & 55 deletions
Large diffs are not rendered by default.

pyproject.toml

Lines changed: 12 additions & 0 deletions
@@ -91,6 +91,8 @@ show-fixes = true
 # Assume Python 3.11
 target-version = "py311"

+extend-exclude = ["setup.py"]
+
 [tool.ruff.lint]
 select = [
     # pycodestyle
@@ -115,6 +117,13 @@ select = [
     "SLOT",
     "PL",
     "NPY",
+    "W",
+    "PTH",
+    "TID",
+    "ARG",
+    "RET",
+    "PIE",
+    "SLF"
 ]

 [tool.ruff.lint.isort]
@@ -126,6 +135,9 @@ combine-as-imports = true
 "docs/examples/*.ipynb" = ["F811", "E402"]
 # Pylint was only run in src directory before moving to Ruff
 "tests/*" = ["PLR0915", "PLR0912", "PLR0913"]
+# Ignore private member access: _model_ptr, _all_component_count...
+"src/power_grid_model/_core/*.py" = ["SLF001"]
+"tests/unit/test_error_handling.py" = ["SLF001"]
 "setup.py" = ["PL"]

 [tool.mypy]

src/power_grid_model/_core/power_grid_model.py

Lines changed: 1 addition & 1 deletion
@@ -244,7 +244,7 @@ def _calculate_impl( # noqa: PLR0913
         options: Options,
         continue_on_batch_error: bool,
         decode_error: bool,
-        experimental_features: _ExperimentalFeatures | str, # NOSONAR
+        experimental_features: _ExperimentalFeatures | str, # NOSONAR # noqa: ARG002
     ):
         """
         Core calculation routine
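
With ARG enabled, ruff's ARG002 (unused method argument) would flag `experimental_features`, which ruff evidently considers unused in this method body but which stays in the signature; the added `# noqa: ARG002` suppresses that single finding. A sketch of the same pattern with made-up names:

```python
# Illustrative only: a method that keeps an argument for interface compatibility
# and silences ARG002 rather than changing its signature.
class Calculator:
    def run(self, data: list[float], experimental_features: str = "disabled"):  # noqa: ARG002
        # 'experimental_features' is accepted for API compatibility but unused here.
        return sum(data)


if __name__ == "__main__":
    print(Calculator().run([1.0, 2.0, 3.0]))  # 6.0
```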

src/power_grid_model/data_types.py

Lines changed: 1 addition & 1 deletion
@@ -179,7 +179,7 @@

 DenseBatchArray = _DenseBatchArray
 """
-A dense batch array is a two-dimensional structured numpy array containing a list of components of 
+A dense batch array is a two-dimensional structured numpy array containing a list of components of
 the same type for each scenario. Otherwise similar to :class:`SingleArray`.
 """
src/power_grid_model/utils.py

Lines changed: 7 additions & 7 deletions
@@ -141,7 +141,7 @@ def json_deserialize_from_file(
     Returns:
         The deserialized dataset in Power grid model input format.
     """
-    with open(file_path, encoding="utf-8") as file_pointer:
+    with file_path.open(encoding="utf-8") as file_pointer:
         return json_deserialize(file_pointer.read(), data_filter=data_filter)
@@ -169,7 +169,7 @@ def json_serialize_to_file(
         data=data, dataset_type=dataset_type, use_compact_list=use_compact_list, indent=-1 if indent is None else indent
     )

-    with open(file_path, mode="w", encoding="utf-8") as file_pointer:
+    with file_path.open(mode="w", encoding="utf-8") as file_pointer:
         file_pointer.write(result)
@@ -190,7 +190,7 @@ def msgpack_deserialize_from_file(
     Returns:
         The deserialized dataset in Power grid model input format.
     """
-    with open(file_path, mode="rb") as file_pointer:
+    with file_path.open(mode="rb") as file_pointer:
         return msgpack_deserialize(file_pointer.read(), data_filter=data_filter)
@@ -212,7 +212,7 @@ def msgpack_serialize_to_file(
     data = _map_to_component_types(data)
     result = msgpack_serialize(data=data, dataset_type=dataset_type, use_compact_list=use_compact_list)

-    with open(file_path, mode="wb") as file_pointer:
+    with file_path.open(mode="wb") as file_pointer:
         file_pointer.write(result)
@@ -278,7 +278,7 @@ def _compatibility_deprecated_export_json_data(
 ):
     serialized_data = json_serialize(data=data, use_compact_list=compact, indent=-1 if indent is None else indent)
     old_format_serialized_data = json.dumps(json.loads(serialized_data)["data"])
-    with open(json_file, mode="w", encoding="utf-8") as file_pointer:
+    with json_file.open(mode="w", encoding="utf-8") as file_pointer:
         file_pointer.write(old_format_serialized_data)
@@ -327,7 +327,7 @@ def import_update_data(json_file: Path) -> BatchDataset:


 def _compatibility_deprecated_import_json_data(json_file: Path, data_type: DatasetType):
-    with open(json_file, mode="r", encoding="utf-8") as file_pointer:
+    with Path(json_file).open(mode="r", encoding="utf-8") as file_pointer:
         data = json.load(file_pointer)

     if "version" not in data: # convert old format to version 1.0
@@ -395,7 +395,7 @@ def self_test():
     json_serialize_to_file(output_file_path, output_data)

     # Verify that the written output is correct
-    with open(output_file_path, "r", encoding="utf-8") as output_file:
+    with Path(output_file_path).open("r", encoding="utf-8") as output_file:
         output_data = json.load(output_file)

     assert output_data is not None
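
All seven hunks are PTH123 fixes: `Path.open()` replaces the `open()` builtin. The two call sites that gained a `Path(...)` wrapper also keep working if a caller passes a plain string instead of a `Path`, since `Path()` accepts both. A minimal sketch of the pattern, not taken from the repository:

```python
# Sketch (not repository code): Path.open() replaces the open() builtin and the
# Path(...) wrapper tolerates both str and Path arguments.
import json
import tempfile
from pathlib import Path


def write_json(file_path: Path | str, payload: dict) -> None:
    with Path(file_path).open(mode="w", encoding="utf-8") as file_pointer:
        json.dump(payload, file_pointer)


def read_json(file_path: Path | str) -> dict:
    with Path(file_path).open(encoding="utf-8") as file_pointer:
        return json.load(file_pointer)


if __name__ == "__main__":
    with tempfile.TemporaryDirectory() as tmp:
        target = Path(tmp) / "demo.json"
        write_json(target, {"version": "1.0"})
        print(read_json(str(target)))  # a str path works too: {'version': '1.0'}
```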

src/power_grid_model/validation/_validation.py

Lines changed: 1 addition & 2 deletions
@@ -882,8 +882,7 @@ def validate_generic_load_gen(data: SingleDataset, component: ComponentType) ->


 def validate_shunt(data: SingleDataset) -> list[ValidationError]:
-    errors = validate_appliance(data, ComponentType.shunt)
-    return errors
+    return validate_appliance(data, ComponentType.shunt)


 def validate_generic_voltage_sensor(data: SingleDataset, component: ComponentType) -> list[ValidationError]:
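
This is ruff's RET504 (unnecessary assignment before `return`): the temporary `errors` variable is dropped and the call result returned directly. The same transformation in isolation, with made-up names:

```python
# Illustrative only (made-up names): the pattern RET504 flags, and its fix.
def _check(values: list[int]) -> list[str]:
    return [f"value {v} is negative" for v in values if v < 0]


def validate_before(values: list[int]) -> list[str]:
    errors = _check(values)  # RET504: assignment immediately returned
    return errors


def validate_after(values: list[int]) -> list[str]:
    return _check(values)


if __name__ == "__main__":
    print(validate_after([1, -2, 3]))  # ['value -2 is negative']
```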

tests/unit/deprecated/test_end_to_end.py

Lines changed: 1 addition & 2 deletions
@@ -9,8 +9,7 @@
 from power_grid_model._core.power_grid_model import PowerGridModel
 from power_grid_model._core.utils import convert_batch_dataset_to_batch_list
 from power_grid_model.utils import import_json_data
-
-from ..utils import compare_result
+from tests.unit.utils import compare_result


 @pytest.fixture()
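
The parent-relative import is replaced by an absolute one, in line with TID252 from flake8-tidy-imports, whose default setting bans imports that reach up to parent packages. The contrast, shown as comments only (module names mirror the diff above; the package layout itself is an assumption):

```python
# Illustration only; module names mirror the diff above, and the layout
# (tests/unit/utils.py providing compare_result) is an assumption.

# Flagged by TID252 (reaches up to the parent package):
#     from ..utils import compare_result
#
# Preferred absolute form, valid no matter where the test module is imported from:
#     from tests.unit.utils import compare_result
```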
