diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 8b45ba90c3866615be8a8250dd0c299270e8a6be..bb5a43f5c882dea947d7213e5a5d2a61e6957602 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -18,22 +18,20 @@ repos:
     hooks:
       - id: typos
 
-  - repo: https://github.com/psf/black
-    rev: 25.1.0
-    hooks:
-      - id: black
-
   - repo: https://github.com/Carreau/velin
     rev: 0.0.11
     hooks:
       - id: velin
 
-  - repo: https://github.com/charliermarsh/ruff-pre-commit
+  - repo: https://github.com/astral-sh/ruff-pre-commit
     # Ruff version.
-    rev: "v0.11.4"
+    rev: "v0.11.5"
     hooks:
+      # Run the linter.
       - id: ruff
-        args: ["--fix", "--select", "I"]
+        args: [--fix]
+      # Run the formatter.
+      - id: ruff-format
 
   - repo: https://github.com/adamchainz/blacken-docs
     rev: 1.19.1
diff --git a/b_asic/graph_component.py b/b_asic/graph_component.py
index 449adce06331ff846374b3fead32d5c614d750e1..da0c50400091bbd2532249c71927737b30ceb3f8 100644
--- a/b_asic/graph_component.py
+++ b/b_asic/graph_component.py
@@ -171,9 +171,7 @@ class AbstractGraphComponent(GraphComponent):
         new_component.name = copy(self.name)
         new_component.graph_id = copy(self.graph_id)
         for name, value in self.params.items():
-            new_component.set_param(
-                copy(name), deepcopy(value)
-            )  # pylint: disable=no-member
+            new_component.set_param(copy(name), deepcopy(value))  # pylint: disable=no-member
         return new_component
 
     def traverse(self) -> Generator[GraphComponent, None, None]:
diff --git a/b_asic/operation.py b/b_asic/operation.py
index 725f627acc26a9e8fcf36770dcff76fca3da7de7..5039e401865d6e8c56b1a31808074a761c35f540 100644
--- a/b_asic/operation.py
+++ b/b_asic/operation.py
@@ -516,9 +516,7 @@ class AbstractOperation(Operation, AbstractGraphComponent):
 
     @overload
     @abstractmethod
-    def evaluate(
-        self, *inputs: Operation
-    ) -> list[Operation]:  # pylint: disable=arguments-differ
+    def evaluate(self, *inputs: Operation) -> list[Operation]:  # pylint: disable=arguments-differ
         ...
 
     @overload
diff --git a/b_asic/resources.py b/b_asic/resources.py
index 0e06b295a0731495ffbcedc7d6244cf4b6301b91..a736c383e0f695009c328b9d0de771b84609baf7 100644
--- a/b_asic/resources.py
+++ b/b_asic/resources.py
@@ -920,15 +920,15 @@ class ProcessCollection:
             "ilp_graph_color",
         ] = "left_edge",
         coloring_strategy: Literal[
-            'largest_first',
-            'random_sequential',
-            'smallest_last',
-            'independent_set',
-            'connected_sequential_bfs',
-            'connected_sequential_dfs',
-            'connected_sequential',
-            'saturation_largest_first',
-            'DSATUR',
+            "largest_first",
+            "random_sequential",
+            "smallest_last",
+            "independent_set",
+            "connected_sequential_bfs",
+            "connected_sequential_dfs",
+            "connected_sequential",
+            "saturation_largest_first",
+            "DSATUR",
         ] = "saturation_largest_first",
         max_colors: int | None = None,
         solver: PULP_CBC_CMD | GUROBI | None = None,
@@ -1215,7 +1215,6 @@ class ProcessCollection:
         sequence: list[Process],
         processing_elements: list["ProcessingElement"],
     ) -> list["ProcessCollection"]:
-
         if set(self.collection) != set(sequence):
             raise KeyError("processes in `sequence` must be equal to processes in self")
 
@@ -1281,7 +1280,6 @@ class ProcessCollection:
         sequence: list[Process],
         processing_elements: list["ProcessingElement"],
     ) -> list["ProcessCollection"]:
-
         if set(self.collection) != set(sequence):
             raise KeyError("processes in `sequence` must be equal to processes in self")
 
@@ -1308,7 +1306,6 @@ class ProcessCollection:
 
         for i, collection in enumerate(collections):
             if process_fits_in_collection[i]:
-
                 count_1 = ProcessCollection._count_number_of_pes_written_to(
                     processing_elements, collection
                 )
diff --git a/b_asic/scheduler.py b/b_asic/scheduler.py
index b49ea990269eabefdd01b078cc9b55086e6ea2c0..78e1409070b271ec989976ca95aeb371cd4201c0 100644
--- a/b_asic/scheduler.py
+++ b/b_asic/scheduler.py
@@ -35,11 +35,11 @@ class Scheduler(ABC):
     """
 
     __slots__ = (
-        '_schedule',
-        '_op_laps',
-        '_input_times',
-        '_output_delta_times',
-        '_sort_y_location',
+        "_schedule",
+        "_op_laps",
+        "_input_times",
+        "_output_delta_times",
+        "_sort_y_location",
     )
 
     _schedule: "Schedule"
@@ -412,23 +412,23 @@ class ListScheduler(Scheduler):
     """
 
     __slots__ = (
-        '_remaining_ops',
-        '_deadlines',
-        '_output_slacks',
-        '_fan_outs',
-        '_current_time',
-        '_cached_execution_times_in_time',
-        '_alap_start_times',
-        '_sort_order',
-        '_max_resources',
-        '_max_concurrent_reads',
-        '_max_concurrent_writes',
-        '_remaining_ops_set',
-        '_alap_op_laps',
-        '_alap_schedule_time',
-        '_used_reads',
-        '_remaining_resources',
-        '_cached_execution_times',
+        "_remaining_ops",
+        "_deadlines",
+        "_output_slacks",
+        "_fan_outs",
+        "_current_time",
+        "_cached_execution_times_in_time",
+        "_alap_start_times",
+        "_sort_order",
+        "_max_resources",
+        "_max_concurrent_reads",
+        "_max_concurrent_writes",
+        "_remaining_ops_set",
+        "_alap_op_laps",
+        "_alap_schedule_time",
+        "_used_reads",
+        "_remaining_resources",
+        "_cached_execution_times",
     )
     _remaining_ops: list["GraphID"]
     _deadlines: dict["GraphID", int]
@@ -932,7 +932,7 @@ class RecursiveListScheduler(ListScheduler):
         If the y-location should be sorted based on start time of operations.
     """
 
-    __slots__ = ('_recursive_ops', '_recursive_ops_set', '_remaining_recursive_ops')
+    __slots__ = ("_recursive_ops", "_recursive_ops_set", "_remaining_recursive_ops")
 
     def __init__(
         self,
diff --git a/b_asic/scheduler_gui/compile.py b/b_asic/scheduler_gui/compile.py
index bdd29cb3bad6ad7e7f8322c487153e04c8f7d1e2..4ab6156046e839425452344c118671116b93f058 100644
--- a/b_asic/scheduler_gui/compile.py
+++ b/b_asic/scheduler_gui/compile.py
@@ -85,9 +85,9 @@ def compile_rc(*filenames: str) -> None:
         if rcc is None:
             rcc = shutil.which("pyrcc5")
         arguments = f"-o {outfile} {filename}"
-        assert (
-            rcc
-        ), "Qt Resource compiler failed, cannot find pyside2-rcc, rcc, or pyrcc5"
+        assert rcc, (
+            "Qt Resource compiler failed, cannot find pyside2-rcc, rcc, or pyrcc5"
+        )
 
         os_ = sys.platform
         if os_.startswith("linux"):  # Linux
diff --git a/b_asic/signal_flow_graph.py b/b_asic/signal_flow_graph.py
index 349e0e8eee7340d7d70fda244e47d1e22f9b7280..f46e5d4451aeb94aa1d2b579098e930751a8cad9 100644
--- a/b_asic/signal_flow_graph.py
+++ b/b_asic/signal_flow_graph.py
@@ -65,9 +65,7 @@ class GraphIDGenerator:
     @property
     def id_number_offset(self) -> GraphIDNumber:
         """Get the graph id number offset of this generator."""
-        return GraphIDNumber(
-            self._next_id_number.default_factory()
-        )  # pylint: disable=not-callable
+        return GraphIDNumber(self._next_id_number.default_factory())  # pylint: disable=not-callable
 
 
 class SFG(AbstractOperation):
@@ -2245,9 +2243,8 @@ class SFG(AbstractOperation):
                 input_port = sfgs[j].input(input_name_to_idx["input_t" + index])
                 input_port.connect(port)
                 delay_placements[port] = [i, number_of_delays_between]
-            sfgs[i].graph_id = (
-                f"sfg{i}"  # deterministically set the graphID of the sfgs
-            )
+            # deterministically set the graphID of the SFGs
+            sfgs[i].graph_id = f"sfg{i}"
 
         sfg = SFG(new_inputs, new_outputs)  # create a new SFG to remove floating nodes
 
diff --git a/test/unit/test_sfg.py b/test/unit/test_sfg.py
index 9d0a7678c2b248d0fbf4ec1f13b026e49457be9d..b698cae373d6b4936984fd98cde4f3412ada05a5 100644
--- a/test/unit/test_sfg.py
+++ b/test/unit/test_sfg.py
@@ -1988,9 +1988,7 @@ class TestStateSpace:
     def test_sfg_two_inputs_two_outputs_independent_with_cmul(
         self, sfg_two_inputs_two_outputs_independent_with_cmul
     ):
-        ss = (
-            sfg_two_inputs_two_outputs_independent_with_cmul.state_space_representation()
-        )
+        ss = sfg_two_inputs_two_outputs_independent_with_cmul.state_space_representation()
         assert ss[0] == ["y0", "y1"]
         assert (ss[1] == np.array([[20.0, 0.0], [0.0, 8.0]])).all()
 