From 221031cbf4095871667efc4b77c52c18c6276af3 Mon Sep 17 00:00:00 2001
From: Oscar Gustafsson <oscar.gustafsson@liu.se>
Date: Mon, 14 Apr 2025 15:54:31 +0000
Subject: [PATCH] Misc fixes including no f-strings in logger

---
 b_asic/GUI/arrow.py                          |   4 +-
 b_asic/GUI/drag_button.py                    |  12 +-
 b_asic/GUI/main_window.py                    |  40 ++--
 b_asic/GUI/precedence_graph_window.py        |   2 +-
 b_asic/GUI/properties_window.py              |   2 +-
 b_asic/GUI/signal_generator_input.py         |   6 +-
 b_asic/gui_utils/decorators.py               |   5 +-
 b_asic/scheduler.py                          | 198 ++++++++++---------
 b_asic/scheduler_gui/compile.py              |   4 +-
 b_asic/scheduler_gui/main_window.py          |  37 ++--
 pyproject.toml                               |  16 +-
 test/conftest.py                             |   1 +
 test/fixtures/integration.py                 |  98 +++++++++
 test/integration/test_sfg_to_architecture.py | 116 ++++-------
 test/unit/test_graph_id_generator.py         |   2 +-
 15 files changed, 315 insertions(+), 228 deletions(-)
 create mode 100644 test/fixtures/integration.py

diff --git a/b_asic/GUI/arrow.py b/b_asic/GUI/arrow.py
index 3e5c83ee..b8e26ee6 100644
--- a/b_asic/GUI/arrow.py
+++ b/b_asic/GUI/arrow.py
@@ -144,8 +144,8 @@ class Arrow(QGraphicsPathItem):
                     ) and operation in self._window._operation_to_sfg:
                         self._window._logger.info(
                             "Operation detected in existing SFG, removing SFG"
-                            " with name:"
-                            f" {self._window._operation_to_sfg[operation].name}."
+                            " with name: %s",
+                            self._window._operation_to_sfg[operation].name,
                         )
                         del self._window._sfg_dict[
                             self._window._operation_to_sfg[operation].name
diff --git a/b_asic/GUI/drag_button.py b/b_asic/GUI/drag_button.py
index 7561808b..8ff52e57 100644
--- a/b_asic/GUI/drag_button.py
+++ b/b_asic/GUI/drag_button.py
@@ -217,15 +217,17 @@ class DragButton(QPushButton):
 
     def remove(self, event=None):
         """Remove button/operation from signal flow graph."""
-        self._window._logger.info("Removing operation with name " + self.operation.name)
+        self._window._logger.info(
+            "Removing operation with name %s", self.operation.name
+        )
         self._window._scene.removeItem(self._window._drag_operation_scenes[self])
 
         _signals = []
         for signal, ports in self._window._arrow_ports.items():
             if any(set(port).intersection(set(self._ports)) for port in ports):
                 self._window._logger.info(
-                    f"Removed signal with name: {signal.signal.name}"
-                    f" to/from operation: {self.operation.name}."
+                    "Removed signal with name: %s to/from operation: %s",
+                    signal.signal.name, self.operation.name,
                 )
                 _signals.append(signal)
 
@@ -234,8 +236,8 @@ class DragButton(QPushButton):
 
         if self in self._window._operation_to_sfg:
             self._window._logger.info(
-                "Operation detected in existing SFG, removing SFG with name: "
-                + self._window._operation_to_sfg[self].name
+                "Operation detected in existing SFG, removing SFG with name: %s",
+                self._window._operation_to_sfg[self].name,
             )
             del self._window._sfg_dict[self._window._operation_to_sfg[self].name]
             self._window._operation_to_sfg = {
diff --git a/b_asic/GUI/main_window.py b/b_asic/GUI/main_window.py
index 72a09f14..cf279730 100644
--- a/b_asic/GUI/main_window.py
+++ b/b_asic/GUI/main_window.py
@@ -269,7 +269,7 @@ class SFGMainWindow(QMainWindow):
         if not accepted:
             return
 
-        self._logger.info("Saving SFG to path: " + str(module))
+        self._logger.info("Saving SFG to path: %s", module)
         operation_positions = {}
         for op_drag, op_scene in self._drag_operation_scenes.items():
             operation_positions[op_drag.operation.graph_id] = (
@@ -285,12 +285,12 @@ class SFGMainWindow(QMainWindow):
                 )
         except Exception as e:
             self._logger.error(
-                f"Failed to save SFG to path: {module}, with error: {e}."
+                "Failed to save SFG to path: %s, with error: %s.", module, e
             )
             return
 
-        self._logger.info("Saved SFG to path: " + str(module))
-        self.update_statusbar("Saved SFG to path: " + str(module))
+        self._logger.info("Saved SFG to path: %s", module)
+        self.update_statusbar(f"Saved SFG to path: {module}")
 
     def save_work(self, event=None) -> None:
         if not self._sfg_dict:
@@ -309,12 +309,12 @@ class SFGMainWindow(QMainWindow):
         self._load_from_file(module)
 
     def _load_from_file(self, module) -> None:
-        self._logger.info("Loading SFG from path: " + str(module))
+        self._logger.info("Loading SFG from path: %s", module)
         try:
             sfg, positions = python_to_sfg(module)
         except ImportError as e:
             self._logger.error(
-                f"Failed to load module: {module} with the following error: {e}."
+                "Failed to load module: %s with the following error: %s.", module, e
             )
             return
 
@@ -322,8 +322,8 @@ class SFGMainWindow(QMainWindow):
 
         while sfg.name in self._sfg_dict:
             self._logger.warning(
-                f"Duplicate SFG with name: {sfg.name} detected. "
-                "Please choose a new name."
+                "Duplicate SFG with name: %s detected, please choose a new name",
+                sfg.name,
             )
             name, accepted = QInputDialog.getText(
                 self, "Change SFG Name", "Name: ", QLineEdit.Normal
@@ -333,7 +333,7 @@ class SFGMainWindow(QMainWindow):
 
             sfg.name = name
         self._load_sfg(sfg, positions)
-        self._logger.info("Loaded SFG from path: " + str(module))
+        self._logger.info("Loaded SFG from path: %s", module)
         self.update_statusbar(f"Loaded SFG from {module}")
 
     def _load_sfg(self, sfg: SFG, positions=None) -> None:
@@ -488,10 +488,10 @@ class SFGMainWindow(QMainWindow):
             self._logger.warning("Failed to initialize SFG with empty name.")
             return
 
-        self._logger.info(f"Creating SFG with name: {name} from selected operations.")
+        self._logger.info("Creating SFG with name: %s from selected operations.", name)
 
         sfg = SFG(inputs=inputs, outputs=outputs, name=name)
-        self._logger.info(f"Created SFG with name: {name} from selected operations.")
+        self._logger.info("Created SFG with name: %s from selected operations.", name)
         self.update_statusbar(f"Created SFG: {name}")
 
         def check_equality(signal: Signal, signal_2: Signal) -> bool:
@@ -615,9 +615,7 @@ class SFGMainWindow(QMainWindow):
         list
             A list of names of all the operations in the module.
         """
-        self._logger.info(
-            "Fetching operations from namespace: " + str(namespace.__name__)
-        )
+        self._logger.info("Fetching operations from namespace: %s", namespace.__name__)
         return [
             comp
             for comp in dir(namespace)
@@ -647,7 +645,7 @@ class SFGMainWindow(QMainWindow):
             except NotImplementedError:
                 pass
 
-        self._logger.info("Added operations from namespace: " + str(namespace.__name__))
+        self._logger.info("Added operations from namespace: %s", namespace.__name__)
 
     def add_namespace(self, event=None) -> None:
         """Callback for adding namespace."""
@@ -752,18 +750,18 @@ class SFGMainWindow(QMainWindow):
 
         except Exception as e:
             self._logger.error(
-                "Unexpected error occurred while creating operation: " + str(e)
+                "Unexpected error occurred while creating operation: %s", e
             )
 
     def _create_operation_item(self, item) -> None:
-        self._logger.info(f"Creating operation of type: {item.text()!s}")
+        self._logger.info("Creating operation of type: %s", item.text())
         try:
             attr_operation = self._operations_from_name[item.text()]()
             self.add_operation(attr_operation)
             self.update_statusbar(f"{item.text()} added.")
         except Exception as e:
             self._logger.error(
-                "Unexpected error occurred while creating operation: " + str(e)
+                "Unexpected error occurred while creating operation: %s", e
             )
 
     def _refresh_operations_list_from_namespace(self) -> None:
@@ -840,8 +838,8 @@ class SFGMainWindow(QMainWindow):
             if signal.destination is destination.port
         )
         self._logger.info(
-            f"Connecting: {source.operation.type_name()}"
-            f" -> {destination.operation.type_name()}."
+            "Connecting: %s -> %s",
+            source.operation.type_name(), destination.operation.type_name(),
         )
         try:
             arrow = Arrow(source, destination, self, signal=next(signal_exists))
@@ -903,7 +901,7 @@ class SFGMainWindow(QMainWindow):
         self._thread = {}
         self._sim_worker = {}
         for sfg, properties in self._simulation_dialog._properties.items():
-            self._logger.info(f"Simulating SFG with name: {sfg.name!s}")
+            self._logger.info("Simulating SFG with name: %s", sfg.name)
             self._sim_worker[sfg] = SimulationWorker(sfg, properties)
             self._thread[sfg] = QThread()
             self._sim_worker[sfg].moveToThread(self._thread[sfg])
diff --git a/b_asic/GUI/precedence_graph_window.py b/b_asic/GUI/precedence_graph_window.py
index 022f5a0a..0d151cee 100644
--- a/b_asic/GUI/precedence_graph_window.py
+++ b/b_asic/GUI/precedence_graph_window.py
@@ -60,7 +60,7 @@ class PrecedenceGraphWindow(QDialog):
         for check_box, sfg in self._check_box_dict.items():
             if check_box.isChecked():
                 self._window._logger.info(
-                    f"Creating a precedence graph from SFG with name: {sfg}."
+                    "Creating a precedence graph from SFG with name: %s", sfg
                 )
                 self._window._sfg_dict[sfg].show_precedence_graph()
 
diff --git a/b_asic/GUI/properties_window.py b/b_asic/GUI/properties_window.py
index 6babd698..1a75597f 100644
--- a/b_asic/GUI/properties_window.py
+++ b/b_asic/GUI/properties_window.py
@@ -139,7 +139,7 @@ class PropertiesWindow(QDialog):
 
     def save_properties(self):
         self._window._logger.info(
-            f"Saving _properties of operation: {self.operation.name}."
+            "Saving properties of operation: %s", self.operation.name
         )
         self.operation.name = self._edit_name.text()
         self.operation.operation.name = self._edit_name.text()
diff --git a/b_asic/GUI/signal_generator_input.py b/b_asic/GUI/signal_generator_input.py
index 2927bdca..c2ea840f 100644
--- a/b_asic/GUI/signal_generator_input.py
+++ b/b_asic/GUI/signal_generator_input.py
@@ -39,8 +39,8 @@ class SignalGeneratorInput(QGridLayout):
             return _type(string)
         except ValueError:
             self._logger.warning(
-                f"Cannot parse {name}: {string} not a {_type.__name__}, setting to"
-                f" {default}"
+                "Cannot parse %s: %s not a %s, setting to %s",
+                name, string, _type.__name__, default,
             )
             return default
 
@@ -106,7 +106,7 @@ class ZeroPadInput(SignalGeneratorInput):
                     val = 0
                 val = complex(val)
             except ValueError:
-                self._logger.warning(f"Skipping value: {val}, not a digit.")
+                self._logger.warning("Skipping value: %s, not a digit.", val)
                 continue
             input_values.append(val)
         return ZeroPad(input_values)
diff --git a/b_asic/gui_utils/decorators.py b/b_asic/gui_utils/decorators.py
index 1e3ec970..dca6d0f4 100644
--- a/b_asic/gui_utils/decorators.py
+++ b/b_asic/gui_utils/decorators.py
@@ -8,8 +8,9 @@ def handle_error(fn):
         try:
             return fn(self, *args, **kwargs)
         except Exception:
-            self._window._logger.error(f"Unexpected error: {format_exc()}")
-            QErrorMessage(self._window).showMessage(f"Unexpected error: {format_exc()}")
+            error_msg = f"Unexpected error: {format_exc()}"
+            self._window._logger.error(error_msg)
+            QErrorMessage(self._window).showMessage(error_msg)
 
     return wrapper
 
diff --git a/b_asic/scheduler.py b/b_asic/scheduler.py
index 17523302..05fe4763 100644
--- a/b_asic/scheduler.py
+++ b/b_asic/scheduler.py
@@ -3,7 +3,7 @@ from abc import ABC, abstractmethod
 from collections import defaultdict
 from typing import TYPE_CHECKING, cast
 
-import b_asic.logger as logger
+import b_asic.logger
 from b_asic.core_operations import DontCare, Sink
 from b_asic.port import OutputPort
 from b_asic.special_operations import Delay, Input, Output
@@ -14,15 +14,29 @@ if TYPE_CHECKING:
     from b_asic.schedule import Schedule
     from b_asic.types import GraphID
 
+log = b_asic.logger.getLogger(__name__)
+
 
 class Scheduler(ABC):
+    """
+    Scheduler base class
+
+    Parameters
+    ----------
+    input_times : dict(GraphID, int), optional
+        The times when inputs arrive.
+    output_delta_times : dict(GraphID, int), optional
+        The relative time when outputs should be produced
+    sort_y_location : bool, default: True
+        If the y-position should be sorted based on start time of operations.
+    """
+
     def __init__(
         self,
         input_times: dict["GraphID", int] | None = None,
         output_delta_times: dict["GraphID", int] | None = None,
-        sort_y_direction: bool = True,
+        sort_y_location: bool = True,
     ):
-        self._logger = logger.getLogger("scheduler")
         self._op_laps = {}
 
         if input_times is not None:
@@ -59,11 +73,12 @@ class Scheduler(ABC):
         else:
             self._output_delta_times = {}
 
-        self._sort_y_direction = sort_y_direction
+        self._sort_y_location = sort_y_location
 
     @abstractmethod
     def apply_scheduling(self, schedule: "Schedule") -> None:
-        """Applies the scheduling algorithm on the given Schedule.
+        """
+        Apply the scheduling algorithm on the given Schedule.
 
         Parameters
         ----------
@@ -73,14 +88,14 @@ class Scheduler(ABC):
         raise NotImplementedError
 
     def _place_inputs_on_given_times(self) -> None:
-        self._logger.debug("--- Input placement starting ---")
+        log.debug("Input placement starting")
         for input_id in self._input_times:
             self._schedule.start_times[input_id] = self._input_times[input_id]
             self._op_laps[input_id] = 0
-            self._logger.debug(
-                f"   {input_id} time: {self._schedule.start_times[input_id]}"
+            log.debug(
+                "Input %s at time: %d", input_id, self._schedule.start_times[input_id]
             )
-        self._logger.debug("--- Input placement completed ---")
+        log.debug("Input placement completed")
 
     def _place_outputs_asap(
         self, schedule: "Schedule", non_schedulable_ops: list["GraphID"] | None = []
@@ -102,7 +117,7 @@ class Scheduler(ABC):
                 ] + cast(int, source_port.latency_offset)
 
     def _place_outputs_on_given_times(self) -> None:
-        self._logger.debug("--- Output placement starting ---")
+        log.debug("Output placement starting")
         if self._schedule._cyclic and isinstance(self, ListScheduler):
             end = self._schedule._schedule_time
         else:
@@ -135,10 +150,10 @@ class Scheduler(ABC):
                     if self._schedule._schedule_time
                     else new_time
                 )
-                self._logger.debug(f"   {output.graph_id} time: {modulo_time}")
-        self._logger.debug("--- Output placement completed ---")
+                log.debug("Output %s at time: %d", output.graph_id, modulo_time)
+        log.debug("Output placement completed")
 
-        self._logger.debug("--- Output placement optimization starting ---")
+        log.debug("Output placement optimization starting")
         min_slack = min(
             self._schedule.backward_slack(op.graph_id)
             for op in self._sfg.find_by_type(Output)
@@ -162,23 +177,36 @@ class Scheduler(ABC):
                         f"for scheduling time {self._schedule._schedule_time}. "
                         "Try to relax the scheduling time, change the output delta times or enable cyclic."
                     )
-                self._logger.debug(
-                    f"   {output.graph_id} moved {min_slack} time steps backwards to new time {new_time}"
+                log.debug(
+                    "Output %s moved %d time steps backwards to new time %d",
+                    output.graph_id, min_slack, new_time,
                 )
-        self._logger.debug("--- Output placement optimization completed ---")
+        log.debug("Output placement optimization completed")
+
+    def _handle_dont_cares(self) -> None:
+        # schedule all dont cares ALAP
+        for dc_op in self._sfg.find_by_type(DontCare):
+            self._schedule.start_times[dc_op.graph_id] = 0
+            self._schedule.place_operation(
+                dc_op, self._schedule.forward_slack(dc_op.graph_id), self._op_laps
+            )
+            self._op_laps[dc_op.graph_id] = 0
+
+    def _handle_sinks(self) -> None:
+        # schedule all sinks ASAP
+        for sink_op in self._sfg.find_by_type(Sink):
+            self._schedule.start_times[sink_op.graph_id] = self._schedule._schedule_time
+            self._schedule.place_operation(
+                sink_op, self._schedule.backward_slack(sink_op.graph_id), self._op_laps
+            )
+            self._op_laps[sink_op.graph_id] = 0
 
 
 class ASAPScheduler(Scheduler):
     """Scheduler that implements the as-soon-as-possible (ASAP) algorithm."""
 
     def apply_scheduling(self, schedule: "Schedule") -> None:
-        """Applies the scheduling algorithm on the given Schedule.
-
-        Parameters
-        ----------
-        schedule : Schedule
-            Schedule to apply the scheduling algorithm on.
-        """
+        # Doc-string inherited
         self._schedule = schedule
         self._sfg = schedule._sfg
         prec_list = schedule.sfg.get_precedence_list()
@@ -188,6 +216,8 @@ class ASAPScheduler(Scheduler):
         if self._input_times:
             self._place_inputs_on_given_times()
 
+        log.debug("ASAP scheduling starting")
+
         # handle the first set in precedence graph (input and delays)
         non_schedulable_ops = []
         for outport in prec_list[0]:
@@ -238,6 +268,7 @@ class ASAPScheduler(Scheduler):
 
                     schedule.start_times[operation.graph_id] = op_start_time
 
+        log.debug("ASAP scheduling completed")
         self._place_outputs_asap(schedule, non_schedulable_ops)
         if self._input_times:
             self._place_outputs_on_given_times()
@@ -250,7 +281,9 @@ class ASAPScheduler(Scheduler):
         elif schedule._schedule_time < max_end_time:
             raise ValueError(f"Too short schedule time. Minimum is {max_end_time}.")
 
-        if self._sort_y_direction:
+        self._handle_dont_cares()
+
+        if self._sort_y_location:
             schedule.sort_y_locations_on_start_times()
 
 
@@ -258,13 +291,7 @@ class ALAPScheduler(Scheduler):
     """Scheduler that implements the as-late-as-possible (ALAP) algorithm."""
 
     def apply_scheduling(self, schedule: "Schedule") -> None:
-        """Applies the scheduling algorithm on the given Schedule.
-
-        Parameters
-        ----------
-        schedule : Schedule
-            Schedule to apply the scheduling algorithm on.
-        """
+        # Doc-string inherited
         self._schedule = schedule
         self._sfg = schedule._sfg
         ASAPScheduler(
@@ -281,6 +308,7 @@ class ALAPScheduler(Scheduler):
             output = cast(Output, output)
             self._op_laps[output.graph_id] = 0
 
+        log.debug("ALAP scheduling starting")
         # move all outputs ALAP before operations
         for output in schedule.sfg.find_by_type(Output):
             output = cast(Output, output)
@@ -302,13 +330,16 @@ class ALAPScheduler(Scheduler):
                     )
                     schedule.move_operation_alap(op.graph_id)
 
+        log.debug("ALAP scheduling completed")
         # adjust the scheduling time if empty time slots have appeared in the start
         slack = min(schedule.start_times.values())
         for op_id in schedule.start_times:
             schedule.move_operation(op_id, -slack)
         schedule.set_schedule_time(schedule._schedule_time - slack)
 
-        if self._sort_y_direction:
+        self._handle_dont_cares()
+
+        if self._sort_y_location:
             schedule.sort_y_locations_on_start_times()
 
 
@@ -376,14 +407,8 @@ class ListScheduler(Scheduler):
         self._max_concurrent_writes = max_concurrent_writes or 0
 
     def apply_scheduling(self, schedule: "Schedule") -> None:
-        """Applies the scheduling algorithm on the given Schedule.
-
-        Parameters
-        ----------
-        schedule : Schedule
-            Schedule to apply the scheduling algorithm on.
-        """
-        self._logger.debug("--- Scheduler initializing ---")
+        # Doc-string inherited
+        log.debug("Scheduler initializing")
         self._initialize_scheduler(schedule)
 
         if self._sfg.loops and self._schedule.cyclic:
@@ -408,9 +433,9 @@ class ListScheduler(Scheduler):
             self._schedule.set_schedule_time(self._schedule.get_max_end_time())
         self._schedule.remove_delays()
         self._handle_dont_cares()
-        if self._sort_y_direction:
+        if self._sort_y_location:
             schedule.sort_y_locations_on_start_times()
-        self._logger.debug("--- Scheduling completed ---")
+        log.debug("Scheduling completed")
 
     def _get_next_op_id(
         self, priority_table: list[tuple["GraphID", int, ...]]
@@ -509,15 +534,18 @@ class ListScheduler(Scheduler):
 
     def _op_satisfies_resource_constraints(self, op: "Operation") -> bool:
         op_type = type(op)
-        for i in range(max(1, op.execution_time)):
-            time_slot = (
-                self._current_time + i
-                if self._schedule._schedule_time is None
-                else (self._current_time + i) % self._schedule._schedule_time
-            )
-            count = self._cached_execution_times_in_time[op_type][time_slot]
-            if count >= self._remaining_resources[op_type]:
-                return False
+        if self._schedule._schedule_time is None:
+            for i in range(max(1, op.execution_time)):
+                time_slot = self._current_time + i
+                count = self._cached_execution_times_in_time[op_type][time_slot]
+                if count >= self._remaining_resources[op_type]:
+                    return False
+        else:
+            for i in range(max(1, op.execution_time)):
+                time_slot = (self._current_time + i) % self._schedule._schedule_time
+                count = self._cached_execution_times_in_time[op_type][time_slot]
+                if count >= self._remaining_resources[op_type]:
+                    return False
         return True
 
     def _op_satisfies_concurrent_writes(self, op: "Operation") -> bool:
@@ -739,7 +767,7 @@ class ListScheduler(Scheduler):
         self._current_time = 0
 
     def _schedule_nonrecursive_ops(self) -> None:
-        self._logger.debug("--- Non-Recursive Operation scheduling starting ---")
+        log.debug("Non-Recursive Operation scheduling starting")
         while self._remaining_ops:
             prio_table = self._get_priority_table(self._remaining_ops)
             while prio_table:
@@ -762,15 +790,25 @@ class ListScheduler(Scheduler):
                     else 0
                 )
 
-                for i in range(max(1, next_op.execution_time)):
-                    time_slot = (
-                        (self._current_time + i) % self._schedule._schedule_time
-                        if self._schedule._schedule_time
-                        else self._current_time + i
-                    )
-                    self._cached_execution_times_in_time[type(next_op)][time_slot] += 1
-
-                self._log_scheduled_op(next_op)
+                if self._schedule._schedule_time is None:
+                    for i in range(max(1, next_op.execution_time)):
+                        time_slot = self._current_time + i
+                        self._cached_execution_times_in_time[type(next_op)][
+                            time_slot
+                        ] += 1
+                else:
+                    for i in range(max(1, next_op.execution_time)):
+                        time_slot = (
+                            self._current_time + i
+                        ) % self._schedule._schedule_time
+                        self._cached_execution_times_in_time[type(next_op)][
+                            time_slot
+                        ] += 1
+
+                log.debug(
+                    "Schedule operation: %s at time: %d",
+                    next_op.graph_id, self._current_time,
+                )
 
                 prio_table = self._get_priority_table(
                     self._remaining_ops
@@ -779,13 +817,7 @@ class ListScheduler(Scheduler):
 
             self._current_time += 1
         self._current_time -= 1
-        self._logger.debug("--- Non-Recursive Operation scheduling completed ---")
-
-    def _log_scheduled_op(self, next_op: "Operation") -> None:
-        if self._schedule._schedule_time is not None:
-            self._logger.debug(f"  Op: {next_op.graph_id}, time: {self._current_time}")
-        else:
-            self._logger.debug(f"  Op: {next_op.graph_id}, time: {self._current_time}")
+        log.debug("Non-recursive operation scheduling completed")
 
     def _update_port_reads(self, next_op: "Operation") -> None:
         for input_port in next_op.inputs:
@@ -805,14 +837,6 @@ class ListScheduler(Scheduler):
                 else:
                     self._used_reads[time] = 1
 
-    def _handle_dont_cares(self) -> None:
-        # schedule all dont cares ALAP
-        for dc_op in self._sfg.find_by_type(DontCare):
-            self._schedule.start_times[dc_op.graph_id] = 0
-            self._schedule.place_operation(
-                dc_op, self._schedule.forward_slack(dc_op.graph_id), self._op_laps
-            )
-
 
 class RecursiveListScheduler(ListScheduler):
     def __init__(
@@ -830,7 +854,7 @@ class RecursiveListScheduler(ListScheduler):
         )
 
     def apply_scheduling(self, schedule: "Schedule") -> None:
-        self._logger.debug("--- Scheduler initializing ---")
+        log.debug("Scheduler initializing")
         self._initialize_scheduler(schedule)
 
         if self._input_times:
@@ -856,9 +880,9 @@ class RecursiveListScheduler(ListScheduler):
         if loops:
             self._retime_ops(period_bound)
         self._handle_dont_cares()
-        if self._sort_y_direction:
+        if self._sort_y_location:
             schedule.sort_y_locations_on_start_times()
-        self._logger.debug("--- Scheduling completed ---")
+        log.debug("Scheduling completed")
 
     def _get_recursive_ops(self, loops: list[list["GraphID"]]) -> list["GraphID"]:
         recursive_ops = []
@@ -965,11 +989,11 @@ class RecursiveListScheduler(ListScheduler):
         saved_sched_time = self._schedule._schedule_time
         self._schedule._schedule_time = None
 
-        self._logger.debug("--- Scheduling of recursive loops starting ---")
+        log.debug("Scheduling of recursive loops starting")
         self._recursive_ops = self._get_recursive_ops(loops)
         self._recursive_ops_set = set(self._recursive_ops)
         self._remaining_recursive_ops = self._recursive_ops.copy()
-        self._logger.debug("--- Generating initial recursive priority table ---")
+        log.debug("Generating initial recursive priority table")
         prio_table = self._get_recursive_priority_table()
         while prio_table:
             op = self._get_next_recursive_op(prio_table)
@@ -994,17 +1018,15 @@ class RecursiveListScheduler(ListScheduler):
 
             self._schedule.place_operation(op, op_sched_time, self._op_laps)
             self._op_laps[op.graph_id] = 0
-            self._logger.debug(f"   Op: {op.graph_id} time: {op_sched_time}")
+            log.debug(
+                "Schedule operation: %s at time: %d", op.graph_id, op_sched_time
+            )
             self._remaining_recursive_ops.remove(op.graph_id)
             self._remaining_ops.remove(op.graph_id)
             self._remaining_ops_set.remove(op.graph_id)
 
             for i in range(max(1, op.execution_time)):
-                time_slot = (
-                    (self._current_time + i) % self._schedule._schedule_time
-                    if self._schedule._schedule_time
-                    else self._current_time + i
-                )
+                time_slot = self._current_time + i
                 self._cached_execution_times_in_time[op_type][time_slot] += 1
 
             prio_table = self._get_recursive_priority_table()
@@ -1013,7 +1035,7 @@ class RecursiveListScheduler(ListScheduler):
 
         if saved_sched_time and saved_sched_time > self._schedule._schedule_time:
             self._schedule._schedule_time = saved_sched_time
-        self._logger.debug("--- Scheduling of recursive loops completed ---")
+        log.debug("Scheduling of recursive loops completed")
 
     def _get_next_recursive_op(
         self, priority_table: list[tuple["GraphID", int, ...]]
diff --git a/b_asic/scheduler_gui/compile.py b/b_asic/scheduler_gui/compile.py
index c88ec3e6..bdd29cb3 100644
--- a/b_asic/scheduler_gui/compile.py
+++ b/b_asic/scheduler_gui/compile.py
@@ -112,7 +112,7 @@ def compile_rc(*filenames: str) -> None:
 
         else:  # other OS
             if log is not None:
-                log.error(f"{os_} RC compiler not supported")
+                log.error("%s RC compiler not supported", os_)
             else:
                 print(f"{os_} RC compiler not supported")
             raise NotImplementedError
@@ -187,7 +187,7 @@ def compile_ui(*filenames: str) -> None:
                 raise NotImplementedError
 
             else:
-                log.error(f"{os_} UI compiler not supported")
+                log.error("%s UI compiler not supported", os_)
                 raise NotImplementedError
 
         replace_qt_bindings(outfile)  # replace qt-bindings with qtpy
diff --git a/b_asic/scheduler_gui/main_window.py b/b_asic/scheduler_gui/main_window.py
index 3f616c66..21283440 100644
--- a/b_asic/scheduler_gui/main_window.py
+++ b/b_asic/scheduler_gui/main_window.py
@@ -48,7 +48,7 @@ from qtpy.QtWidgets import (
 )
 
 # B-ASIC
-import b_asic.logger as logger
+import b_asic.logger
 from b_asic._version import __version__
 from b_asic.graph_component import GraphComponent, GraphID
 from b_asic.gui_utils.about_window import AboutWindow
@@ -70,8 +70,8 @@ from b_asic.scheduler_gui.ui_main_window import Ui_MainWindow
 if TYPE_CHECKING:
     from logging import Logger
 
-log: "Logger" = logger.getLogger(__name__)
-sys.excepthook = logger.handle_exceptions
+log: "Logger" = b_asic.logger.getLogger(__name__)
+sys.excepthook = b_asic.logger.handle_exceptions
 
 
 # Debug stuff
@@ -83,18 +83,18 @@ if __debug__:
     from qtpy import QtCore
 
     QT_API = os.environ.get("QT_API", "")
-    log.debug(f"Qt version (runtime):      {QtCore.qVersion()}")
-    log.debug(f"Qt version (compile time): {QtCore.__version__}")
-    log.debug(f"QT_API:                    {QT_API}")
+    log.debug("Qt version (runtime):      %s", QtCore.qVersion())
+    log.debug("Qt version (compile time): %s", QtCore.__version__)
+    log.debug("QT_API:                    %s", QT_API)
     if QT_API.lower().startswith("pyside"):
         import PySide6
 
-        log.debug(f"PySide version:           {PySide6.__version__}")
+        log.debug("PySide version:           %s", PySide6.__version__)
     if QT_API.lower().startswith("pyqt"):
         from qtpy.QtCore import PYQT_VERSION_STR
 
-        log.debug(f"PyQt version:             {PYQT_VERSION_STR}")
-    log.debug(f"QtPy version:             {qtpy.__version__}")
+        log.debug("PyQt version:             %s", PYQT_VERSION_STR)
+    log.debug("QtPy version:             %s", qtpy.__version__)
 
 
 # The following QCoreApplication values is used for QSettings among others
@@ -337,19 +337,19 @@ class ScheduleMainWindow(QMainWindow, Ui_MainWindow):
         Load a python script as a module and search for a Schedule object. If
         found, opens it.
         """
-        log.debug(f"abs_path_filename = {abs_path_filename}.")
+        log.debug("abs_path_filename = %s", abs_path_filename)
 
         module_name = inspect.getmodulename(abs_path_filename)
         if not module_name:  # return if empty module name
-            log.error(f"Could not load module from file '{abs_path_filename}'.")
+            log.error("Could not load module from file '%s'", abs_path_filename)
             return
 
         try:
             module = SourceFileLoader(module_name, abs_path_filename).load_module()
         except Exception as e:
             log.exception(
-                "Exception occurred. Could not load module from file"
-                f" '{abs_path_filename}'.\n\n{e}"
+                "Exception occurred. Could not load module from file '%s'.\n\n%s",
+                abs_path_filename, e,
             )
             return
 
@@ -363,12 +363,13 @@ class ScheduleMainWindow(QMainWindow, Ui_MainWindow):
             QMessageBox.warning(
                 self,
                 self.tr("File not found"),
-                self.tr("Cannot find any Schedule object in file '{}'.").format(
+                self.tr("Cannot find any Schedule object in file {}").format(
                     os.path.basename(abs_path_filename)
                 ),
             )
             log.info(
-                f"Cannot find any Schedule object in file '{os.path.basename(abs_path_filename)}'."
+                "Cannot find any Schedule object in file %s",
+                os.path.basename(abs_path_filename),
             )
             del module
             return
@@ -640,7 +641,7 @@ class ScheduleMainWindow(QMainWindow, Ui_MainWindow):
             if not hide_dialog:
                 settings.setValue("scheduler/hide_exit_dialog", checkbox.isChecked())
             self._write_settings()
-            log.info(f"Exit: {os.path.basename(__file__)}")
+            log.info("Exit: %s", os.path.basename(__file__))
             if self._ports_accesses_for_storage:
                 self._ports_accesses_for_storage.close()
             if self._execution_time_for_variables:
@@ -734,7 +735,7 @@ class ScheduleMainWindow(QMainWindow, Ui_MainWindow):
         settings.sync()
 
         if settings.isWritable():
-            log.debug(f"Settings written to '{settings.fileName()}'.")
+            log.debug("Settings written to '%s'", settings.fileName())
         else:
             log.warning("Settings cant be saved to file, read-only.")
 
@@ -766,7 +767,7 @@ class ScheduleMainWindow(QMainWindow, Ui_MainWindow):
 
         settings.endGroup()
         settings.sync()
-        log.debug(f"Settings read from '{settings.fileName()}'.")
+        log.debug("Settings read from '%s'", settings.fileName())
 
     def info_table_fill_schedule(self, schedule: Schedule) -> None:
         """
diff --git a/pyproject.toml b/pyproject.toml
index 62db843a..80fa5ce8 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -101,7 +101,21 @@ precision = 2
 exclude = ["examples", "docs_sphinx"]
 
 [tool.ruff.lint]
-select = ["E4", "E7", "E9", "F", "SIM", "B", "NPY", "C4", "UP", "RUF", "A", "I"]
+select = [
+  "E4",
+  "E7",
+  "E9",
+  "F",
+  "SIM",
+  "B",
+  "NPY",
+  "C4",
+  "UP",
+  "RUF",
+  "A",
+  "I",
+  "G004"
+]
 ignore = ["F403", "B008", "B021", "B006", "UP038", "RUF023", "A005"]
 
 [tool.typos]
diff --git a/test/conftest.py b/test/conftest.py
index 4b2af5a5..9e4797d0 100644
--- a/test/conftest.py
+++ b/test/conftest.py
@@ -3,6 +3,7 @@ from distutils import dir_util
 
 import pytest
 
+from test.fixtures.integration import *
 from test.fixtures.operation_tree import *
 from test.fixtures.port import *
 from test.fixtures.resources import *
diff --git a/test/fixtures/integration.py b/test/fixtures/integration.py
new file mode 100644
index 00000000..4f243966
--- /dev/null
+++ b/test/fixtures/integration.py
@@ -0,0 +1,98 @@
+import pytest
+
+from b_asic.architecture import ProcessingElement
+from b_asic.core_operations import Butterfly, ConstantMultiplication
+from b_asic.list_schedulers import HybridScheduler
+from b_asic.schedule import Schedule
+from b_asic.sfg_generators import radix_2_dif_fft
+from b_asic.special_operations import Input, Output
+
+
+@pytest.fixture
+def mem_variables_fft16():
+    POINTS = 16
+    sfg = radix_2_dif_fft(POINTS)
+    sfg.set_latency_of_type(Butterfly, 1)
+    sfg.set_latency_of_type(ConstantMultiplication, 3)
+    sfg.set_execution_time_of_type(Butterfly, 1)
+    sfg.set_execution_time_of_type(ConstantMultiplication, 1)
+
+    resources = {
+        Butterfly.type_name(): 2,
+        ConstantMultiplication.type_name(): 2,
+        Input.type_name(): 1,
+        Output.type_name(): 1,
+    }
+    schedule = Schedule(
+        sfg,
+        scheduler=HybridScheduler(
+            resources, max_concurrent_reads=4, max_concurrent_writes=4
+        ),
+    )
+
+    operations = schedule.get_operations()
+    bfs = operations.get_by_type_name(Butterfly.type_name())
+    bfs = bfs.split_on_execution_time()
+    const_muls = operations.get_by_type_name(ConstantMultiplication.type_name())
+    inputs = operations.get_by_type_name(Input.type_name())
+    outputs = operations.get_by_type_name(Output.type_name())
+
+    bf_pe_1 = ProcessingElement(bfs[0], entity_name="bf1")
+    bf_pe_2 = ProcessingElement(bfs[1], entity_name="bf2")
+
+    mul_pe_1 = ProcessingElement(const_muls, entity_name="mul1")
+
+    pe_in = ProcessingElement(inputs, entity_name="input")
+    pe_out = ProcessingElement(outputs, entity_name="output")
+
+    processing_elements = [bf_pe_1, bf_pe_2, mul_pe_1, pe_in, pe_out]
+
+    mem_vars = schedule.get_memory_variables()
+    direct, mem_vars = mem_vars.split_on_length()
+    return direct, mem_vars, processing_elements
+
+
+@pytest.fixture
+def mem_variables_fft32():
+    POINTS = 32
+    sfg = radix_2_dif_fft(POINTS)
+    sfg.set_latency_of_type(Butterfly, 1)
+    sfg.set_latency_of_type(ConstantMultiplication, 3)
+    sfg.set_execution_time_of_type(Butterfly, 1)
+    sfg.set_execution_time_of_type(ConstantMultiplication, 1)
+
+    resources = {
+        Butterfly.type_name(): 2,
+        ConstantMultiplication.type_name(): 2,
+        Input.type_name(): 1,
+        Output.type_name(): 1,
+    }
+    schedule = Schedule(
+        sfg,
+        scheduler=HybridScheduler(
+            resources, max_concurrent_reads=4, max_concurrent_writes=4
+        ),
+    )
+
+    operations = schedule.get_operations()
+    bfs = operations.get_by_type_name(Butterfly.type_name())
+    bfs = bfs.split_on_execution_time()
+    const_muls = operations.get_by_type_name(ConstantMultiplication.type_name())
+    const_muls = const_muls.split_on_execution_time()
+    inputs = operations.get_by_type_name(Input.type_name())
+    outputs = operations.get_by_type_name(Output.type_name())
+
+    bf_pe_1 = ProcessingElement(bfs[0], entity_name="bf1")
+    bf_pe_2 = ProcessingElement(bfs[1], entity_name="bf2")
+
+    mul_pe_1 = ProcessingElement(const_muls[0], entity_name="mul1")
+    mul_pe_2 = ProcessingElement(const_muls[1], entity_name="mul2")
+
+    pe_in = ProcessingElement(inputs, entity_name="input")
+    pe_out = ProcessingElement(outputs, entity_name="output")
+
+    processing_elements = [bf_pe_1, bf_pe_2, mul_pe_1, mul_pe_2, pe_in, pe_out]
+
+    mem_vars = schedule.get_memory_variables()
+    direct, mem_vars = mem_vars.split_on_length()
+    return direct, mem_vars, processing_elements
diff --git a/test/integration/test_sfg_to_architecture.py b/test/integration/test_sfg_to_architecture.py
index 10a5ce0d..7ac22550 100644
--- a/test/integration/test_sfg_to_architecture.py
+++ b/test/integration/test_sfg_to_architecture.py
@@ -156,48 +156,8 @@ def test_pe_and_memory_constrained_schedule():
     assert arch.schedule_time == schedule.schedule_time
 
 
-def test_heuristic_resource_algorithms():
-    POINTS = 32
-    sfg = radix_2_dif_fft(POINTS)
-    sfg.set_latency_of_type(Butterfly, 1)
-    sfg.set_latency_of_type(ConstantMultiplication, 3)
-    sfg.set_execution_time_of_type(Butterfly, 1)
-    sfg.set_execution_time_of_type(ConstantMultiplication, 1)
-
-    resources = {
-        Butterfly.type_name(): 2,
-        ConstantMultiplication.type_name(): 2,
-        Input.type_name(): 1,
-        Output.type_name(): 1,
-    }
-    schedule = Schedule(
-        sfg,
-        scheduler=HybridScheduler(
-            resources, max_concurrent_reads=4, max_concurrent_writes=4
-        ),
-    )
-
-    operations = schedule.get_operations()
-    bfs = operations.get_by_type_name(Butterfly.type_name())
-    bfs = bfs.split_on_execution_time()
-    const_muls = operations.get_by_type_name(ConstantMultiplication.type_name())
-    const_muls = const_muls.split_on_execution_time()
-    inputs = operations.get_by_type_name(Input.type_name())
-    outputs = operations.get_by_type_name(Output.type_name())
-
-    bf_pe_1 = ProcessingElement(bfs[0], entity_name="bf1")
-    bf_pe_2 = ProcessingElement(bfs[1], entity_name="bf2")
-
-    mul_pe_1 = ProcessingElement(const_muls[0], entity_name="mul1")
-    mul_pe_2 = ProcessingElement(const_muls[1], entity_name="mul2")
-
-    pe_in = ProcessingElement(inputs, entity_name="input")
-    pe_out = ProcessingElement(outputs, entity_name="output")
-
-    processing_elements = [bf_pe_1, bf_pe_2, mul_pe_1, mul_pe_2, pe_in, pe_out]
-
-    mem_vars = schedule.get_memory_variables()
-    direct, mem_vars = mem_vars.split_on_length()
+def test_left_edge(mem_variables_fft32):
+    direct, mem_vars, processing_elements = mem_variables_fft32
 
     # LEFT-EDGE
     mem_vars_set = mem_vars.split_on_ports(
@@ -222,6 +182,10 @@ def test_heuristic_resource_algorithms():
     assert len(arch.processing_elements) == 6
     assert len(arch.memories) == 6
 
+
+def test_min_pe_to_mem(mem_variables_fft32):
+    direct, mem_vars, processing_elements = mem_variables_fft32
+
     # MIN-PE-TO-MEM
     mem_vars_set = mem_vars.split_on_ports(
         read_ports=1,
@@ -245,6 +209,10 @@ def test_heuristic_resource_algorithms():
     assert len(arch.processing_elements) == 6
     assert len(arch.memories) == 6
 
+
+def test_min_mem_to_pe(mem_variables_fft32):
+    direct, mem_vars, processing_elements = mem_variables_fft32
+
     # MIN-MEM-TO-PE
     mem_vars_set = mem_vars.split_on_ports(
         read_ports=1,
@@ -268,6 +236,10 @@ def test_heuristic_resource_algorithms():
     assert len(arch.processing_elements) == 6
     assert len(arch.memories) == 6
 
+
+def test_greedy_graph_coloring(mem_variables_fft32):
+    direct, mem_vars, processing_elements = mem_variables_fft32
+
     # GREEDY GRAPH COLORING
     mem_vars_set = mem_vars.split_on_ports(
         read_ports=1,
@@ -291,6 +263,10 @@ def test_heuristic_resource_algorithms():
     assert len(arch.processing_elements) == 6
     assert len(arch.memories) == 4
 
+
+def test_equitable_color(mem_variables_fft32):
+    direct, mem_vars, processing_elements = mem_variables_fft32
+
     # EQUITABLE COLOR
     mem_vars_set = mem_vars.split_on_ports(
         read_ports=1,
@@ -315,47 +291,8 @@ def test_heuristic_resource_algorithms():
     assert len(arch.memories) == 7
 
 
-def test_ilp_resource_algorithms():
-    POINTS = 16
-    sfg = radix_2_dif_fft(POINTS)
-    sfg.set_latency_of_type(Butterfly, 1)
-    sfg.set_latency_of_type(ConstantMultiplication, 3)
-    sfg.set_execution_time_of_type(Butterfly, 1)
-    sfg.set_execution_time_of_type(ConstantMultiplication, 1)
-
-    resources = {
-        Butterfly.type_name(): 2,
-        ConstantMultiplication.type_name(): 2,
-        Input.type_name(): 1,
-        Output.type_name(): 1,
-    }
-    schedule = Schedule(
-        sfg,
-        scheduler=HybridScheduler(
-            resources, max_concurrent_reads=4, max_concurrent_writes=4
-        ),
-    )
-
-    operations = schedule.get_operations()
-    bfs = operations.get_by_type_name(Butterfly.type_name())
-    bfs = bfs.split_on_execution_time()
-    const_muls = operations.get_by_type_name(ConstantMultiplication.type_name())
-    inputs = operations.get_by_type_name(Input.type_name())
-    outputs = operations.get_by_type_name(Output.type_name())
-
-    bf_pe_1 = ProcessingElement(bfs[0], entity_name="bf1")
-    bf_pe_2 = ProcessingElement(bfs[1], entity_name="bf2")
-
-    mul_pe_1 = ProcessingElement(const_muls, entity_name="mul1")
-
-    pe_in = ProcessingElement(inputs, entity_name="input")
-    pe_out = ProcessingElement(outputs, entity_name="output")
-
-    processing_elements = [bf_pe_1, bf_pe_2, mul_pe_1, pe_in, pe_out]
-
-    mem_vars = schedule.get_memory_variables()
-    direct, mem_vars = mem_vars.split_on_length()
-
+def test_ilp_color(mem_variables_fft16):
+    direct, mem_vars, processing_elements = mem_variables_fft16
     # ILP COLOR
     mem_vars_set = mem_vars.split_on_ports(
         read_ports=1,
@@ -379,6 +316,9 @@ def test_ilp_resource_algorithms():
     assert len(arch.processing_elements) == 5
     assert len(arch.memories) == 4
 
+
+def test_ilp_color_with_colors_given(mem_variables_fft16):
+    direct, mem_vars, processing_elements = mem_variables_fft16
     # ILP COLOR (amount of colors given)
     mem_vars_set = mem_vars.split_on_ports(
         read_ports=1,
@@ -403,6 +343,9 @@ def test_ilp_resource_algorithms():
     assert len(arch.processing_elements) == 5
     assert len(arch.memories) == 4
 
+
+def test_ilp_color_input_mux(mem_variables_fft16):
+    direct, mem_vars, processing_elements = mem_variables_fft16
     # ILP COLOR MIN INPUT MUX
     mem_vars_set = mem_vars.split_on_ports(
         read_ports=1,
@@ -427,6 +370,9 @@ def test_ilp_resource_algorithms():
     assert len(arch.processing_elements) == 5
     assert len(arch.memories) == 4
 
+
+def test_ilp_color_output_mux(mem_variables_fft16):
+    direct, mem_vars, processing_elements = mem_variables_fft16
     # ILP COLOR MIN OUTPUT MUX
     mem_vars_set = mem_vars.split_on_ports(
         read_ports=1,
@@ -451,6 +397,10 @@ def test_ilp_resource_algorithms():
     assert len(arch.processing_elements) == 5
     assert len(arch.memories) == 4
 
+
+def test_ilp_color_total_mux(mem_variables_fft16):
+    direct, mem_vars, processing_elements = mem_variables_fft16
+
     # ILP COLOR MIN TOTAL MUX
     mem_vars_set = mem_vars.split_on_ports(
         read_ports=1,
diff --git a/test/unit/test_graph_id_generator.py b/test/unit/test_graph_id_generator.py
index 5c57c10a..5660c61b 100644
--- a/test/unit/test_graph_id_generator.py
+++ b/test/unit/test_graph_id_generator.py
@@ -19,7 +19,7 @@ class TestGetNextId:
         assert graph_id_generator.next_id("") == "1"
 
     def test_normal_string_generator(self, graph_id_generator):
-        """ "Test the graph id generator for a normal string type."""
+        """Test the graph id generator for a normal string type."""
         assert graph_id_generator.next_id("add") == "add0"
         assert graph_id_generator.next_id("add") == "add1"
 
-- 
GitLab