B-ASIC - Better ASIC Toolbox
Commit 047cf9d8, authored 2 years ago by Oscar Gustafsson

Add basic functionality for saving schedules
Parent: 0be589c8
No related branches or tags contain this commit.
Merge request: !223 "Add basic functionality for saving schedules"
Pipeline #90257 passed (2 years ago), stages: test, deploy
Showing 3 changed files:
b_asic/save_load_structure.py            55 additions, 39 deletions
b_asic/schedule.py                       74 additions, 43 deletions
b_asic/scheduler_gui/scheduler_item.py    1 addition,   1 deletion

with 130 additions and 83 deletions
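Taken together, the changes add a text-based save path for schedules: sfg_to_python gains a schedule flag, a new schedule_to_python serializes a Schedule to executable Python source, and Schedule accepts a 'provided' scheduling algorithm so saved start times and laps can be replayed. A minimal sketch of how the new save/load pair might be used; the pre-built schedule object and the file name are assumptions for illustration, not part of the commit:

# Sketch only: assumes an existing B-ASIC Schedule object named `schedule`;
# the file name "my_schedule.py" is made up for the example.
from b_asic.save_load_structure import python_to_sfg, schedule_to_python

# Serialize the schedule (and the SFG it was built from) to Python source.
with open("my_schedule.py", "w") as f:
    f.write(schedule_to_python(schedule))

# python_to_sfg re-executes the generated file and returns the SFG together
# with any saved GUI positions (an empty dict when none were stored).
sfg, positions = python_to_sfg("my_schedule.py")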
b_asic/save_load_structure.py  (+55 −39)
@@ -11,11 +11,12 @@ from typing import Dict, Optional, Tuple, cast
 from b_asic.graph_component import GraphComponent
 from b_asic.port import InputPort
+from b_asic.schedule import Schedule
 from b_asic.signal_flow_graph import SFG
 
 
 def sfg_to_python(
-    sfg: SFG, counter: int = 0, suffix: Optional[str] = None
+    sfg: SFG, counter: int = 0, suffix: Optional[str] = None, schedule=False
 ) -> str:
     """
     Given an SFG structure try to serialize it for saving to a file.
@@ -23,15 +24,20 @@ def sfg_to_python(
     Parameters
     ==========
     sfg : SFG
-        The SFG to serialize
+        The SFG to serialize.
     counter : int, default: 0
         Number used for naming the SFG. Enables SFGs in SFGs.
     suffix : str, optional
         String to append at the end of the result.
+    schedule : bool, default: False
+        True if printing a schedule.
     """
+    _type = "Schedule" if schedule else "SFG"
     result = (
-        '\n"""\nB-ASIC automatically generated SFG file.\n'
+        '\n"""\n'
+        + f"B-ASIC automatically generated {_type} file.\n"
         + "Name: "
         + f"{sfg.name}"
         + "\n"
@@ -44,6 +50,8 @@ def sfg_to_python(
     result += "\nfrom b_asic import SFG, Signal, Input, Output"
     for op_type in {type(op) for op in sfg.operations}:
         result += f", {op_type.__name__}"
+    if schedule:
+        result += ", Schedule"
 
     def kwarg_unpacker(comp: GraphComponent, params=None) -> str:
         if params is None:
@@ -61,56 +69,51 @@ def sfg_to_python(
         params = {k: v for k, v in params.items() if v}
         if params.get("latency_offsets", None) is not None:
             params["latency_offsets"] = {
-                k: v
-                for k, v in params["latency_offsets"].items()
-                if v is not None
+                k: v for k, v in params["latency_offsets"].items() if v is not None
             }
             if not params["latency_offsets"]:
                 del params["latency_offsets"]
 
-        return ", ".join(
-            [f"{param}={value}" for param, value in params.items()]
-        )
+        return ", ".join([f"{param}={value}" for param, value in params.items()])
 
     # No need to redefined I/Os
-    io_ops = [*sfg._input_operations, *sfg._output_operations]
+    io_ops = [*sfg.input_operations, *sfg.output_operations]
 
     result += "\n# Inputs:\n"
-    for input_op in sfg._input_operations:
+    for input_op in sfg.input_operations:
         result += f"{input_op.graph_id} = Input({kwarg_unpacker(input_op)})\n"
 
     result += "\n# Outputs:\n"
-    for output_op in sfg._output_operations:
-        result += (
-            f"{output_op.graph_id} = Output({kwarg_unpacker(output_op)})\n"
-        )
+    for output_op in sfg.output_operations:
+        result += f"{output_op.graph_id} = Output({kwarg_unpacker(output_op)})\n"
 
     result += "\n# Operations:\n"
-    for op in sfg.split():
-        if op in io_ops:
+    for operation in sfg.split():
+        if operation in io_ops:
             continue
-        if isinstance(op, SFG):
+        if isinstance(operation, SFG):
             counter += 1
-            result = sfg_to_python(op, counter) + result
+            result = sfg_to_python(operation, counter) + result
             continue
 
         result += (
-            f"{op.graph_id} = {op.__class__.__name__}({kwarg_unpacker(op)})\n"
+            f"{operation.graph_id} = "
+            f"{operation.__class__.__name__}({kwarg_unpacker(operation)})\n"
        )
 
     result += "\n# Signals:\n"
     # Keep track of already existing connections to avoid adding duplicates
     connections = []
-    for op in sfg.split():
-        for out in op.outputs:
+    for operation in sfg.split():
+        for out in operation.outputs:
             for signal in out.signals:
                 destination = cast(InputPort, signal.destination)
                 dest_op = destination.operation
                 connection = (
-                    f"\nSignal(source={op.graph_id}."
-                    f"output({op.outputs.index(signal.source)}), "
+                    f"Signal(source={operation.graph_id}."
+                    f"output({operation.outputs.index(signal.source)}), "
                     f"destination={dest_op.graph_id}."
-                    f"input({dest_op.inputs.index(destination)}))"
+                    f"input({dest_op.inputs.index(destination)}))\n"
                 )
                 if connection in connections:
                     continue
@@ -119,20 +122,14 @@ def sfg_to_python(
         connections.append(connection)
 
     inputs = "[" + ", ".join(op.graph_id for op in sfg.input_operations) + "]"
-    outputs = (
-        "[" + ", ".join(op.graph_id for op in sfg.output_operations) + "]"
-    )
-    sfg_name = (
-        sfg.name if sfg.name else f"sfg{counter}" if counter > 0 else "sfg"
-    )
-    sfg_name_var = sfg_name.replace(" ", "_")
-    result += (
-        f"\n{sfg_name_var} = SFG(inputs={inputs}, outputs={outputs}, "
-        f"name='{sfg_name}')\n"
-    )
+    outputs = "[" + ", ".join(op.graph_id for op in sfg.output_operations) + "]"
+    sfg_name = sfg.name if sfg.name else f"sfg{counter}" if counter > 0 else "sfg"
+    sfg_name_var = sfg_name.replace(" ", "_").replace("-", "_")
+    result += "\n# Signal flow graph:\n"
     result += (
-        "\n# SFG Properties:\n"
-        + "prop = {'name':"
-        + f"{sfg_name_var}"
-        + "}"
+        f"{sfg_name_var} = SFG(inputs={inputs}, outputs={outputs}, name='{sfg_name}')\n"
     )
+    result += "\n# SFG Properties:\n" + "prop = {'name':" + f"{sfg_name_var}" + "}\n"
 
     if suffix is not None:
         result += "\n" + suffix + "\n"
@@ -149,8 +146,8 @@ def python_to_sfg(path: str) -> Tuple[SFG, Dict[str, Tuple[int, int]]]:
     path : str
         Path to file to read and deserialize.
     """
-    with open(path) as f:
-        code = compile(f.read(), path, "exec")
+    with open(path) as file:
+        code = compile(file.read(), path, "exec")
         exec(code, globals(), locals())
 
     return (
@@ -159,3 +156,22 @@ def python_to_sfg(path: str) -> Tuple[SFG, Dict[str, Tuple[int, int]]]:
         else [v for k, v in locals().items() if isinstance(v, SFG)][0],
         locals()["positions"] if "positions" in locals() else {},
     )
+
+
+def schedule_to_python(schedule: Schedule):
+    """
+    Given a schedule structure try to serialize it for saving to a file.
+
+    Parameters
+    ==========
+    schedule : Schedule
+        The schedule to serialize.
+    """
+    sfg_name = schedule.sfg.name.replace(" ", "_").replace("-", "_")
+    result = "\n# Schedule:\n"
+    result += (
+        f"{sfg_name}_schedule = Schedule({sfg_name}, {schedule.schedule_time}, "
+        f"{schedule.cyclic}, 'provided', {schedule.start_times}, "
+        f"{dict(schedule.laps)})\n"
+    )
+    return sfg_to_python(schedule.sfg, schedule=True) + result
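As a rough illustration of the new schedule flag in sfg_to_python, the sketch below builds a tiny SFG and prints its serialized form. The operation choice and names are assumptions for the example; the exact generated text is determined by the code above and may differ in detail:

# Sketch only: a minimal SFG made up for illustration.
from b_asic import Addition, Input, Output, SFG
from b_asic.save_load_structure import sfg_to_python

in0 = Input()
add0 = Addition(in0, in0)
out0 = Output(add0)
sfg = SFG(inputs=[in0], outputs=[out0], name="example sfg")

# With schedule=True the generated header reads "Schedule" instead of "SFG"
# and ", Schedule" is appended to the generated import line.
print(sfg_to_python(sfg, schedule=True))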
b_asic/schedule.py  (+74 −43)
@@ -55,8 +55,15 @@ class Schedule:
         algorithm.
     cyclic : bool, default: False
         If the schedule is cyclic.
-    scheduling_algorithm : {'ASAP'}, optional
+    scheduling_algorithm : {'ASAP', 'provided'}, optional
         The scheduling algorithm to use. Currently, only "ASAP" is supported.
+        If 'provided', use provided *start_times* and *laps* dictionaries.
+    start_times : dict, optional
+        Dictionary with GraphIDs as keys and start times as values.
+        Used when *scheduling_algorithm* is 'provided'.
+    laps : dict, optional
+        Dictionary with GraphIDs as keys and laps as values.
+        Used when *scheduling_algorithm* is 'provided'.
     """
 
     _sfg: SFG
@@ -72,8 +79,11 @@ class Schedule:
         schedule_time: Optional[int] = None,
         cyclic: bool = False,
         scheduling_algorithm: str = "ASAP",
+        start_times: Dict[GraphID, int] = None,
+        laps: Dict[GraphID, int] = None,
     ):
         """
         Construct a Schedule from an SFG.
         """
+        self._original_sfg = sfg()  # Make a copy
         self._sfg = sfg
         self._start_times = {}
         self._laps = defaultdict(lambda: 0)
@@ -81,6 +91,10 @@ class Schedule:
         self._y_locations = defaultdict(lambda: None)
         if scheduling_algorithm == "ASAP":
             self._schedule_asap()
+        elif scheduling_algorithm == "provided":
+            self._start_times = start_times
+            self._laps.update(laps)
+            self._remove_delays_no_laps()
         else:
             raise NotImplementedError(
                 f"No algorithm with name: {scheduling_algorithm} defined."
@@ -107,8 +121,8 @@ class Schedule:
         """
        Return the current maximum end time among all operations.
        """
         max_end_time = 0
         for graph_id, op_start_time in self._start_times.items():
-            op = cast(Operation, self._sfg.find_by_id(graph_id))
-            for outport in op.outputs:
+            operation = cast(Operation, self._sfg.find_by_id(graph_id))
+            for outport in operation.outputs:
                 max_end_time = max(
                     max_end_time,
                     op_start_time + cast(int, outport.latency_offset),
@@ -149,8 +163,8 @@ class Schedule:
     ) -> Dict["OutputPort", Dict["Signal", int]]:
         ret = {}
         start_time = self._start_times[graph_id]
-        op = cast(Operation, self._sfg.find_by_id(graph_id))
-        for output_port in op.outputs:
+        operation = cast(Operation, self._sfg.find_by_id(graph_id))
+        for output_port in operation.outputs:
             output_slacks = {}
             available_time = start_time + cast(int, output_port.latency_offset)
@@ -200,8 +214,8 @@ class Schedule:
     def _backward_slacks(
         self, graph_id: GraphID
     ) -> Dict[InputPort, Dict[Signal, int]]:
         ret = {}
         start_time = self._start_times[graph_id]
-        op = cast(Operation, self._sfg.find_by_id(graph_id))
-        for input_port in op.inputs:
+        operation = cast(Operation, self._sfg.find_by_id(graph_id))
+        for input_port in operation.inputs:
             input_slacks = {}
             usage_time = start_time + cast(int, input_port.latency_offset)
@@ -270,14 +284,19 @@ class Schedule:
     @property
     def sfg(self) -> SFG:
-        return self._sfg
+        """The SFG of the current schedule."""
+        return self._original_sfg
 
     @property
     def start_times(self) -> Dict[GraphID, int]:
+        """The start times of the operations in the current schedule."""
         return self._start_times
 
     @property
     def laps(self) -> Dict[GraphID, int]:
+        """
+        The number of laps for the start times of the operations in the
+        current schedule.
+        """
         return self._laps
 
     @property
@@ -317,8 +336,11 @@ class Schedule:
         ret = [self._schedule_time, *self._start_times.values()]
         # Loop over operations
         for graph_id in self._start_times:
-            op = cast(Operation, self._sfg.find_by_id(graph_id))
-            ret += [cast(int, op.execution_time), *op.latency_offsets.values()]
+            operation = cast(Operation, self._sfg.find_by_id(graph_id))
+            ret += [
+                cast(int, operation.execution_time),
+                *operation.latency_offsets.values(),
+            ]
         # Remove not set values (None)
         ret = [v for v in ret if v is not None]
         return ret
@@ -535,7 +557,16 @@ class Schedule:
             self._start_times[graph_id] = new_start
         return self
 
+    def _remove_delays_no_laps(self) -> None:
+        """
+        Remove delay elements without updating laps. Used when loading schedule.
+        """
+        delay_list = self._sfg.find_by_type_name(Delay.type_name())
+        while delay_list:
+            delay_op = cast(Delay, delay_list[0])
+            self._sfg = cast(SFG, self._sfg.remove_operation(delay_op.graph_id))
+            delay_list = self._sfg.find_by_type_name(Delay.type_name())
+
     def _remove_delays(self) -> None:
+        """
+        Remove delay elements and update laps. Used after scheduling algorithm.
+        """
         delay_list = self._sfg.find_by_type_name(Delay.type_name())
         while delay_list:
             delay_op = cast(Delay, delay_list[0])
@@ -549,35 +580,35 @@ class Schedule:
     def _schedule_asap(self) -> None:
         """
         Schedule the operations using as-soon-as-possible scheduling.
         """
-        pl = self._sfg.get_precedence_list()
+        precedence_list = self._sfg.get_precedence_list()
 
-        if len(pl) < 2:
+        if len(precedence_list) < 2:
             print("Empty signal flow graph cannot be scheduled.")
             return
 
         non_schedulable_ops = set()
-        for outport in pl[0]:
-            op = outport.operation
-            if op.type_name() not in [Delay.type_name()]:
-                if op.graph_id not in self._start_times:
+        for outport in precedence_list[0]:
+            operation = outport.operation
+            if operation.type_name() not in [Delay.type_name()]:
+                if operation.graph_id not in self._start_times:
                     # Set start time of all operations in the first iter to 0
-                    self._start_times[op.graph_id] = 0
+                    self._start_times[operation.graph_id] = 0
             else:
-                non_schedulable_ops.add(op.graph_id)
+                non_schedulable_ops.add(operation.graph_id)
 
-        for outport in pl[1]:
-            op = outport.operation
-            if op.graph_id not in self._start_times:
+        for outport in precedence_list[1]:
+            operation = outport.operation
+            if operation.graph_id not in self._start_times:
                 # Set start time of all operations in the first iter to 0
-                self._start_times[op.graph_id] = 0
+                self._start_times[operation.graph_id] = 0
 
-        for outports in pl[2:]:
+        for outports in precedence_list[2:]:
             for outport in outports:
-                op = outport.operation
-                if op.graph_id not in self._start_times:
+                operation = outport.operation
+                if operation.graph_id not in self._start_times:
                     # Schedule the operation if it does not have a start time yet.
                     op_start_time = 0
-                    for inport in op.inputs:
+                    for inport in operation.inputs:
                         if len(inport.signals) != 1:
                             raise ValueError(
                                 "Error in scheduling, dangling input port detected."
@@ -617,7 +648,7 @@ class Schedule:
                         op_start_time_from_in = source_end_time - inport.latency_offset
                         op_start_time = max(op_start_time, op_start_time_from_in)
-                    self._start_times[op.graph_id] = op_start_time
+                    self._start_times[operation.graph_id] = op_start_time
 
         for output in self._sfg.find_by_type_name(Output.type_name()):
             output = cast(Output, output)
             source_port = cast(OutputPort, output.inputs[0].signals[0].source)
@@ -722,7 +753,7 @@ class Schedule:
                 line_cache.append(start)
 
             elif end[0] == start[0]:
-                p = Path(
+                path = Path(
                     [
                         start,
                         [start[0] + SPLINE_OFFSET, start[1]],
@@ -742,16 +773,16 @@ class Schedule:
                         Path.CURVE4,
                     ],
                 )
-                pp = PathPatch(
-                    p,
+                path_patch = PathPatch(
+                    path,
                     fc='none',
                     ec=_SIGNAL_COLOR,
                     lw=SIGNAL_LINEWIDTH,
                     zorder=10,
                 )
-                ax.add_patch(pp)
+                ax.add_patch(path_patch)
             else:
-                p = Path(
+                path = Path(
                     [
                         start,
                         [(start[0] + end[0]) / 2, start[1]],
@@ -760,14 +791,14 @@ class Schedule:
                     ],
                     [Path.MOVETO, Path.CURVE4, Path.CURVE4, Path.CURVE4],
                 )
-                pp = PathPatch(
-                    p,
+                path_patch = PathPatch(
+                    path,
                     fc='none',
                     ec=_SIGNAL_COLOR,
                     lw=SIGNAL_LINEWIDTH,
                     zorder=10,
                 )
-                ax.add_patch(pp)
+                ax.add_patch(path_patch)
 
         def _draw_offset_arrow(start, end, start_offset, end_offset, name="", laps=0):
             """
             Draw an arrow from *start* to *end*, but with an offset.
             """
@@ -784,12 +815,12 @@ class Schedule:
         ax.grid()
         for graph_id, op_start_time in self._start_times.items():
             y_pos = self._get_y_position(graph_id, operation_gap=operation_gap)
-            op = cast(Operation, self._sfg.find_by_id(graph_id))
+            operation = cast(Operation, self._sfg.find_by_id(graph_id))
             # Rewrite to make better use of NumPy
             (
                 latency_coordinates,
                 execution_time_coordinates,
-            ) = op.get_plot_coordinates()
+            ) = operation.get_plot_coordinates()
             _x, _y = zip(*latency_coordinates)
             x = np.array(_x)
             y = np.array(_y)
@@ -809,11 +840,11 @@ class Schedule:
             yticklabels.append(cast(Operation, self._sfg.find_by_id(graph_id)).name)
         for graph_id, op_start_time in self._start_times.items():
-            op = cast(Operation, self._sfg.find_by_id(graph_id))
-            out_coordinates = op.get_output_coordinates()
+            operation = cast(Operation, self._sfg.find_by_id(graph_id))
+            out_coordinates = operation.get_output_coordinates()
             source_y_pos = self._get_y_position(graph_id, operation_gap=operation_gap)
-            for output_port in op.outputs:
+            for output_port in operation.outputs:
                 for output_signal in output_port.signals:
                     destination = cast(InputPort, output_signal.destination)
                     destination_op = destination.operation
@@ -911,7 +942,7 @@ class Schedule:
         """
         fig, ax = plt.subplots()
         self._plot_schedule(ax)
-        f = io.StringIO()
-        fig.savefig(f, format="svg")
-        return f.getvalue()
+        buffer = io.StringIO()
+        fig.savefig(buffer, format="svg")
+        return buffer.getvalue()
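The serialized file produced by schedule_to_python relies on this new 'provided' path: the constructor takes the saved start times and laps as given and only strips delay elements via _remove_delays_no_laps instead of running ASAP scheduling. A minimal sketch, assuming an existing sfg whose graph IDs happen to be the made-up names below:

# Sketch only: sfg and the graph IDs are assumptions for illustration.
from b_asic.schedule import Schedule

schedule = Schedule(
    sfg,
    schedule_time=10,
    cyclic=False,
    scheduling_algorithm="provided",
    start_times={"in0": 0, "add0": 0, "out0": 5},
    laps={},  # the backing defaultdict gives 0 laps to anything not listed
)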
b_asic/scheduler_gui/scheduler_item.py  (+1 −1)
@@ -257,7 +257,7 @@ class SchedulerItem(SchedulerEvent, QGraphicsItemGroup): # PySide2 / PyQt5
         """
         Make a new graph out of the stored attributes.
         """
         # build components
         for graph_id in self.schedule.start_times.keys():
-            operation = cast(Operation, self.schedule.sfg.find_by_id(graph_id))
+            operation = cast(Operation, self.schedule._sfg.find_by_id(graph_id))
             component = OperationItem(operation, height=OPERATION_HEIGHT, parent=self)
             self._operation_items[graph_id] = component
             self._set_position(graph_id)