Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
59 commits
Select commit Hold shift + click to select a range
d5bad29
SqlLiteDatabase handles file path
Christian-B Oct 18, 2022
6535fd6
store vertex labels in buffer sql
Christian-B Oct 18, 2022
36c01ab
save extra chip power monitor stuff to the database
Christian-B Oct 18, 2022
d7957ed
chip_active_report
Christian-B Oct 18, 2022
d0556bd
fix import
Christian-B Oct 19, 2022
f701a29
write_chip_active_report
Christian-B Oct 19, 2022
0178b79
flake8
Christian-B Oct 19, 2022
66bcaf8
totals and track monitors
Christian-B Oct 19, 2022
8db050d
totals and track monitors
Christian-B Oct 19, 2022
d3ce5af
write_chip_active_report standalone
Christian-B Oct 19, 2022
235d16c
flake8
Christian-B Oct 19, 2022
39fce52
Dont rat reports created in unittests
Christian-B Oct 19, 2022
dcc1d04
reset number
Christian-B Oct 24, 2022
7a72814
new buffer sqllite file after reset
Christian-B Oct 24, 2022
615e517
remove clear
Christian-B Oct 24, 2022
68ff94f
read_only when reading
Christian-B Oct 24, 2022
1d37458
buffer sqlite path as a param
Christian-B Oct 24, 2022
d44e988
file name to match class name
Christian-B Oct 24, 2022
d59aa88
Add BaseDatabase
Christian-B Oct 24, 2022
af28dd6
dont use read only as database may not have been created if no extrac…
Christian-B Oct 25, 2022
f6131ac
single underscore for inherited methods
Christian-B Oct 25, 2022
0398988
with BufferDatabase()
Christian-B Oct 25, 2022
a0f3043
with ProvenanceReader() as db
Christian-B Oct 25, 2022
96ff3e7
flake8
Christian-B Oct 25, 2022
58af9ed
global provenance
Christian-B Oct 25, 2022
13a2ba3
NoProvenanceDatabaseException
Christian-B Oct 25, 2022
a07f1bb
flake8
Christian-B Oct 25, 2022
c8c7582
flake8
Christian-B Oct 25, 2022
2df110d
dot use row factory for consitency
Christian-B Oct 25, 2022
4b8674d
write global provenance outside of the run directories
Christian-B Oct 25, 2022
2f5ed91
split DDL files
Christian-B Oct 25, 2022
9d93884
removed other as never used.
Christian-B Oct 25, 2022
7a5b89a
flake8
Christian-B Oct 25, 2022
7569845
merge
Christian-B Oct 26, 2022
8111675
Merge branch 'global_provenance' into merge_sqlite
Christian-B Oct 26, 2022
4e79504
skip_reason in view
Christian-B Oct 26, 2022
eb4e769
Merge branch 'global_provenance' into merge_sqlite
Christian-B Oct 26, 2022
db5e881
combine buffer.sqlite and provenance.sqlite3 into data.sqlite3
Christian-B Oct 26, 2022
a435b43
flake8
Christian-B Oct 26, 2022
cfda518
use data.sqlite3 not buffer.sqlite3
Christian-B Oct 26, 2022
ce2a652
merge core_mapping table into cores
Christian-B Oct 26, 2022
17b005b
normalise core_provenance table
Christian-B Oct 26, 2022
018d5eb
flake8
Christian-B Oct 26, 2022
f289464
Merge branch 'merge_sqlite' into power_direct2
Christian-B Oct 27, 2022
28430cf
fixes after merge
Christian-B Oct 27, 2022
ecc956d
get_reset_str
Christian-B Oct 27, 2022
9d95e42
Merge branch 'merge_sqlite' into power_direct2
Christian-B Oct 27, 2022
df44eea
new report each reset
Christian-B Oct 27, 2022
d2b334d
flake8
Christian-B Oct 27, 2022
4c21120
Merge branch 'merge_sqlite' into power_direct2
Christian-B Oct 27, 2022
5ba870a
replace buffer.sqlite3 with data.sqlite3 in tests
Christian-B Oct 27, 2022
85a28e0
flake8
Christian-B Oct 27, 2022
0a1f4d6
pass in Class type to avoid ugly import
Christian-B Oct 28, 2022
3cb34f3
doc spacing
Christian-B Oct 28, 2022
0b27d38
doc spacing
Christian-B Oct 28, 2022
ffd1866
merged in master
Christian-B Nov 4, 2022
096d7a4
merged in master
Christian-B Nov 11, 2022
d7a69ac
merged in master
Christian-B Nov 28, 2022
e24a886
merge
Christian-B Nov 29, 2022
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions .ratexcludes
Original file line number Diff line number Diff line change
Expand Up @@ -17,3 +17,4 @@
**/PACMAN/**
**/DataSpecification/**
**/spalloc/**
**/unittests/**/*.rpt
46 changes: 46 additions & 0 deletions spinn_front_end_common/data/fec_data_view.py
Original file line number Diff line number Diff line change
Expand Up @@ -74,6 +74,7 @@ class _FecDataModel(object):
"_notification_protocol",
"_max_run_time_steps",
"_monitor_map",
"_reset_number",
"_run_number",
"_run_step",
"_simulation_time_step_ms",
Expand Down Expand Up @@ -111,6 +112,7 @@ def _clear(self):
self._n_boards_required = None
self._n_chips_required = None
self._none_labelled_edge_count = 0
self._reset_number = 0
self._run_number = None
self._simulation_time_step_ms = None
self._simulation_time_step_per_ms = None
Expand Down Expand Up @@ -414,6 +416,50 @@ def has_time_scale_factor(cls):
"""
return cls.__fec_data._time_scale_factor is not None

# reset number

@classmethod
def get_reset_number(cls):
    """
    Get the number of times a reset has happened.

    Only counts the first reset after each run.

    So resets that are first soft then hard are ignored.
    Double reset calls without a run and resets before run are ignored.

    Reset numbers start at zero.

    :return: the number of resets done so far
    :rtype: int
    :raises ~spinn_utilities.exceptions.SpiNNUtilsException:
        If the reset_number is currently unavailable
    """
    if cls.__fec_data._reset_number is None:
        # Fixed copy-paste: the missing item is reset_number,
        # not run_number
        raise cls._exception("reset_number")
    return cls.__fec_data._reset_number

@classmethod
def get_reset_str(cls):
    """
    Get the number of times a reset has happened as a string.

    Zero is returned as the empty string ``""``.

    Only counts the first reset after each run.

    So resets that are first soft then hard are ignored.
    Double reset calls without a run and resets before run are ignored.

    Reset numbers start at zero.

    :return: the reset number as a string, or "" if no reset yet
    :rtype: str
    :raises ~spinn_utilities.exceptions.SpiNNUtilsException:
        If the reset_number is currently unavailable
    """
    if cls.__fec_data._reset_number is None:
        # Fixed copy-paste: the missing item is reset_number,
        # not run_number
        raise cls._exception("reset_number")
    if cls.__fec_data._reset_number:
        return str(cls.__fec_data._reset_number)
    else:
        return ""

# run number

@classmethod
Expand Down
4 changes: 4 additions & 0 deletions spinn_front_end_common/data/fec_data_writer.py
Original file line number Diff line number Diff line change
Expand Up @@ -85,13 +85,17 @@ def finish_run(self):

@overrides(PacmanDataWriter._hard_reset)
def _hard_reset(self):
    # Count this reset only if the simulation has run since the last
    # reset; repeated resets, or a reset before any run, do not bump
    # the counter (see FecDataView.get_reset_number semantics).
    if self.is_ran_last():
        self.__fec_data._reset_number += 1
    # Reset superclass layers first, then the FEC-specific state
    PacmanDataWriter._hard_reset(self)
    SpiNNManDataWriter._local_hard_reset(self)
    self.__fec_data._hard_reset()
    # A hard reset starts a fresh run directory for subsequent output
    self.__create_run_dir_path()

@overrides(PacmanDataWriter._soft_reset)
def _soft_reset(self):
    # Count this reset only if the simulation has run since the last
    # reset. NOTE(review): a soft reset followed by a hard reset bumps
    # the counter once here; the hard reset then sees no new run, which
    # appears to implement "first soft then hard counted once" — confirm.
    if self.is_ran_last():
        self.__fec_data._reset_number += 1
    # Reset superclass layers first, then the FEC-specific state
    PacmanDataWriter._soft_reset(self)
    SpiNNManDataWriter._local_soft_reset(self)
    self.__fec_data._soft_reset()
Expand Down
57 changes: 48 additions & 9 deletions spinn_front_end_common/interface/abstract_spinnaker_base.py
Original file line number Diff line number Diff line change
Expand Up @@ -68,6 +68,8 @@
AbstractVertexWithEdgeToDependentVertices,
AbstractCanReset)
from spinn_front_end_common.interface.buffer_management import BufferManager
from spinn_front_end_common.interface.buffer_management.storage_objects \
import BufferDatabase
from spinn_front_end_common.interface.config_handler import ConfigHandler
from spinn_front_end_common.interface.interface_functions import (
application_finisher, application_runner,
Expand Down Expand Up @@ -98,7 +100,7 @@
host_no_bitfield_router_compression import (
ordered_covering_compression, pair_compression)
from spinn_front_end_common.interface.provenance import (
FecTimer, ProvenanceWriter, TimerCategory, TimerWork)
FecTimer, GlobalProvenance, TimerCategory, TimerWork)
from spinn_front_end_common.interface.splitter_selectors import (
splitter_selector)
from spinn_front_end_common.interface.java_caller import JavaCaller
Expand All @@ -109,12 +111,12 @@
memory_map_on_host_chip_report, network_specification,
router_collision_potential_report,
routing_table_from_machine_report, tags_from_machine_report,
write_json_machine, write_json_placements,
write_chip_active_report, write_json_machine, write_json_placements,
write_json_routing_tables, drift_report)
from spinn_front_end_common.utilities.iobuf_extractor import IOBufExtractor
from spinn_front_end_common.utilities.utility_objs import ExecutableType
from spinn_front_end_common.utility_models import (
DataSpeedUpPacketGatherMachineVertex)
DataSpeedUpPacketGatherMachineVertex, ChipPowerMonitorMachineVertex)
from spinn_front_end_common.utilities.report_functions.reports import (
generate_comparison_router_report, partitioner_report,
placer_reports_with_application_graph,
Expand Down Expand Up @@ -439,6 +441,9 @@ def __run(self, run_time, sync_time):

self._do_mapping(total_run_time)

if not self._data_writer.is_ran_last():
self._execute_record_core_names()

# Check if anything has per-timestep SDRAM usage
is_per_timestep_sdram = self._is_per_timestep_sdram()

Expand Down Expand Up @@ -721,7 +726,7 @@ def _get_machine(self):
def _create_version_provenance(self):
""" Add the version information to the provenance data at the start.
"""
with ProvenanceWriter() as db:
with GlobalProvenance() as db:
db.insert_version("spinn_utilities_version", spinn_utils_version)
db.insert_version("spinn_machine_version", spinn_machine_version)
db.insert_version("spalloc_version", spalloc_version)
Expand Down Expand Up @@ -888,6 +893,12 @@ def _do_placer(self, system_placements):
raise ConfigurationException(
f"Unexpected cfg setting placer: {name}")

def _execute_record_core_names(self):
    """
    Times and runs the storing of the vertex labels (core names)
    into the buffer database.
    """
    # Fixed typo in the timer label: "databse" -> "database"
    with FecTimer("Record core names to database", TimerWork.REPORT):
        with BufferDatabase() as db:
            db.store_vertex_labels()

def _execute_system_multicast_routing_generator(self):
"""
Runs, times and logs the SystemMulticastRoutingGenerator is required
Expand Down Expand Up @@ -2271,23 +2282,44 @@ def _print_iobuf(errors, warnings):
for error in errors:
logger.error(error)

def _execute_prepare_chip_power(self):
    """
    Stores the chip power monitor vertices in the buffer database,
    unless the energy report is turned off or a virtual board is used.
    """
    with FecTimer("Prepare Chip Power", TimerWork.REPORT) as timer:
        if timer.skip_if_cfg_false("Reports", "write_energy_report"):
            return
        if timer.skip_if_virtual_board():
            return
        # Use a context manager (as the rest of this file does) so the
        # database is closed even if store_chip_power_monitors raises;
        # the previous open()/close() pair leaked on exception.
        with BufferDatabase() as db:
            db.store_chip_power_monitors(ChipPowerMonitorMachineVertex)

def _report_chip_active(self):
    """
    Writes the chip active report from the recorded power monitor data,
    unless the energy report is turned off or a virtual board is used.
    """
    # Distinct timer label: the original reused "Prepare Chip Power"
    # (copy-paste from _execute_prepare_chip_power), which merged the
    # timing of two different steps under one name.
    with FecTimer("Report Chip Active", TimerWork.REPORT) as timer:
        if timer.skip_if_cfg_false("Reports", "write_energy_report"):
            return
        if timer.skip_if_virtual_board():
            return
        write_chip_active_report()

def _do_end_of_run(self):
    """
    End-of-run finalisation: store the chip power monitor data and
    write the chip active report.
    """
    # Nothing to finalise unless the simulation actually ran last
    if self._data_writer.is_ran_last():
        self._execute_prepare_chip_power()
        self._report_chip_active()

def reset(self):
""" Code that puts the simulation back at time zero
"""
FecTimer.start_category(TimerCategory.RESETTING)
if not self._data_writer.is_ran_last():
if not self._data_writer.is_ran_ever():
logger.error("Ignoring the reset before the run")
else:
logger.error("Ignoring the repeated reset call")
return

FecTimer.start_category(TimerCategory.RESETTING)
logger.info("Resetting")

# rewind the buffers from the buffer manager, to start at the beginning
# of the simulation again and clear buffered out
if self._data_writer.has_buffer_manager():
self._data_writer.get_buffer_manager().reset()
self._do_end_of_run()

if self._data_writer.get_user_accessed_machine():
logger.warning(
Expand All @@ -2297,6 +2329,11 @@ def reset(self):
else:
self._data_writer.soft_reset()

# rewind the buffers from the buffer manager, to start at the beginning
# of the simulation again and clear buffered out
if self._data_writer.has_buffer_manager():
self._data_writer.get_buffer_manager().reset()

# Reset the graph off the machine, to set things to time 0
self.__reset_graph_elements()
FecTimer.end_category(TimerCategory.RESETTING)
Expand Down Expand Up @@ -2368,6 +2405,8 @@ def stop(self):
set_config("Reports", "read_provenance_data", "True")
self._do_read_provenance()

self._do_end_of_run()

except Exception as e:
self._recover_from_error(e)
self.write_errored_file()
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -91,9 +91,6 @@ class BufferManager(object):
# Dictionary of sender vertex -> buffers sent
"_sent_messages",

# storage area for received data from cores
"_db",

# Lock to avoid multiple messages being processed at the same time
"_thread_lock_buffer_out",

Expand Down Expand Up @@ -121,9 +118,6 @@ def __init__(self):
# Dictionary of sender vertex -> buffers sent
self._sent_messages = dict()

# storage area for received data from cores
self._db = BufferDatabase()

# Lock to avoid multiple messages being processed at the same time
self._thread_lock_buffer_out = threading.RLock()
self._thread_lock_buffer_in = threading.RLock()
Expand Down Expand Up @@ -303,9 +297,6 @@ def reset(self):
beginning of its expected regions and clears the buffered out\
data files.
"""
#
self._db.reset()

# rewind buffered in
for vertex in self._sender_vertices:
for region in vertex.get_regions():
Expand All @@ -328,7 +319,8 @@ def clear_recorded_data(self, x, y, p, recording_region_id):
:param int p: placement p coordinate
:param int recording_region_id: the recording region ID
"""
self._db.clear_region(x, y, p, recording_region_id)
with BufferDatabase() as db:
db.clear_region(x, y, p, recording_region_id)

def _create_message_to_send(self, size, vertex, region):
""" Creates a single message to send with the given boundaries.
Expand Down Expand Up @@ -575,14 +567,15 @@ def __python_get_data_for_placements(self, recording_placements):
"""
:param ~pacman.model.placements.Placements recording_placements:
Where to get the data from.
"""
"""
# get data
progress = ProgressBar(
len(recording_placements),
"Extracting buffers from the last run")

for placement in progress.over(recording_placements):
self._retreive_by_placement(placement)
with BufferDatabase() as db:
for placement in progress.over(recording_placements):
self._retreive_by_placement(db, placement)

def get_data_by_placement(self, placement, recording_region_id):
""" Get the data container for all the data retrieved\
Expand All @@ -602,12 +595,14 @@ def get_data_by_placement(self, placement, recording_region_id):
"so no data read".format(placement.vertex))
with self._thread_lock_buffer_out:
# data flush has been completed - return appropriate data
return self._db.get_region_data(
placement.x, placement.y, placement.p, recording_region_id)
with BufferDatabase() as db:
return db.get_region_data(
placement.x, placement.y, placement.p, recording_region_id)

def _retreive_by_placement(self, placement):
def _retreive_by_placement(self, db, placement):
""" Retrieve the data for a vertex; must be locked first.

:param db BufferDatabase: dtabase to store into
:param ~pacman.model.placements.Placement placement:
the placement to get the data from
:param int recording_region_id: desired recording data region
Expand All @@ -623,7 +618,7 @@ def _retreive_by_placement(self, placement):
size, addr, missing = sizes_and_addresses[region]
data = self._request_data(
placement.x, placement.y, addr, size)
self._db.store_data_in_region_buffer(
db.store_data_in_region_buffer(
placement.x, placement.y, placement.p, region, missing, data)

def _get_region_information(self, addr, x, y, p):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,6 @@

from .buffered_sending_region import BufferedSendingRegion
from .buffers_sent_deque import BuffersSentDeque
from .sqllite_database import BufferDatabase
from .buffer_database import BufferDatabase

__all__ = ["BufferedSendingRegion", "BuffersSentDeque", "BufferDatabase"]
Loading