From d5bad2952c2c0da0b8f6f7a6369ccd5e3ef29dcf Mon Sep 17 00:00:00 2001 From: "Christian Y. Brenninkmeijer" Date: Tue, 18 Oct 2022 11:56:36 +0100 Subject: [PATCH 01/49] SqlLiteDatabase handles file path --- .../buffered_receiving_data.py | 19 +++++-------------- .../storage_objects/sqllite_database.py | 14 ++++++++++++-- 2 files changed, 17 insertions(+), 16 deletions(-) diff --git a/spinn_front_end_common/interface/buffer_management/storage_objects/buffered_receiving_data.py b/spinn_front_end_common/interface/buffer_management/storage_objects/buffered_receiving_data.py index 18667499fa..a165c39e0d 100644 --- a/spinn_front_end_common/interface/buffer_management/storage_objects/buffered_receiving_data.py +++ b/spinn_front_end_common/interface/buffer_management/storage_objects/buffered_receiving_data.py @@ -14,10 +14,6 @@ # along with this program. If not, see . import os from .sqllite_database import SqlLiteDatabase -from spinn_front_end_common.data import FecDataView - -#: Name of the database in the data folder -DB_FILE_NAME = "buffer.sqlite3" class BufferedReceivingData(object): @@ -29,9 +25,6 @@ class BufferedReceivingData(object): #: the AbstractDatabase holding the data to store "_db", - #: the path to the database - "_db_file", - #: the (size, address) of each region "__sizes_and_addresses", @@ -40,8 +33,6 @@ class BufferedReceivingData(object): ] def __init__(self): - self._db_file = os.path.join( - FecDataView.get_run_dir_path(), DB_FILE_NAME) self._db = None self.__sizes_and_addresses = None self.__data_flushed = None @@ -50,11 +41,11 @@ def __init__(self): def reset(self): """ Perform tasks to restart recording from time=0 """ - if os.path.exists(self._db_file): - if self._db: - self._db.close() - os.remove(self._db_file) - self._db = SqlLiteDatabase(self._db_file) + if self._db: + self._db.close() + if os.path.exists(self._db.default_database_file()): + os.remove(self._db.default_database_file()) + self._db = SqlLiteDatabase() self.__sizes_and_addresses = dict() self.__data_flushed = set() diff --git a/spinn_front_end_common/interface/buffer_management/storage_objects/sqllite_database.py b/spinn_front_end_common/interface/buffer_management/storage_objects/sqllite_database.py index 2f9448e828..0d15ef6bb6 100644 --- a/spinn_front_end_common/interface/buffer_management/storage_objects/sqllite_database.py +++ b/spinn_front_end_common/interface/buffer_management/storage_objects/sqllite_database.py @@ -18,12 +18,15 @@ import time from spinn_utilities.abstract_context_manager import AbstractContextManager from spinn_utilities.overrides import overrides +from spinn_front_end_common.data import FecDataView from spinn_front_end_common.utilities.sqlite_db import SQLiteDB from .abstract_database import AbstractDatabase _DDL_FILE = os.path.join(os.path.dirname(__file__), "db.sql") _SECONDS_TO_MICRO_SECONDS_CONVERSION = 1000 +#: Name of the database in the data folder +DB_FILE_NAME = "buffer.sqlite3" def _timestamp(): return int(time.time() * _SECONDS_TO_MICRO_SECONDS_CONVERSION) @@ -43,11 +46,18 @@ def __init__(self, database_file=None): """ :param str database_file: The name of a file that contains (or will contain) an SQLite - database holding the data. If omitted, an unshared in-memory - database will be used. + database holding the data. 
+ If omitted the default location will be used """ + if database_file is None: + database_file = self.default_database_file + super().__init__(database_file, ddl_file=_DDL_FILE) + def default_database_file(self): + return os.path.join( + FecDataView.get_run_dir_path(), DB_FILE_NAME) + @overrides(AbstractDatabase.clear) def clear(self): with self.transaction() as cursor: From 6535fd65268d07d1dc2eea38c4e34e1a19b6dfc6 Mon Sep 17 00:00:00 2001 From: "Christian Y. Brenninkmeijer" Date: Tue, 18 Oct 2022 12:17:07 +0100 Subject: [PATCH 02/49] store vertex labels in buffer sql --- .../interface/abstract_spinnaker_base.py | 31 ++++++++++++++++++- .../buffer_management/storage_objects/db.sql | 3 +- .../storage_objects/sqllite_database.py | 12 ++++++- 3 files changed, 43 insertions(+), 3 deletions(-) diff --git a/spinn_front_end_common/interface/abstract_spinnaker_base.py b/spinn_front_end_common/interface/abstract_spinnaker_base.py index b23d8d3160..5e24d3f616 100644 --- a/spinn_front_end_common/interface/abstract_spinnaker_base.py +++ b/spinn_front_end_common/interface/abstract_spinnaker_base.py @@ -67,6 +67,8 @@ from spinn_front_end_common.abstract_models import ( AbstractVertexWithEdgeToDependentVertices, AbstractCanReset) +from spinn_front_end_common.interface.buffer_management.storage_objects \ + import (SqlLiteDatabase) from spinn_front_end_common.interface.config_handler import ConfigHandler from spinn_front_end_common.interface.interface_functions import ( application_finisher, application_runner, @@ -2270,10 +2272,32 @@ def _print_iobuf(errors, warnings): for error in errors: logger.error(error) + def _execute_prepare_chip_power(self): + with FecTimer("Prepare Chip Power", TimerWork.REPORT) as timer: + if timer.skip_if_cfg_false("Reports", "write_energy_report"): + return + if timer.skip_if_virtual_board(): + return + db = SqlLiteDatabase() + db.store_placements() + + def _report_chip_active(self): + with FecTimer("Prepare Chip Power", TimerWork.REPORT) as timer: + if timer.skip_if_cfg_false("Reports", "write_energy_report"): + return + if timer.skip_if_virtual_board(): + return + write_chip_active_report() + + def _do_end_of_run(self): + if not self._data_writer.is_ran_last(): + return + self._execute_prepare_chip_power() + #self._report_chip_active() + def reset(self): """ Code that puts the simulation back at time zero """ - FecTimer.start_category(TimerCategory.RESETTING) if not self._data_writer.is_ran_last(): if not self._data_writer.is_ran_ever(): logger.error("Ignoring the reset before the run") @@ -2281,8 +2305,11 @@ def reset(self): logger.error("Ignoring the repeated reset call") return + FecTimer.start_category(TimerCategory.RESETTING) logger.info("Resetting") + self._do_end_of_run() + # rewind the buffers from the buffer manager, to start at the beginning # of the simulation again and clear buffered out if self._data_writer.has_buffer_manager(): @@ -2367,6 +2394,8 @@ def stop(self): set_config("Reports", "read_provenance_data", "True") self._do_read_provenance() + self._do_end_of_run() + except Exception as e: self._recover_from_error(e) self.write_errored_file() diff --git a/spinn_front_end_common/interface/buffer_management/storage_objects/db.sql b/spinn_front_end_common/interface/buffer_management/storage_objects/db.sql index b75bd93cb6..2789f5be6a 100644 --- a/spinn_front_end_common/interface/buffer_management/storage_objects/db.sql +++ b/spinn_front_end_common/interface/buffer_management/storage_objects/db.sql @@ -22,7 +22,8 @@ CREATE TABLE IF NOT EXISTS core( core_id 
INTEGER PRIMARY KEY AUTOINCREMENT, x INTEGER NOT NULL, y INTEGER NOT NULL, - processor INTEGER NOT NULL); + processor INTEGER NOT NULL, + label STRING); -- Every processor has a unique ID CREATE UNIQUE INDEX IF NOT EXISTS coreSanity ON core( x ASC, y ASC, processor ASC); diff --git a/spinn_front_end_common/interface/buffer_management/storage_objects/sqllite_database.py b/spinn_front_end_common/interface/buffer_management/storage_objects/sqllite_database.py index 0d15ef6bb6..7342b56cdb 100644 --- a/spinn_front_end_common/interface/buffer_management/storage_objects/sqllite_database.py +++ b/spinn_front_end_common/interface/buffer_management/storage_objects/sqllite_database.py @@ -50,7 +50,7 @@ def __init__(self, database_file=None): If omitted the default location will be used """ if database_file is None: - database_file = self.default_database_file + database_file = self.default_database_file() super().__init__(database_file, ddl_file=_DDL_FILE) @@ -246,3 +246,13 @@ def get_region_data(self, x, y, p, region): return data, False except LookupError: return memoryview(b''), True + + def store_placements(self): + with self.transaction() as cursor: + for placement in FecDataView.iterate_placemements(): + core_id = self.__get_core_id( + cursor, placement.x, placement.y, placement.p) + cursor.execute( + "UPDATE core SET label = ? WHERE core_id = ?", + (placement.vertex.label, core_id)) + assert cursor.rowcount == 1 From 36c01ab8739467438652e6d0f531cd1e4fab7acd Mon Sep 17 00:00:00 2001 From: "Christian Y. Brenninkmeijer" Date: Tue, 18 Oct 2022 14:56:02 +0100 Subject: [PATCH 03/49] save extra chip power monitor stuff to the database --- .../interface/abstract_spinnaker_base.py | 9 ++- .../buffer_management/storage_objects/db.sql | 3 +- .../storage_objects/sqllite_database.py | 60 ++++++++++++++++++- .../utilities/report_functions/__init__.py | 2 + 4 files changed, 68 insertions(+), 6 deletions(-) diff --git a/spinn_front_end_common/interface/abstract_spinnaker_base.py b/spinn_front_end_common/interface/abstract_spinnaker_base.py index 5e24d3f616..f6ea838dd6 100644 --- a/spinn_front_end_common/interface/abstract_spinnaker_base.py +++ b/spinn_front_end_common/interface/abstract_spinnaker_base.py @@ -110,7 +110,7 @@ memory_map_on_host_chip_report, network_specification, router_collision_potential_report, routing_table_from_machine_report, tags_from_machine_report, - write_json_machine, write_json_placements, + write_chip_active_report, write_json_machine, write_json_placements, write_json_routing_tables, drift_report) from spinn_front_end_common.utilities.iobuf_extractor import IOBufExtractor from spinn_front_end_common.utilities.utility_objs import ExecutableType @@ -2280,6 +2280,9 @@ def _execute_prepare_chip_power(self): return db = SqlLiteDatabase() db.store_placements() + db.store_chip_power_monitors() + #data = list(db.iterate_chip_power_monitor_cores()) + db.close() def _report_chip_active(self): with FecTimer("Prepare Chip Power", TimerWork.REPORT) as timer: @@ -2287,13 +2290,13 @@ def _report_chip_active(self): return if timer.skip_if_virtual_board(): return - write_chip_active_report() + #write_chip_active_report() def _do_end_of_run(self): if not self._data_writer.is_ran_last(): return self._execute_prepare_chip_power() - #self._report_chip_active() + self._report_chip_active() def reset(self): """ Code that puts the simulation back at time zero diff --git a/spinn_front_end_common/interface/buffer_management/storage_objects/db.sql 
b/spinn_front_end_common/interface/buffer_management/storage_objects/db.sql index 2789f5be6a..b75bd93cb6 100644 --- a/spinn_front_end_common/interface/buffer_management/storage_objects/db.sql +++ b/spinn_front_end_common/interface/buffer_management/storage_objects/db.sql @@ -22,8 +22,7 @@ CREATE TABLE IF NOT EXISTS core( core_id INTEGER PRIMARY KEY AUTOINCREMENT, x INTEGER NOT NULL, y INTEGER NOT NULL, - processor INTEGER NOT NULL, - label STRING); + processor INTEGER NOT NULL); -- Every processor has a unique ID CREATE UNIQUE INDEX IF NOT EXISTS coreSanity ON core( x ASC, y ASC, processor ASC); diff --git a/spinn_front_end_common/interface/buffer_management/storage_objects/sqllite_database.py b/spinn_front_end_common/interface/buffer_management/storage_objects/sqllite_database.py index 7342b56cdb..a0672b319a 100644 --- a/spinn_front_end_common/interface/buffer_management/storage_objects/sqllite_database.py +++ b/spinn_front_end_common/interface/buffer_management/storage_objects/sqllite_database.py @@ -220,7 +220,7 @@ def store_data_in_region_buffer(self, x, y, p, region, missing, data): region_id, content, content_len) VALUES (?, CAST(? AS BLOB), ?) """, (region_id, datablob, len(data))) - assert cursor.rowcount == 1 + assert cursor.rowcount == 1 def __use_main_table(self, cursor, region_id): """ @@ -248,7 +248,18 @@ def get_region_data(self, x, y, p, region): return memoryview(b''), True def store_placements(self): + exists = False with self.transaction() as cursor: + for row in cursor.execute("PRAGMA TABLE_INFO(core)"): + if row["name"] == "label": + exists = True + + if exists: + return + # already done so no need to repeat + + cursor.execute("ALTER TABLE core ADD COLUMN label STRING") + for placement in FecDataView.iterate_placemements(): core_id = self.__get_core_id( cursor, placement.x, placement.y, placement.p) @@ -256,3 +267,50 @@ def store_placements(self): "UPDATE core SET label = ? WHERE core_id = ?", (placement.vertex.label, core_id)) assert cursor.rowcount == 1 + + def store_chip_power_monitors(self): + # delayed import due to circular refrences + from spinn_front_end_common.utility_models.\ + chip_power_monitor_machine_vertex import ( + ChipPowerMonitorMachineVertex) + + with self.transaction() as cursor: + for row in cursor.execute( + """ + SELECT name FROM sqlite_master + WHERE type='table' AND name='chip_power_monitor' + """): + # Already exists so no need to run again + return + + cursor.execute( + """ + CREATE TABLE chip_power_monitors( + cpm_id INTEGER PRIMARY KEY autoincrement, + core_id INTEGER NOT NULL + REFERENCES core(core_id) ON DELETE RESTRICT, + sampling_frequency FLOAT NOT NULL) + """) + + for placement in FecDataView.iterate_placements_by_vertex_type( + ChipPowerMonitorMachineVertex): + core_id = self.__get_core_id( + cursor, placement.x, placement.y, placement.p) + cursor.execute( + """ + INSERT INTO chip_power_monitors( + core_id, sampling_frequency) + VALUES (?, ?) 
+ """, (core_id, placement.vertex.sampling_frequency)) + assert cursor.rowcount == 1 + + def iterate_chip_power_monitor_cores(self): + with self.transaction() as cursor: + for row in cursor.execute( + """ + SELECT core_id, sampling_frequency + FROM chip_power_monitors + ORDER BY core_id + """): + yield row + diff --git a/spinn_front_end_common/utilities/report_functions/__init__.py b/spinn_front_end_common/utilities/report_functions/__init__.py index 2a28071ecc..c66d0aa9e2 100644 --- a/spinn_front_end_common/utilities/report_functions/__init__.py +++ b/spinn_front_end_common/utilities/report_functions/__init__.py @@ -16,6 +16,7 @@ from .bit_field_compressor_report import bitfield_compressor_report from .bit_field_summary import BitFieldSummary from .board_chip_report import board_chip_report +from .chip_active_report import write_chip_active_report from .energy_report import EnergyReport from .fixed_route_from_machine_report import fixed_route_from_machine_report from .memory_map_on_host_chip_report import memory_map_on_host_chip_report @@ -44,6 +45,7 @@ "router_collision_potential_report", "routing_table_from_machine_report", "tags_from_machine_report", + "write_chip_active_report", "write_json_machine", "write_json_placements", "write_json_routing_tables", From d7957edaa65cecc5763a0134b885fc3a8596d5f9 Mon Sep 17 00:00:00 2001 From: "Christian Y. Brenninkmeijer" Date: Tue, 18 Oct 2022 16:33:23 +0100 Subject: [PATCH 04/49] chip_active_report --- .../interface/abstract_spinnaker_base.py | 3 +- .../storage_objects/sqllite_database.py | 68 ++++++++++++++++++- 2 files changed, 67 insertions(+), 4 deletions(-) diff --git a/spinn_front_end_common/interface/abstract_spinnaker_base.py b/spinn_front_end_common/interface/abstract_spinnaker_base.py index f6ea838dd6..b7deb832b0 100644 --- a/spinn_front_end_common/interface/abstract_spinnaker_base.py +++ b/spinn_front_end_common/interface/abstract_spinnaker_base.py @@ -2281,7 +2281,6 @@ def _execute_prepare_chip_power(self): db = SqlLiteDatabase() db.store_placements() db.store_chip_power_monitors() - #data = list(db.iterate_chip_power_monitor_cores()) db.close() def _report_chip_active(self): @@ -2290,7 +2289,7 @@ def _report_chip_active(self): return if timer.skip_if_virtual_board(): return - #write_chip_active_report() + write_chip_active_report() def _do_end_of_run(self): if not self._data_writer.is_ran_last(): diff --git a/spinn_front_end_common/interface/buffer_management/storage_objects/sqllite_database.py b/spinn_front_end_common/interface/buffer_management/storage_objects/sqllite_database.py index a0672b319a..55b1ea5729 100644 --- a/spinn_front_end_common/interface/buffer_management/storage_objects/sqllite_database.py +++ b/spinn_front_end_common/interface/buffer_management/storage_objects/sqllite_database.py @@ -141,6 +141,52 @@ def __read_contents(self, cursor, x, y, p, region): data = c_buffer return memoryview(data) + def __read_contents(self, cursor, x, y, p, region): + """ + :param ~sqlite3.Cursor cursor: + :param int x: + :param int y: + :param int p: + :param int region: + :rtype: memoryview + """ + for row in cursor.execute( + """ + SELECT region_id, content, have_extra + FROM region_view + WHERE x = ? AND y = ? AND processor = ? + AND local_region_index = ? 
LIMIT 1 + """, (x, y, p, region)): + r_id, data, extra = ( + row["region_id"], row["content"], row["have_extra"]) + break + else: + raise LookupError("no record for region ({},{},{}:{})".format( + x, y, p, region)) + if extra: + c_buffer = None + for row in cursor.execute( + """ + SELECT r.content_len + ( + SELECT SUM(x.content_len) + FROM region_extra AS x + WHERE x.region_id = r.region_id) AS len + FROM region AS r WHERE region_id = ? LIMIT 1 + """, (r_id, )): + c_buffer = bytearray(row["len"]) + c_buffer[:len(data)] = data + idx = len(data) + for row in cursor.execute( + """ + SELECT content FROM region_extra + WHERE region_id = ? ORDER BY extra_id ASC + """, (r_id, )): + item = row["content"] + c_buffer[idx:idx + len(item)] = item + idx += len(item) + data = c_buffer + return memoryview(data) + @staticmethod def __get_core_id(cursor, x, y, p): """ @@ -268,6 +314,17 @@ def store_placements(self): (placement.vertex.label, core_id)) assert cursor.rowcount == 1 + def get_label(self, x, y, p): + with self.transaction() as cursor: + for row in cursor.execute( + """ + SELECT label + FROM core + WHERE x = ? AND y = ? and processor = ? + """, (x, y, p)): + return str(row["label"], 'utf8') + return "" + def store_chip_power_monitors(self): # delayed import due to circular refrences from spinn_front_end_common.utility_models.\ @@ -292,6 +349,13 @@ def store_chip_power_monitors(self): sampling_frequency FLOAT NOT NULL) """) + cursor.execute( + """ + CREATE VIEW chip_power_monitors_view AS + SELECT core_id, x, y, processor, sampling_frequency + FROM core NATURAL JOIN chip_power_monitors + """) + for placement in FecDataView.iterate_placements_by_vertex_type( ChipPowerMonitorMachineVertex): core_id = self.__get_core_id( @@ -308,8 +372,8 @@ def iterate_chip_power_monitor_cores(self): with self.transaction() as cursor: for row in cursor.execute( """ - SELECT core_id, sampling_frequency - FROM chip_power_monitors + SELECT x, y, processor, sampling_frequency + FROM chip_power_monitors_view ORDER BY core_id """): yield row From d0556bd9b80f9cda1140890bd0b264c33057a1c0 Mon Sep 17 00:00:00 2001 From: "Christian Y. Brenninkmeijer" Date: Wed, 19 Oct 2022 06:28:14 +0100 Subject: [PATCH 05/49] fix import --- .../buffer_management/test_buffered_receiver_with_db.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/unittests/interface/buffer_management/test_buffered_receiver_with_db.py b/unittests/interface/buffer_management/test_buffered_receiver_with_db.py index 50d30f0c60..d2ebe0d463 100644 --- a/unittests/interface/buffer_management/test_buffered_receiver_with_db.py +++ b/unittests/interface/buffer_management/test_buffered_receiver_with_db.py @@ -19,7 +19,7 @@ from spinn_front_end_common.interface.buffer_management.storage_objects \ import BufferedReceivingData from spinn_front_end_common.interface.buffer_management.storage_objects\ - .buffered_receiving_data import DB_FILE_NAME + .sqllite_database import DB_FILE_NAME from spinn_front_end_common.interface.config_setup import unittest_setup From f701a29654a0e6cb6380685851597048c0ce834d Mon Sep 17 00:00:00 2001 From: "Christian Y. 
Brenninkmeijer" Date: Wed, 19 Oct 2022 06:36:41 +0100 Subject: [PATCH 06/49] write_chip_active_report --- .../report_functions/chip_active_report.py | 92 +++++++++++++++++++ 1 file changed, 92 insertions(+) create mode 100644 spinn_front_end_common/utilities/report_functions/chip_active_report.py diff --git a/spinn_front_end_common/utilities/report_functions/chip_active_report.py b/spinn_front_end_common/utilities/report_functions/chip_active_report.py new file mode 100644 index 0000000000..3fd05249db --- /dev/null +++ b/spinn_front_end_common/utilities/report_functions/chip_active_report.py @@ -0,0 +1,92 @@ +# Copyright (c) 2017-2019 The University of Manchester +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . + +from collections import defaultdict +import logging +import numpy +import os +from spinn_utilities.config_holder import (get_config_int, get_config_str) +from spinn_utilities.exceptions import SpiNNUtilsException +from spinn_utilities.log import FormatAdapter +from spinn_front_end_common.data import FecDataView +from spinn_front_end_common.interface.buffer_management.storage_objects \ + import (SqlLiteDatabase) +from spinn_front_end_common.interface.provenance import ( + FecTimer, ProvenanceReader, TimerCategory) +from spinn_front_end_common.utility_models import ChipPowerMonitorMachineVertex +from spinn_front_end_common.utilities.exceptions import ConfigurationException +from spinn_front_end_common.interface.interface_functions.compute_energy_used\ + import (JOULES_PER_SPIKE, MILLIWATTS_PER_CHIP_ACTIVE_OVERHEAD, + MILLIWATTS_PER_FRAME_ACTIVE_COST, MILLIWATTS_PER_FPGA, + MILLIWATTS_PER_IDLE_CHIP) +from spinn_machine.machine import Machine + +logger = FormatAdapter(logging.getLogger(__name__)) + +#: converter between joules to kilowatt hours +JOULES_TO_KILOWATT_HOURS = 3600000 + +# energy report file name +CHIP_ACTIVE_FILENAME = "chip_active_report.rpt" + +def write_chip_active_report(report_path=None, buffer_path=None): + """ Writes the report. + + :param report_path: Where to write the report if not using the default + :type report_path: None or str + :param buffer_path: Where the provenance sqlite3 files is located + if not using the default. 
+ :type buffer_path: None or str + :rtype: None + """ + if report_path is None: + try: + report_dir = FecDataView.get_run_dir_path() + report_path = os.path.join( + report_dir, CHIP_ACTIVE_FILENAME) + except SpiNNUtilsException: + report_path = os.path.join( + os.path.curdir, CHIP_ACTIVE_FILENAME) + logger.warning(f"no report_path so writing to {report_path}") + + # create detailed report + with open(report_path, "w", encoding="utf-8") as f: + __write_report(f, buffer_path) + +def __write_report(f, buffer_path): + db = SqlLiteDatabase(buffer_path) + n_samples_per_recording = get_config_int( + "EnergyMonitor", "n_samples_per_recording_entry") + + for row in db.iterate_chip_power_monitor_cores(): + record_raw, data_missing = db.get_region_data( + row["x"], row["y"], row["processor"], 0) + results = ( + numpy.frombuffer(record_raw, dtype="uint32").reshape(-1, 18) / + n_samples_per_recording) + active_sums = numpy.sum(results, axis=0) + activity_count = numpy.sum(results) + time_for_recorded_sample =\ + (row["sampling_frequency"] * n_samples_per_recording) / 1000 + + for core in range(0, 18): + label = db.get_label(row["x"], row["y"], core) + if (active_sums[core] > 0) or label: + energy = (active_sums[core] * time_for_recorded_sample * + MILLIWATTS_PER_CHIP_ACTIVE_OVERHEAD / 18) + f.write( + f"processor {row['x']}:{row['y']}:{core}({label})" + f" was active for {active_sums[core]} " + f" using {energy} Joules\n") From 0178b7967ad1301f7f19e7127cb0ec0217b9aa22 Mon Sep 17 00:00:00 2001 From: "Christian Y. Brenninkmeijer" Date: Wed, 19 Oct 2022 06:45:24 +0100 Subject: [PATCH 07/49] flake8 --- .../storage_objects/sqllite_database.py | 52 ++----------------- .../report_functions/chip_active_report.py | 9 +--- 2 files changed, 4 insertions(+), 57 deletions(-) diff --git a/spinn_front_end_common/interface/buffer_management/storage_objects/sqllite_database.py b/spinn_front_end_common/interface/buffer_management/storage_objects/sqllite_database.py index 55b1ea5729..951d5d56b5 100644 --- a/spinn_front_end_common/interface/buffer_management/storage_objects/sqllite_database.py +++ b/spinn_front_end_common/interface/buffer_management/storage_objects/sqllite_database.py @@ -28,6 +28,7 @@ #: Name of the database in the data folder DB_FILE_NAME = "buffer.sqlite3" + def _timestamp(): return int(time.time() * _SECONDS_TO_MICRO_SECONDS_CONVERSION) @@ -141,52 +142,6 @@ def __read_contents(self, cursor, x, y, p, region): data = c_buffer return memoryview(data) - def __read_contents(self, cursor, x, y, p, region): - """ - :param ~sqlite3.Cursor cursor: - :param int x: - :param int y: - :param int p: - :param int region: - :rtype: memoryview - """ - for row in cursor.execute( - """ - SELECT region_id, content, have_extra - FROM region_view - WHERE x = ? AND y = ? AND processor = ? - AND local_region_index = ? LIMIT 1 - """, (x, y, p, region)): - r_id, data, extra = ( - row["region_id"], row["content"], row["have_extra"]) - break - else: - raise LookupError("no record for region ({},{},{}:{})".format( - x, y, p, region)) - if extra: - c_buffer = None - for row in cursor.execute( - """ - SELECT r.content_len + ( - SELECT SUM(x.content_len) - FROM region_extra AS x - WHERE x.region_id = r.region_id) AS len - FROM region AS r WHERE region_id = ? LIMIT 1 - """, (r_id, )): - c_buffer = bytearray(row["len"]) - c_buffer[:len(data)] = data - idx = len(data) - for row in cursor.execute( - """ - SELECT content FROM region_extra - WHERE region_id = ? 
ORDER BY extra_id ASC - """, (r_id, )): - item = row["content"] - c_buffer[idx:idx + len(item)] = item - idx += len(item) - data = c_buffer - return memoryview(data) - @staticmethod def __get_core_id(cursor, x, y, p): """ @@ -329,7 +284,7 @@ def store_chip_power_monitors(self): # delayed import due to circular refrences from spinn_front_end_common.utility_models.\ chip_power_monitor_machine_vertex import ( - ChipPowerMonitorMachineVertex) + ChipPowerMonitorMachineVertex) with self.transaction() as cursor: for row in cursor.execute( @@ -352,7 +307,7 @@ def store_chip_power_monitors(self): cursor.execute( """ CREATE VIEW chip_power_monitors_view AS - SELECT core_id, x, y, processor, sampling_frequency + SELECT core_id, x, y, processor, sampling_frequency FROM core NATURAL JOIN chip_power_monitors """) @@ -377,4 +332,3 @@ def iterate_chip_power_monitor_cores(self): ORDER BY core_id """): yield row - diff --git a/spinn_front_end_common/utilities/report_functions/chip_active_report.py b/spinn_front_end_common/utilities/report_functions/chip_active_report.py index 3fd05249db..61cc29c61f 100644 --- a/spinn_front_end_common/utilities/report_functions/chip_active_report.py +++ b/spinn_front_end_common/utilities/report_functions/chip_active_report.py @@ -23,15 +23,8 @@ from spinn_front_end_common.data import FecDataView from spinn_front_end_common.interface.buffer_management.storage_objects \ import (SqlLiteDatabase) -from spinn_front_end_common.interface.provenance import ( - FecTimer, ProvenanceReader, TimerCategory) -from spinn_front_end_common.utility_models import ChipPowerMonitorMachineVertex -from spinn_front_end_common.utilities.exceptions import ConfigurationException from spinn_front_end_common.interface.interface_functions.compute_energy_used\ - import (JOULES_PER_SPIKE, MILLIWATTS_PER_CHIP_ACTIVE_OVERHEAD, - MILLIWATTS_PER_FRAME_ACTIVE_COST, MILLIWATTS_PER_FPGA, - MILLIWATTS_PER_IDLE_CHIP) -from spinn_machine.machine import Machine + import (MILLIWATTS_PER_CHIP_ACTIVE_OVERHEAD) logger = FormatAdapter(logging.getLogger(__name__)) From 66bcaf8865dd67836c07d065b486ec8f355303ea Mon Sep 17 00:00:00 2001 From: "Christian Y. Brenninkmeijer" Date: Wed, 19 Oct 2022 07:40:17 +0100 Subject: [PATCH 08/49] totals and track monitors --- .../storage_objects/sqllite_database.py | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/spinn_front_end_common/interface/buffer_management/storage_objects/sqllite_database.py b/spinn_front_end_common/interface/buffer_management/storage_objects/sqllite_database.py index 951d5d56b5..eb0d87b944 100644 --- a/spinn_front_end_common/interface/buffer_management/storage_objects/sqllite_database.py +++ b/spinn_front_end_common/interface/buffer_management/storage_objects/sqllite_database.py @@ -269,6 +269,18 @@ def store_placements(self): (placement.vertex.label, core_id)) assert cursor.rowcount == 1 + for chip in FecDataView.get_machine().chips: + for processor in chip.processors: + if processor.is_monitor: + core_id = self.__get_core_id( + cursor, chip.x, chip.y, processor.processor_id) + cursor.execute( + """ + UPDATE core SET label = 'MONITOR' + WHERE core_id = ? 
+ """, [core_id]) + assert cursor.rowcount == 1 + def get_label(self, x, y, p): with self.transaction() as cursor: for row in cursor.execute( @@ -287,7 +299,7 @@ def store_chip_power_monitors(self): ChipPowerMonitorMachineVertex) with self.transaction() as cursor: - for row in cursor.execute( + for _ in cursor.execute( """ SELECT name FROM sqlite_master WHERE type='table' AND name='chip_power_monitor' From 8db050df54d0370c1f550d19ec520145a3a27fe4 Mon Sep 17 00:00:00 2001 From: "Christian Y. Brenninkmeijer" Date: Wed, 19 Oct 2022 07:44:20 +0100 Subject: [PATCH 09/49] totals and track monitors --- .../storage_objects/sqllite_database.py | 2 +- .../report_functions/chip_active_report.py | 28 +++++++++++++++---- 2 files changed, 23 insertions(+), 7 deletions(-) diff --git a/spinn_front_end_common/interface/buffer_management/storage_objects/sqllite_database.py b/spinn_front_end_common/interface/buffer_management/storage_objects/sqllite_database.py index eb0d87b944..8cc50aee12 100644 --- a/spinn_front_end_common/interface/buffer_management/storage_objects/sqllite_database.py +++ b/spinn_front_end_common/interface/buffer_management/storage_objects/sqllite_database.py @@ -276,7 +276,7 @@ def store_placements(self): cursor, chip.x, chip.y, processor.processor_id) cursor.execute( """ - UPDATE core SET label = 'MONITOR' + UPDATE core SET label = 'MONITOR' WHERE core_id = ? """, [core_id]) assert cursor.rowcount == 1 diff --git a/spinn_front_end_common/utilities/report_functions/chip_active_report.py b/spinn_front_end_common/utilities/report_functions/chip_active_report.py index 61cc29c61f..297970be5a 100644 --- a/spinn_front_end_common/utilities/report_functions/chip_active_report.py +++ b/spinn_front_end_common/utilities/report_functions/chip_active_report.py @@ -13,11 +13,10 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . -from collections import defaultdict import logging import numpy import os -from spinn_utilities.config_holder import (get_config_int, get_config_str) +from spinn_utilities.config_holder import get_config_int from spinn_utilities.exceptions import SpiNNUtilsException from spinn_utilities.log import FormatAdapter from spinn_front_end_common.data import FecDataView @@ -34,6 +33,7 @@ # energy report file name CHIP_ACTIVE_FILENAME = "chip_active_report.rpt" + def write_chip_active_report(report_path=None, buffer_path=None): """ Writes the report. 
@@ -58,11 +58,16 @@ def write_chip_active_report(report_path=None, buffer_path=None): with open(report_path, "w", encoding="utf-8") as f: __write_report(f, buffer_path) + def __write_report(f, buffer_path): db = SqlLiteDatabase(buffer_path) n_samples_per_recording = get_config_int( "EnergyMonitor", "n_samples_per_recording_entry") + milliwatts = MILLIWATTS_PER_CHIP_ACTIVE_OVERHEAD / 18 + activity_total = 0 + energy_total = 0 + for row in db.iterate_chip_power_monitor_cores(): record_raw, data_missing = db.get_region_data( row["x"], row["y"], row["processor"], 0) @@ -73,13 +78,24 @@ def __write_report(f, buffer_path): activity_count = numpy.sum(results) time_for_recorded_sample =\ (row["sampling_frequency"] * n_samples_per_recording) / 1000 + energy_factor = time_for_recorded_sample * milliwatts for core in range(0, 18): label = db.get_label(row["x"], row["y"], core) if (active_sums[core] > 0) or label: - energy = (active_sums[core] * time_for_recorded_sample * - MILLIWATTS_PER_CHIP_ACTIVE_OVERHEAD / 18) f.write( f"processor {row['x']}:{row['y']}:{core}({label})" - f" was active for {active_sums[core]} " - f" using {energy} Joules\n") + f" was active for {active_sums[core]}ms " + f" using { active_sums[core] * energy_factor} Joules\n") + + energy = activity_count * energy_factor + activity_total += activity_count + energy_total += energy + f.write( + f"Total for chip {row['x']}:{row['y']} " + f" was {activity_count}ms of activity " + f" using {energy} Joules\n\n") + f.write( + f"Total " + f" was {activity_total}ms of activity " + f" using {energy_total} Joules\n\n") From d3ce5afac32522b597a73deb10f9ec8899a285c2 Mon Sep 17 00:00:00 2001 From: "Christian Y. Brenninkmeijer" Date: Wed, 19 Oct 2022 09:49:22 +0100 Subject: [PATCH 10/49] write_chip_active_report standalone --- unittests/utilities/.gitignore | 1 + unittests/utilities/buffer.sqlite3 | Bin 0 -> 32768 bytes unittests/utilities/test_chip_active.py | 63 ++++++++++++++++++++++++ 3 files changed, 64 insertions(+) create mode 100644 unittests/utilities/.gitignore create mode 100644 unittests/utilities/buffer.sqlite3 create mode 100644 unittests/utilities/test_chip_active.py diff --git a/unittests/utilities/.gitignore b/unittests/utilities/.gitignore new file mode 100644 index 0000000000..b911337edd --- /dev/null +++ b/unittests/utilities/.gitignore @@ -0,0 +1 @@ +*.rpt \ No newline at end of file diff --git a/unittests/utilities/buffer.sqlite3 b/unittests/utilities/buffer.sqlite3 new file mode 100644 index 0000000000000000000000000000000000000000..ff14f71d5850c34fb3dda823e78a84bfa9c1728e GIT binary patch literal 32768 zcmeI4O>7%Q6o7Yj*Iw_o_8=-_(n6W2ifXG?aR?x_P*gYFL;=T6Z3hS!ck65$3&(aH zw@r>o8*$*&3*rU`-~vb-;07n)zyZMpsh1u=P=wS9A-Dh+-t3RPan^Q9)2e|tQZ#>W z-h1=So3Y~^ZQ)d=(lXiEdUMfeu`%HRfe_(1V}c+=;V%S#ZqlG2yA{ZZr`)R;6%If0 zQJ9_$|RLH`Gs+Z=ewq58GR{;@A7_15aiLYO)XGhB(X6$S{-bRu?cv#6k!5Ey8DC)zA+Jy9c|AL>7c3NUhiZc5vg{;) zh6T#&g<^hcyy$OnGi)H6$)&-rV>_{Wh$yjWl&l@G@qv%rjJOdHwAe%(7us|1B1q2` zVU1wmw4Mcr)aAC6YYiG#{2JTwvAL5~jWcGI@mXdk6Ke-UN_2QQbmdVVyykMNX;@+0 zDR~38)9Qw5cX&3m)N84YM|1YE>4^ zBy$8Ug@?=aS_>iq9Dp^niq2R+sZ!Ofc_evN>#-B;;Tv~{SLJG5QKAP9gf1sIDW|8a z=w;=R;IPy_q&DqoA3n@Qqp7MpZx;ZcR+61HTjd3_iycVEoeuUK8%=N+3&usWWWnsu zVoeMxv5^sSdD4nxFAQ&4*$~;@p17vELC}uF4e?zVXWcRGjQ1Q0Zmh#pJp0#{i?_x7 z_ppnKoyg=)wD&0N^@;RsrpOK+gbn1-v_i!_h+KhGq7eWFLuB>w3J+7;T)}_?f5k2b zo!QT3r%ui4FwK+t%TB=BV*G4zoaMHxV;ojtJjph%GH*|;y&@~Ip&_zPJ=kpx0Uy)= z+3xIhkaiPxeQ@XM&zDQ}KOa>e|GRYpUMFtd0O$b!*J}bQ+>q|J*IwBax7;~HTldv@_E83|NsjSH=K>gW?s9!c!c>_)j!Z 
zAJzF!rqlKlfW0AZvPYl7?fdr)2P{KrSLcjTe${ME8m$GhX*T1jWGb;==4RDn6i2>{fT~dKTjNeLjp(u2_OL^fCP{L z56ldPD3OZfO`)l3>nmRE07Tq*k$LkrhqIVq6#^*iy>&| zb;a(Nf_5$jxK5SGT&GGTu2Usq0FwUnkx_zFw9(Q7*ak2aoSBu~YR7 z+TH*6d2JV;d!@&T@@Lk|VkgSQt*+v|@)G^op3_9^ucrOkGC*wLd&0K5UiZo$C(55* zKSMOQmu-Jz@j5xkk$ynDdNLq*xSw#JKChmKL2l6I>ZQu7=b>IgTWH^@dIoXZ>b!18 z&%Qo9Z%5CAcc8}!@|5*bKYBx;f5ZL%TTsA+1dsp{Kmter2_OL^fCP{L5_a01`j~NB{{S0VIF~kN^@u0!RP}Ac6ld0qg!h z{a&Dd(%. + +import os +import unittest +from spinn_utilities.exceptions import InvalidDirectory +from spinn_front_end_common.data.fec_data_writer import FecDataWriter +from spinn_front_end_common.interface.config_setup import unittest_setup +from spinn_front_end_common.utilities.report_functions import ( + write_chip_active_report) + + +class TestChipActive(unittest.TestCase): + + def setUp(cls): + unittest_setup() + + def test_no_params(self): + try: + write_chip_active_report() + failed = False + except Exception as ex: + self.assertIn("no such table", str(ex)) + failed = True + self.assertTrue(failed) + + def test_db_only(self): + # make sure there is not run_dir_path so falls back on default + writer = FecDataWriter.setup() + try: + writer.set_run_dir_path("THIS DIRECTORY DOES NOT EXIST") + except InvalidDirectory: + pass + db_path = os.path.join(os.path.dirname(__file__), "buffer.sqlite3") + write_chip_active_report(buffer_path=db_path) + + + def test_all_params(self): + # make sure there is not run_dir_path so falls back on default + writer = FecDataWriter.setup() + try: + writer.set_run_dir_path("THIS DIRECTORY DOES NOT EXIST") + except InvalidDirectory: + pass + db_path = os.path.join(os.path.dirname(__file__), "buffer.sqlite3") + report = os.path.join(os.path.dirname(__file__), "my_active.rpt") + write_chip_active_report(report_path=report, buffer_path=db_path) + + +if __name__ == '__main__': + unittest.main() From 235d16cbd31201dd6500a29eeafcbef9cc7b1c5d Mon Sep 17 00:00:00 2001 From: "Christian Y. Brenninkmeijer" Date: Wed, 19 Oct 2022 11:04:22 +0100 Subject: [PATCH 11/49] flake8 --- unittests/utilities/test_chip_active.py | 1 - 1 file changed, 1 deletion(-) diff --git a/unittests/utilities/test_chip_active.py b/unittests/utilities/test_chip_active.py index 5b43c4c6c6..422396e9c1 100644 --- a/unittests/utilities/test_chip_active.py +++ b/unittests/utilities/test_chip_active.py @@ -46,7 +46,6 @@ def test_db_only(self): db_path = os.path.join(os.path.dirname(__file__), "buffer.sqlite3") write_chip_active_report(buffer_path=db_path) - def test_all_params(self): # make sure there is not run_dir_path so falls back on default writer = FecDataWriter.setup() From 39fce52eeef9e440fb19ff6e9833f96af5e817fb Mon Sep 17 00:00:00 2001 From: "Christian Y. Brenninkmeijer" Date: Wed, 19 Oct 2022 15:13:21 +0100 Subject: [PATCH 12/49] Dont rat reports created in unittests --- .ratexcludes | 1 + 1 file changed, 1 insertion(+) diff --git a/.ratexcludes b/.ratexcludes index 8acb1c02ea..964a183a24 100644 --- a/.ratexcludes +++ b/.ratexcludes @@ -17,3 +17,4 @@ **/PACMAN/** **/DataSpecification/** **/spalloc/** +**/unittests/**/*.rpt From dcc1d04ee9dd6be8dfdd3c05e9d9bc125b72e1b0 Mon Sep 17 00:00:00 2001 From: "Christian Y. 
Brenninkmeijer" Date: Mon, 24 Oct 2022 12:04:57 +0100 Subject: [PATCH 13/49] reset number --- spinn_front_end_common/data/fec_data_view.py | 24 +++++++++++++++++++ .../data/fec_data_writer.py | 4 ++++ unittests/data/test_simulator_data.py | 6 +++++ 3 files changed, 34 insertions(+) diff --git a/spinn_front_end_common/data/fec_data_view.py b/spinn_front_end_common/data/fec_data_view.py index 764900a625..f0d179312b 100644 --- a/spinn_front_end_common/data/fec_data_view.py +++ b/spinn_front_end_common/data/fec_data_view.py @@ -73,6 +73,7 @@ class _FecDataModel(object): "_notification_protocol", "_max_run_time_steps", "_monitor_map", + "_reset_number", "_run_number", "_run_step", "_simulation_time_step_ms", @@ -109,6 +110,7 @@ def _clear(self): self._n_boards_required = None self._n_chips_required = None self._none_labelled_edge_count = 0 + self._reset_number = 0 self._run_number = None self._simulation_time_step_ms = None self._simulation_time_step_per_ms = None @@ -412,6 +414,28 @@ def has_time_scale_factor(cls): """ return cls.__fec_data._time_scale_factor is not None + # reset number + + @classmethod + def get_reset_number(cls): + """ + Get the number of times a reset has happened. + + Only counts the first reset after each run. + + So resets that are first soft then hard are ignored. + Double reset calls without a run and resets before run are ignored. + + Reset numbers start at zero + + :return: + :raises ~spinn_utilities.exceptions.SpiNNUtilsException: + If the run_number is currently unavailable + """ + if cls.__fec_data._reset_number is None: + raise cls._exception("run_number") + return cls.__fec_data._reset_number + # run number @classmethod diff --git a/spinn_front_end_common/data/fec_data_writer.py b/spinn_front_end_common/data/fec_data_writer.py index 49d4fceb8a..b4451a6ca9 100644 --- a/spinn_front_end_common/data/fec_data_writer.py +++ b/spinn_front_end_common/data/fec_data_writer.py @@ -85,6 +85,8 @@ def finish_run(self): @overrides(PacmanDataWriter._hard_reset) def _hard_reset(self): + if self.is_ran_last(): + self.__fec_data._reset_number += 1 PacmanDataWriter._hard_reset(self) SpiNNManDataWriter._local_hard_reset(self) self.__fec_data._hard_reset() @@ -92,6 +94,8 @@ def _hard_reset(self): @overrides(PacmanDataWriter._soft_reset) def _soft_reset(self): + if self.is_ran_last(): + self.__fec_data._reset_number += 1 PacmanDataWriter._soft_reset(self) SpiNNManDataWriter._local_soft_reset(self) self.__fec_data._soft_reset() diff --git a/unittests/data/test_simulator_data.py b/unittests/data/test_simulator_data.py index 6633bd48a8..51695d31cf 100644 --- a/unittests/data/test_simulator_data.py +++ b/unittests/data/test_simulator_data.py @@ -247,6 +247,7 @@ def test_directories_reset(self): writer = FecDataWriter.setup() run_dir = FecDataView.get_run_dir_path() self.assertIn("run_1", run_dir) + self.assertEqual(0, writer.get_reset_number()) writer.start_run() run_dir = FecDataView.get_run_dir_path() self.assertIn("run_1", run_dir) @@ -259,7 +260,9 @@ def test_directories_reset(self): writer.finish_run() run_dir = FecDataView.get_run_dir_path() self.assertIn("run_1", run_dir) + self.assertEqual(0, writer.get_reset_number()) writer.hard_reset() + self.assertEqual(1, writer.get_reset_number()) run_dir = FecDataView.get_run_dir_path() self.assertIn("run_3", run_dir) writer.start_run() @@ -321,11 +324,14 @@ def test_run_number(self): self.assertEqual(3, FecDataView.get_run_number()) # run_dir_path only changed on hard reset self.assertIn("run_1", FecDataView.get_run_dir_path()) + 
self.assertEqual(0, writer.get_reset_number()) writer.soft_reset() + self.assertEqual(1, writer.get_reset_number()) self.assertEqual(3, FecDataView.get_run_number()) # run_dir_path only changed on hard reset self.assertIn("run_1", FecDataView.get_run_dir_path()) writer.hard_reset() + self.assertEqual(1, writer.get_reset_number()) self.assertEqual(3, FecDataView.get_run_number()) # run_dir_path changed by hard reset self.assertIn("run_3", FecDataView.get_run_dir_path()) From 7a728148cec54039960cf4ee9b9d6fd8611965b1 Mon Sep 17 00:00:00 2001 From: "Christian Y. Brenninkmeijer" Date: Mon, 24 Oct 2022 12:39:34 +0100 Subject: [PATCH 14/49] new buffer sqllite file after reset --- .../interface/abstract_spinnaker_base.py | 10 ++++---- .../buffer_management/buffer_manager.py | 23 +++++++------------ .../storage_objects/sqllite_database.py | 23 ++++--------------- 3 files changed, 18 insertions(+), 38 deletions(-) diff --git a/spinn_front_end_common/interface/abstract_spinnaker_base.py b/spinn_front_end_common/interface/abstract_spinnaker_base.py index f11fac6a12..3ab4198202 100644 --- a/spinn_front_end_common/interface/abstract_spinnaker_base.py +++ b/spinn_front_end_common/interface/abstract_spinnaker_base.py @@ -2283,11 +2283,6 @@ def reset(self): logger.info("Resetting") - # rewind the buffers from the buffer manager, to start at the beginning - # of the simulation again and clear buffered out - if self._data_writer.has_buffer_manager(): - self._data_writer.get_buffer_manager().reset() - if self._data_writer.get_user_accessed_machine(): logger.warning( "A reset after a get machine call is always hard and " @@ -2296,6 +2291,11 @@ def reset(self): else: self._data_writer.soft_reset() + # rewind the buffers from the buffer manager, to start at the beginning + # of the simulation again and clear buffered out + if self._data_writer.has_buffer_manager(): + self._data_writer.get_buffer_manager().reset() + # Reset the graph off the machine, to set things to time 0 self.__reset_graph_elements() FecTimer.end_category(TimerCategory.RESETTING) diff --git a/spinn_front_end_common/interface/buffer_management/buffer_manager.py b/spinn_front_end_common/interface/buffer_management/buffer_manager.py index 9764a7fd94..d71903bf42 100644 --- a/spinn_front_end_common/interface/buffer_management/buffer_manager.py +++ b/spinn_front_end_common/interface/buffer_management/buffer_manager.py @@ -92,9 +92,6 @@ class BufferManager(object): # Dictionary of sender vertex -> buffers sent "_sent_messages", - # storage area for received data from cores - "_db", - # Lock to avoid multiple messages being processed at the same time "_thread_lock_buffer_out", @@ -122,9 +119,6 @@ def __init__(self): # Dictionary of sender vertex -> buffers sent self._sent_messages = dict() - # storage area for received data from cores - self._db = BufferDatabase() - # Lock to avoid multiple messages being processed at the same time self._thread_lock_buffer_out = threading.RLock() self._thread_lock_buffer_in = threading.RLock() @@ -304,9 +298,6 @@ def reset(self): beginning of its expected regions and clears the buffered out\ data files. 
""" - # - self._db.reset() - # rewind buffered in for vertex in self._sender_vertices: for region in vertex.get_regions(): @@ -329,7 +320,7 @@ def clear_recorded_data(self, x, y, p, recording_region_id): :param int p: placement p coordinate :param int recording_region_id: the recording region ID """ - self._db.clear_region(x, y, p, recording_region_id) + BufferDatabase().clear_region(x, y, p, recording_region_id) def _create_message_to_send(self, size, vertex, region): """ Creates a single message to send with the given boundaries. @@ -576,14 +567,15 @@ def __python_get_data_for_placements(self, recording_placements): """ :param ~pacman.model.placements.Placements recording_placements: Where to get the data from. - """ + """ # get data progress = ProgressBar( len(recording_placements), "Extracting buffers from the last run") + db = BufferDatabase() for placement in progress.over(recording_placements): - self._retreive_by_placement(placement) + self._retreive_by_placement(db, placement) def get_data_by_placement(self, placement, recording_region_id): """ Get the data container for all the data retrieved\ @@ -603,12 +595,13 @@ def get_data_by_placement(self, placement, recording_region_id): "so no data read".format(placement.vertex)) with self._thread_lock_buffer_out: # data flush has been completed - return appropriate data - return self._db.get_region_data( + return BufferDatabase().get_region_data( placement.x, placement.y, placement.p, recording_region_id) - def _retreive_by_placement(self, placement): + def _retreive_by_placement(self, db, placement): """ Retrieve the data for a vertex; must be locked first. + :param db BufferDatabase: dtabase to store into :param ~pacman.model.placements.Placement placement: the placement to get the data from :param int recording_region_id: desired recording data region @@ -624,7 +617,7 @@ def _retreive_by_placement(self, placement): size, addr, missing = sizes_and_addresses[region] data = self._request_data( placement.x, placement.y, addr, size) - self._db.store_data_in_region_buffer( + db.store_data_in_region_buffer( placement.x, placement.y, placement.p, region, missing, data) def _get_region_information(self, addr, x, y, p): diff --git a/spinn_front_end_common/interface/buffer_management/storage_objects/sqllite_database.py b/spinn_front_end_common/interface/buffer_management/storage_objects/sqllite_database.py index 4bf8db2943..b7670da990 100644 --- a/spinn_front_end_common/interface/buffer_management/storage_objects/sqllite_database.py +++ b/spinn_front_end_common/interface/buffer_management/storage_objects/sqllite_database.py @@ -23,7 +23,6 @@ _DDL_FILE = os.path.join(os.path.dirname(__file__), "db.sql") _SECONDS_TO_MICRO_SECONDS_CONVERSION = 1000 #: Name of the database in the data folder -DB_FILE_NAME = "buffer.sqlite3" def _timestamp(): @@ -62,24 +61,12 @@ def __init__(self, database_file=None): @classmethod def default_database_file(cls): + if FecDataView.get_reset_number(): + return os.path.join( + FecDataView.get_run_dir_path(), + f"buffer{FecDataView.get_reset_number()}.sqlite3") return os.path.join( - FecDataView.get_run_dir_path(), DB_FILE_NAME) - - def reset(self): - """ - UGLY SHOULD NOT NEVER DELETE THE FILE! - - .. note:: - This method will be removed when the database moves to - keeping data after reset. 
-
-        :rtype: None
-        """
-        database_file = self.default_database_file()
-        self.close()
-        if os.path.exists(database_file):
-            os.remove(database_file)
-        super().__init__(database_file, ddl_file=_DDL_FILE)
+            FecDataView.get_run_dir_path(), "buffer.sqlite3")
 
     def clear(self):
         """ Clears the data for all regions.

From 615e5171d96a2a2fc22bc74fe194c49c9928ab26 Mon Sep 17 00:00:00 2001
From: "Christian Y. Brenninkmeijer"
Date: Mon, 24 Oct 2022 12:40:22 +0100
Subject: [PATCH 15/49] remove clear

---
 .../storage_objects/sqllite_database.py | 18 ------------------
 1 file changed, 18 deletions(-)

diff --git a/spinn_front_end_common/interface/buffer_management/storage_objects/sqllite_database.py b/spinn_front_end_common/interface/buffer_management/storage_objects/sqllite_database.py
index b7670da990..70b0c1021f 100644
--- a/spinn_front_end_common/interface/buffer_management/storage_objects/sqllite_database.py
+++ b/spinn_front_end_common/interface/buffer_management/storage_objects/sqllite_database.py
@@ -68,24 +68,6 @@ def default_database_file(cls):
         return os.path.join(
             FecDataView.get_run_dir_path(), "buffer.sqlite3")
 
-    def clear(self):
-        """ Clears the data for all regions.
-
-        .. note::
-            This method will be removed when the database moves to
-            keeping data after reset.
-
-        :rtype: None
-        """
-        with self.transaction() as cursor:
-            cursor.execute(
-                """
-                UPDATE region SET
-                    content = CAST('' AS BLOB), content_len = 0,
-                    fetches = 0, append_time = NULL
-                """)
-            cursor.execute("DELETE FROM region_extra")

From 68ff94f023ab51639bde0423868676d50f47ac5e Mon Sep 17 00:00:00 2001
From: "Christian Y. Brenninkmeijer"
Date: Mon, 24 Oct 2022 14:56:28 +0100
Subject: [PATCH 16/49] read_only when reading

---
 .../interface/buffer_management/buffer_manager.py          | 6 +++---
 .../buffer_management/storage_objects/sqllite_database.py  | 8 ++++++--
 2 files changed, 9 insertions(+), 5 deletions(-)

diff --git a/spinn_front_end_common/interface/buffer_management/buffer_manager.py b/spinn_front_end_common/interface/buffer_management/buffer_manager.py
index d71903bf42..f4230d275e 100644
--- a/spinn_front_end_common/interface/buffer_management/buffer_manager.py
+++ b/spinn_front_end_common/interface/buffer_management/buffer_manager.py
@@ -320,7 +320,7 @@ def clear_recorded_data(self, x, y, p, recording_region_id):
         :param int p: placement p coordinate
         :param int recording_region_id: the recording region ID
         """
-        BufferDatabase().clear_region(x, y, p, recording_region_id)
+        BufferDatabase(read_only=False).clear_region(x, y, p, recording_region_id)
 
     def _create_message_to_send(self, size, vertex, region):
         """ Creates a single message to send with the given boundaries.
@@ -573,7 +573,7 @@ def __python_get_data_for_placements(self, recording_placements): len(recording_placements), "Extracting buffers from the last run") - db = BufferDatabase() + db = BufferDatabase(read_only=False) for placement in progress.over(recording_placements): self._retreive_by_placement(db, placement) @@ -595,7 +595,7 @@ def get_data_by_placement(self, placement, recording_region_id): "so no data read".format(placement.vertex)) with self._thread_lock_buffer_out: # data flush has been completed - return appropriate data - return BufferDatabase().get_region_data( + return BufferDatabase(read_only=True).get_region_data( placement.x, placement.y, placement.p, recording_region_id) def _retreive_by_placement(self, db, placement): diff --git a/spinn_front_end_common/interface/buffer_management/storage_objects/sqllite_database.py b/spinn_front_end_common/interface/buffer_management/storage_objects/sqllite_database.py index 70b0c1021f..f33ea712ab 100644 --- a/spinn_front_end_common/interface/buffer_management/storage_objects/sqllite_database.py +++ b/spinn_front_end_common/interface/buffer_management/storage_objects/sqllite_database.py @@ -47,7 +47,7 @@ class BufferDatabase(SQLiteDB, AbstractContextManager): __slots__ = [] - def __init__(self, database_file=None): + def __init__(self, database_file=None, read_only=False): """ :param str database_file: The name of a file that contains (or will contain) an SQLite @@ -57,7 +57,11 @@ def __init__(self, database_file=None): if database_file is None: database_file = self.default_database_file() - super().__init__(database_file, ddl_file=_DDL_FILE) + if read_only: + super().__init__(database_file, read_only=False) + else: + super().__init__( + database_file, read_only=False, ddl_file=_DDL_FILE) @classmethod def default_database_file(cls): From 1d374580f3e1e2ab1ad5976af69aeeca2f8a76bb Mon Sep 17 00:00:00 2001 From: "Christian Y. Brenninkmeijer" Date: Mon, 24 Oct 2022 16:06:24 +0100 Subject: [PATCH 17/49] buffer sqlite path as a param --- .../interface/java_caller.py | 42 +++++++++---------- 1 file changed, 19 insertions(+), 23 deletions(-) diff --git a/spinn_front_end_common/interface/java_caller.py b/spinn_front_end_common/interface/java_caller.py index fc8a6f4f48..1f8c765ea8 100644 --- a/spinn_front_end_common/interface/java_caller.py +++ b/spinn_front_end_common/interface/java_caller.py @@ -28,7 +28,8 @@ from spinn_front_end_common.utilities.exceptions import ConfigurationException from spinn_front_end_common.interface.buffer_management.buffer_models import ( AbstractReceiveBuffersToHost) - +from spinn_front_end_common.interface.buffer_management.storage_objects \ + import BufferDatabase logger = FormatAdapter(logging.getLogger(__name__)) @@ -44,8 +45,6 @@ class JavaCaller(object): __slots__ = [ "_chipxy_by_ethernet", - # The folder holding sqlite databases etc. - "_report_folder", # The call to get java to work. Including the path if required. "_java_call", # The location of the java jar file @@ -72,7 +71,6 @@ def __init__(self): :raise ConfigurationException: if simple parameter checking fails. """ self._recording = None - self._report_folder = FecDataView.get_run_dir_path() self._java_call = get_config_str("Java", "java_call") result = subprocess.call([self._java_call, '-version']) if result != 0: @@ -175,15 +173,6 @@ def _machine_json(self): self._machine_json_path = write_json_machine(progress_bar=False) return self._machine_json_path - def set_report_folder(self, report_folder): - """ Passes the database file in. 
- - :param str report_folder: - Path to directory with SQLite databases and into which java will - write. - """ - self._report_folder = report_folder - def set_placements(self, used_placements): """ Passes in the placements leaving this class to decide pass it to Java. @@ -356,13 +345,16 @@ def get_all_data(self): if self._gatherer_iptags is None: result = self._run_java( 'download', self._placement_json, self._machine_json(), - self._report_folder) + BufferDatabase.default_database_file(), + FecDataView.get_run_dir_path()) else: result = self._run_java( 'gather', self._placement_json, self._machine_json(), - self._report_folder) + BufferDatabase.default_database_file(), + FecDataView.get_run_dir_path()) if result != 0: - log_file = os.path.join(self._report_folder, "jspin.log") + log_file = os.path.join( + FecDataView.get_run_dir_path(), "jspin.log") raise PacmanExternalAlgorithmFailedToCompleteException( "Java call exited with value " + str(result) + " see " + str(log_file) + " for logged info") @@ -374,9 +366,10 @@ def execute_data_specification(self): On failure of the Java code. """ result = self._run_java( - 'dse', self._machine_json(), self._report_folder) + 'dse', self._machine_json(), FecDataView.get_run_dir_path()) if result != 0: - log_file = os.path.join(self._report_folder, "jspin.log") + log_file = os.path.join( + FecDataView.get_run_dir_path(), "jspin.log") raise PacmanExternalAlgorithmFailedToCompleteException( "Java call exited with value " + str(result) + " see " + str(log_file) + " for logged info") @@ -389,9 +382,10 @@ def execute_system_data_specification(self): On failure of the Java code. """ result = self._run_java( - 'dse_sys', self._machine_json(), self._report_folder) + 'dse_sys', self._machine_json(), FecDataView.get_run_dir_path()) if result != 0: - log_file = os.path.join(self._report_folder, "jspin.log") + log_file = os.path.join( + FecDataView.get_run_dir_path(), "jspin.log") raise PacmanExternalAlgorithmFailedToCompleteException( "Java call exited with value " + str(result) + " see " + str(log_file) + " for logged info") @@ -411,12 +405,14 @@ def execute_app_data_specification(self, use_monitors): if use_monitors: result = self._run_java( 'dse_app_mon', self._placement_json, self._machine_json(), - self._report_folder, self._report_folder) + FecDataView.get_run_dir_path(), FecDataView.get_run_dir_path()) else: result = self._run_java( - 'dse_app', self._machine_json(), self._report_folder) + 'dse_app', self._machine_json(), + FecDataView.get_run_dir_path()) if result != 0: - log_file = os.path.join(self._report_folder, "jspin.log") + log_file = os.path.join( + FecDataView.get_run_dir_path(), "jspin.log") raise PacmanExternalAlgorithmFailedToCompleteException( "Java call exited with value " + str(result) + " see " + str(log_file) + " for logged info") From d44e9884a031e212edb75d764dd3037d812b9d66 Mon Sep 17 00:00:00 2001 From: "Christian Y. 
Brenninkmeijer" Date: Mon, 24 Oct 2022 16:31:39 +0100 Subject: [PATCH 18/49] file name to match class name --- .../storage_objects/{sqllite_database.py => buffer_database.py} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename spinn_front_end_common/interface/buffer_management/storage_objects/{sqllite_database.py => buffer_database.py} (100%) diff --git a/spinn_front_end_common/interface/buffer_management/storage_objects/sqllite_database.py b/spinn_front_end_common/interface/buffer_management/storage_objects/buffer_database.py similarity index 100% rename from spinn_front_end_common/interface/buffer_management/storage_objects/sqllite_database.py rename to spinn_front_end_common/interface/buffer_management/storage_objects/buffer_database.py From d59aa88af0e461a61a361836b58bf55ea4af23e2 Mon Sep 17 00:00:00 2001 From: "Christian Y. Brenninkmeijer" Date: Mon, 24 Oct 2022 16:38:18 +0100 Subject: [PATCH 19/49] Add BaseDatabase --- .../storage_objects/base_database.py | 95 +++++++++++++++++++ .../storage_objects/buffer_database.py | 51 +--------- 2 files changed, 97 insertions(+), 49 deletions(-) create mode 100644 spinn_front_end_common/interface/buffer_management/storage_objects/base_database.py diff --git a/spinn_front_end_common/interface/buffer_management/storage_objects/base_database.py b/spinn_front_end_common/interface/buffer_management/storage_objects/base_database.py new file mode 100644 index 0000000000..d1f7560fe2 --- /dev/null +++ b/spinn_front_end_common/interface/buffer_management/storage_objects/base_database.py @@ -0,0 +1,95 @@ +# Copyright (c) 2017-2019 The University of Manchester +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . + +import os +import sqlite3 +import time +from spinn_utilities.abstract_context_manager import AbstractContextManager +from spinn_front_end_common.data import FecDataView +from spinn_front_end_common.utilities.sqlite_db import SQLiteDB + +_DDL_FILE = os.path.join(os.path.dirname(__file__), "db.sql") +_SECONDS_TO_MICRO_SECONDS_CONVERSION = 1000 +#: Name of the database in the data folder + + +def _timestamp(): + return int(time.time() * _SECONDS_TO_MICRO_SECONDS_CONVERSION) + + +class BaseDatabase(SQLiteDB, AbstractContextManager): + """ Specific implementation of the Database for SQLite 3. + + There should only ever be a single Database Object in use at any time. + In the case of application_graph_changed the first should closed and + a new one created. + + If 2 database objects where opened with the database_file they hold the + same data. Unless someone else deletes that file. + + + .. note:: + *Not thread safe on the same database file!* + Threads can access different DBs just fine. + """ + + __slots__ = [] + + def __init__(self, database_file=None, read_only=False): + """ + :param str database_file: + The name of a file that contains (or will contain) an SQLite + database holding the data. + If omitted the default location will be used. 
+ """ + if database_file is None: + database_file = self.default_database_file() + + if read_only: + super().__init__(database_file, read_only=False) + else: + super().__init__( + database_file, read_only=False, ddl_file=_DDL_FILE) + + @classmethod + def default_database_file(cls): + if FecDataView.get_reset_number(): + return os.path.join( + FecDataView.get_run_dir_path(), + f"buffer{FecDataView.get_reset_number()}.sqlite3") + return os.path.join( + FecDataView.get_run_dir_path(), "buffer.sqlite3") + + @staticmethod + def __get_core_id(cursor, x, y, p): + """ + :param ~sqlite3.Cursor cursor: + :param int x: + :param int y: + :param int p: + :rtype: int + """ + for row in cursor.execute( + """ + SELECT core_id FROM region_view + WHERE x = ? AND y = ? AND processor = ? + LIMIT 1 + """, (x, y, p)): + return row["core_id"] + cursor.execute( + """ + INSERT INTO core(x, y, processor) VALUES(?, ?, ?) + """, (x, y, p)) + return cursor.lastrowid diff --git a/spinn_front_end_common/interface/buffer_management/storage_objects/buffer_database.py b/spinn_front_end_common/interface/buffer_management/storage_objects/buffer_database.py index f33ea712ab..e74f340b3f 100644 --- a/spinn_front_end_common/interface/buffer_management/storage_objects/buffer_database.py +++ b/spinn_front_end_common/interface/buffer_management/storage_objects/buffer_database.py @@ -16,9 +16,8 @@ import os import sqlite3 import time -from spinn_utilities.abstract_context_manager import AbstractContextManager from spinn_front_end_common.data import FecDataView -from spinn_front_end_common.utilities.sqlite_db import SQLiteDB +from .base_database import BaseDatabase _DDL_FILE = os.path.join(os.path.dirname(__file__), "db.sql") _SECONDS_TO_MICRO_SECONDS_CONVERSION = 1000 @@ -29,7 +28,7 @@ def _timestamp(): return int(time.time() * _SECONDS_TO_MICRO_SECONDS_CONVERSION) -class BufferDatabase(SQLiteDB, AbstractContextManager): +class BufferDatabase(BaseDatabase): """ Specific implementation of the Database for SQLite 3. There should only ever be a single Database Object in use at any time. @@ -47,30 +46,6 @@ class BufferDatabase(SQLiteDB, AbstractContextManager): __slots__ = [] - def __init__(self, database_file=None, read_only=False): - """ - :param str database_file: - The name of a file that contains (or will contain) an SQLite - database holding the data. - If omitted the default location will be used. - """ - if database_file is None: - database_file = self.default_database_file() - - if read_only: - super().__init__(database_file, read_only=False) - else: - super().__init__( - database_file, read_only=False, ddl_file=_DDL_FILE) - - @classmethod - def default_database_file(cls): - if FecDataView.get_reset_number(): - return os.path.join( - FecDataView.get_run_dir_path(), - f"buffer{FecDataView.get_reset_number()}.sqlite3") - return os.path.join( - FecDataView.get_run_dir_path(), "buffer.sqlite3") def clear_region(self, x, y, p, region): """ Clears the data for a single region. @@ -155,28 +130,6 @@ def __read_contents(self, cursor, x, y, p, region): data = c_buffer return memoryview(data) - @staticmethod - def __get_core_id(cursor, x, y, p): - """ - :param ~sqlite3.Cursor cursor: - :param int x: - :param int y: - :param int p: - :rtype: int - """ - for row in cursor.execute( - """ - SELECT core_id FROM region_view - WHERE x = ? AND y = ? AND processor = ? - LIMIT 1 - """, (x, y, p)): - return row["core_id"] - cursor.execute( - """ - INSERT INTO core(x, y, processor) VALUES(?, ?, ?) 
- """, (x, y, p)) - return cursor.lastrowid - def __get_region_id(self, cursor, x, y, p, region): """ :param ~sqlite3.Cursor cursor: From af28dd6db0ab69b9f6979cbd02c67f27ed5b10ff Mon Sep 17 00:00:00 2001 From: "Christian Y. Brenninkmeijer" Date: Tue, 25 Oct 2022 07:22:57 +0100 Subject: [PATCH 20/49] dont use read only as database may not have been created if no extraction --- .../interface/buffer_management/buffer_manager.py | 6 +++--- .../buffer_management/storage_objects/__init__.py | 2 +- .../buffer_management/storage_objects/base_database.py | 8 ++------ 3 files changed, 6 insertions(+), 10 deletions(-) diff --git a/spinn_front_end_common/interface/buffer_management/buffer_manager.py b/spinn_front_end_common/interface/buffer_management/buffer_manager.py index f4230d275e..d71903bf42 100644 --- a/spinn_front_end_common/interface/buffer_management/buffer_manager.py +++ b/spinn_front_end_common/interface/buffer_management/buffer_manager.py @@ -320,7 +320,7 @@ def clear_recorded_data(self, x, y, p, recording_region_id): :param int p: placement p coordinate :param int recording_region_id: the recording region ID """ - BufferDatabase(read_only=False).clear_region(x, y, p, recording_region_id) + BufferDatabase().clear_region(x, y, p, recording_region_id) def _create_message_to_send(self, size, vertex, region): """ Creates a single message to send with the given boundaries. @@ -573,7 +573,7 @@ def __python_get_data_for_placements(self, recording_placements): len(recording_placements), "Extracting buffers from the last run") - db = BufferDatabase(read_only=False) + db = BufferDatabase() for placement in progress.over(recording_placements): self._retreive_by_placement(db, placement) @@ -595,7 +595,7 @@ def get_data_by_placement(self, placement, recording_region_id): "so no data read".format(placement.vertex)) with self._thread_lock_buffer_out: # data flush has been completed - return appropriate data - return BufferDatabase(read_only=True).get_region_data( + return BufferDatabase().get_region_data( placement.x, placement.y, placement.p, recording_region_id) def _retreive_by_placement(self, db, placement): diff --git a/spinn_front_end_common/interface/buffer_management/storage_objects/__init__.py b/spinn_front_end_common/interface/buffer_management/storage_objects/__init__.py index 8fe96807f3..e3f4d8211a 100644 --- a/spinn_front_end_common/interface/buffer_management/storage_objects/__init__.py +++ b/spinn_front_end_common/interface/buffer_management/storage_objects/__init__.py @@ -15,6 +15,6 @@ from .buffered_sending_region import BufferedSendingRegion from .buffers_sent_deque import BuffersSentDeque -from .sqllite_database import BufferDatabase +from .buffer_database import BufferDatabase __all__ = ["BufferedSendingRegion", "BuffersSentDeque", "BufferDatabase"] diff --git a/spinn_front_end_common/interface/buffer_management/storage_objects/base_database.py b/spinn_front_end_common/interface/buffer_management/storage_objects/base_database.py index d1f7560fe2..4644d672ee 100644 --- a/spinn_front_end_common/interface/buffer_management/storage_objects/base_database.py +++ b/spinn_front_end_common/interface/buffer_management/storage_objects/base_database.py @@ -47,7 +47,7 @@ class BaseDatabase(SQLiteDB, AbstractContextManager): __slots__ = [] - def __init__(self, database_file=None, read_only=False): + def __init__(self, database_file=None): """ :param str database_file: The name of a file that contains (or will contain) an SQLite @@ -57,11 +57,7 @@ def __init__(self, database_file=None, 
read_only=False): if database_file is None: database_file = self.default_database_file() - if read_only: - super().__init__(database_file, read_only=False) - else: - super().__init__( - database_file, read_only=False, ddl_file=_DDL_FILE) + super().__init__(database_file, ddl_file=_DDL_FILE) @classmethod def default_database_file(cls): From f6131ac7d429b44268bb76858eeebedda59384ae Mon Sep 17 00:00:00 2001 From: "Christian Y. Brenninkmeijer" Date: Tue, 25 Oct 2022 07:55:50 +0100 Subject: [PATCH 21/49] single underscore for inherited methods --- .../buffer_management/storage_objects/base_database.py | 2 +- .../buffer_management/storage_objects/buffer_database.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/spinn_front_end_common/interface/buffer_management/storage_objects/base_database.py b/spinn_front_end_common/interface/buffer_management/storage_objects/base_database.py index 4644d672ee..d87615c906 100644 --- a/spinn_front_end_common/interface/buffer_management/storage_objects/base_database.py +++ b/spinn_front_end_common/interface/buffer_management/storage_objects/base_database.py @@ -69,7 +69,7 @@ def default_database_file(cls): FecDataView.get_run_dir_path(), "buffer.sqlite3") @staticmethod - def __get_core_id(cursor, x, y, p): + def _get_core_id(cursor, x, y, p): """ :param ~sqlite3.Cursor cursor: :param int x: diff --git a/spinn_front_end_common/interface/buffer_management/storage_objects/buffer_database.py b/spinn_front_end_common/interface/buffer_management/storage_objects/buffer_database.py index e74f340b3f..9c0862f7f6 100644 --- a/spinn_front_end_common/interface/buffer_management/storage_objects/buffer_database.py +++ b/spinn_front_end_common/interface/buffer_management/storage_objects/buffer_database.py @@ -146,7 +146,7 @@ def __get_region_id(self, cursor, x, y, p, region): LIMIT 1 """, (x, y, p, region)): return row["region_id"] - core_id = self.__get_core_id(cursor, x, y, p) + core_id = self._get_core_id(cursor, x, y, p) cursor.execute( """ INSERT INTO region( From 039898845d98f6d934d8eae49236b6f235a3950f Mon Sep 17 00:00:00 2001 From: "Christian Y. Brenninkmeijer" Date: Tue, 25 Oct 2022 08:02:26 +0100 Subject: [PATCH 22/49] with BufferDatabase() --- .../buffer_management/buffer_manager.py | 14 ++++++----- .../test_buffered_database.py | 24 +++++++++---------- 2 files changed, 20 insertions(+), 18 deletions(-) diff --git a/spinn_front_end_common/interface/buffer_management/buffer_manager.py b/spinn_front_end_common/interface/buffer_management/buffer_manager.py index d71903bf42..5ed0684508 100644 --- a/spinn_front_end_common/interface/buffer_management/buffer_manager.py +++ b/spinn_front_end_common/interface/buffer_management/buffer_manager.py @@ -320,7 +320,8 @@ def clear_recorded_data(self, x, y, p, recording_region_id): :param int p: placement p coordinate :param int recording_region_id: the recording region ID """ - BufferDatabase().clear_region(x, y, p, recording_region_id) + with BufferDatabase() as db: + db.clear_region(x, y, p, recording_region_id) def _create_message_to_send(self, size, vertex, region): """ Creates a single message to send with the given boundaries. 
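The shape of the change in this patch and the surrounding ones is the same
everywhere: a bare BufferDatabase() whose connection was left to the garbage
collector becomes a with block that closes the underlying SQLite connection
deterministically. A minimal sketch of the intended calling pattern, assuming
only the BufferDatabase API visible in these hunks (a context manager, with
get_region_data returning a (data, missing) pair as the unit test below
checks); read_recorded_region is a hypothetical helper used for illustration:

    # Sketch only: assumes BufferDatabase as defined by this patch series,
    # i.e. a context manager whose exit closes the SQLite connection.
    from spinn_front_end_common.interface.buffer_management.storage_objects \
        import BufferDatabase

    def read_recorded_region(x, y, p, recording_region_id):
        # The with block guarantees the handle is closed even if the
        # read raises, which is the point of this change.
        with BufferDatabase() as db:
            return db.get_region_data(x, y, p, recording_region_id)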
@@ -573,9 +574,9 @@ def __python_get_data_for_placements(self, recording_placements): len(recording_placements), "Extracting buffers from the last run") - db = BufferDatabase() - for placement in progress.over(recording_placements): - self._retreive_by_placement(db, placement) + with BufferDatabase() as db: + for placement in progress.over(recording_placements): + self._retreive_by_placement(db, placement) def get_data_by_placement(self, placement, recording_region_id): """ Get the data container for all the data retrieved\ @@ -595,8 +596,9 @@ def get_data_by_placement(self, placement, recording_region_id): "so no data read".format(placement.vertex)) with self._thread_lock_buffer_out: # data flush has been completed - return appropriate data - return BufferDatabase().get_region_data( - placement.x, placement.y, placement.p, recording_region_id) + with BufferDatabase() as db: + return db.get_region_data( + placement.x, placement.y, placement.p, recording_region_id) def _retreive_by_placement(self, db, placement): """ Retrieve the data for a vertex; must be locked first. diff --git a/unittests/interface/buffer_management/test_buffered_database.py b/unittests/interface/buffer_management/test_buffered_database.py index d6e0cb0cac..e644f22201 100644 --- a/unittests/interface/buffer_management/test_buffered_database.py +++ b/unittests/interface/buffer_management/test_buffered_database.py @@ -29,19 +29,19 @@ def test_use_database(self): f = BufferDatabase.default_database_file() self.assertFalse(os.path.isfile(f), "no existing DB at first") - brd = BufferDatabase() - self.assertTrue(os.path.isfile(f), "DB now exists") + with BufferDatabase() as brd: + self.assertTrue(os.path.isfile(f), "DB now exists") - # TODO missing - # data, missing = brd.get_region_data(0, 0, 0, 0) - # self.assertTrue(missing, "data should be 'missing'") - # self.assertEqual(data, b"") + # TODO missing + # data, missing = brd.get_region_data(0, 0, 0, 0) + # self.assertTrue(missing, "data should be 'missing'") + # self.assertEqual(data, b"") - brd.store_data_in_region_buffer(0, 0, 0, 0, False, b"abc") - brd.store_data_in_region_buffer(0, 0, 0, 0, False, b"def") - data, missing = brd.get_region_data(0, 0, 0, 0) + brd.store_data_in_region_buffer(0, 0, 0, 0, False, b"abc") + brd.store_data_in_region_buffer(0, 0, 0, 0, False, b"def") + data, missing = brd.get_region_data(0, 0, 0, 0) - self.assertFalse(missing, "data shouldn't be 'missing'") - self.assertEqual(bytes(data), b"abcdef") + self.assertFalse(missing, "data shouldn't be 'missing'") + self.assertEqual(bytes(data), b"abcdef") - self.assertTrue(os.path.isfile(f), "DB still exists") + self.assertTrue(os.path.isfile(f), "DB still exists") From a0f30437f5ca82dd91d7fc639f8b31fa1a81acad Mon Sep 17 00:00:00 2001 From: "Christian Y. 
Brenninkmeijer" Date: Tue, 25 Oct 2022 10:16:52 +0100 Subject: [PATCH 23/49] with ProvenanceReader() as db --- .../compute_energy_used.py | 39 +++++---- .../interface_functions/spalloc_allocator.py | 3 +- .../interface/provenance/log_store_db.py | 3 +- .../interface/provenance/provenance_reader.py | 87 ++++++------------- .../bit_field_compressor_report.py | 37 ++++---- .../report_functions/energy_report.py | 10 +-- .../provenance/test_provenance_database.py | 80 ++++++++--------- unittests/utilities/test_fec_timer.py | 87 +++++++++---------- 8 files changed, 155 insertions(+), 191 deletions(-) diff --git a/spinn_front_end_common/interface/interface_functions/compute_energy_used.py b/spinn_front_end_common/interface/interface_functions/compute_energy_used.py index 70ad2aec28..c445a7cae6 100644 --- a/spinn_front_end_common/interface/interface_functions/compute_energy_used.py +++ b/spinn_front_end_common/interface/interface_functions/compute_energy_used.py @@ -71,14 +71,14 @@ def compute_energy_used(machine_allocation_controller=None): FecDataView.get_current_run_timesteps() * FecDataView.get_time_scale_factor()) machine = FecDataView.get_machine() - db = ProvenanceReader() - dsg_time = db.get_category_timer_sum(TimerCategory.DATA_GENERATION) - execute_time = db.get_category_timer_sum(TimerCategory.RUN_LOOP) - # NOTE: this extraction time is part of the execution time; it does not - # refer to the time taken in e.g. pop.get_data() or projection.get() - extraction_time = db.get_timer_sum_by_work(TimerWork.EXTRACT_DATA) - load_time = db.get_category_timer_sum(TimerCategory.LOADING) - mapping_time = db.get_category_timer_sum(TimerCategory.MAPPING) + with ProvenanceReader() as db: + dsg_time = db.get_category_timer_sum(TimerCategory.DATA_GENERATION) + execute_time = db.get_category_timer_sum(TimerCategory.RUN_LOOP) + # NOTE: this extraction time is part of the execution time; it does not + # refer to the time taken in e.g. 
pop.get_data() or projection.get() + extraction_time = db.get_timer_sum_by_work(TimerWork.EXTRACT_DATA) + load_time = db.get_category_timer_sum(TimerCategory.LOADING) + mapping_time = db.get_category_timer_sum(TimerCategory.MAPPING) # TODO get_machine not include here power_used = PowerUsed() @@ -187,13 +187,14 @@ def _router_packet_energy(power_used): :param PowerUsed power_used: """ energy_cost = 0.0 - for name, cost in _COST_PER_TYPE.items(): - data = ProvenanceReader().get_router_by_chip(name) - for (x, y, value) in data: - this_cost = value * cost - energy_cost += this_cost - if this_cost: - power_used.add_router_active_energy(x, y, this_cost) + with ProvenanceReader() as db: + for name, cost in _COST_PER_TYPE.items(): + data = db.get_router_by_chip(name) + for (x, y, value) in data: + this_cost = value * cost + energy_cost += this_cost + if this_cost: + power_used.add_router_active_energy(x, y, this_cost) power_used.packet_joules = energy_cost @@ -359,8 +360,8 @@ def _calculate_loading_energy(machine, load_time_ms, n_monitors, n_frames): # pylint: disable=too-many-arguments # find time in milliseconds - reader = ProvenanceReader() - total_time_ms = reader.get_timer_sum_by_category(TimerCategory.LOADING) + with ProvenanceReader() as db: + total_time_ms = db.get_timer_sum_by_category(TimerCategory.LOADING) # handle monitor core active cost @@ -405,8 +406,8 @@ def _calculate_data_extraction_energy(machine, n_monitors, n_frames): # find time # TODO is this what was desired total_time_ms = 0 - buffer_time_ms = ProvenanceReader().get_timer_sum_by_work( - TimerWork.EXTRACT_DATA) + with ProvenanceReader() as db: + buffer_time_ms = db.get_timer_sum_by_work(TimerWork.EXTRACT_DATA) energy_cost = 0 # NOTE: Buffer time could be None if nothing was set to record diff --git a/spinn_front_end_common/interface/interface_functions/spalloc_allocator.py b/spinn_front_end_common/interface/interface_functions/spalloc_allocator.py index 05b3c15e7b..32de40d6dc 100644 --- a/spinn_front_end_common/interface/interface_functions/spalloc_allocator.py +++ b/spinn_front_end_common/interface/interface_functions/spalloc_allocator.py @@ -166,7 +166,8 @@ def _launch_checked_job(n_boards, spalloc_kw_args): connections = job.connections info = str(connections).replace("{", "[").replace("}", "]") logger.info("boards: " + info) - ProvenanceWriter().insert_board_provenance(connections) + with ProvenanceWriter() as db: + db.insert_board_provenance(connections) if hostname in avoid_boards: avoid_jobs.append(job) logger.warning( diff --git a/spinn_front_end_common/interface/provenance/log_store_db.py b/spinn_front_end_common/interface/provenance/log_store_db.py index a4b1e55e79..80dd449813 100644 --- a/spinn_front_end_common/interface/provenance/log_store_db.py +++ b/spinn_front_end_common/interface/provenance/log_store_db.py @@ -37,7 +37,8 @@ def store_log(self, level, message, timestamp=None): @overrides(LogStore.retreive_log_messages) def retreive_log_messages(self, min_level=0): - return ProvenanceReader().retreive_log_messages(min_level) + with ProvenanceReader() as db: + return db.retreive_log_messages(min_level) @overrides(LogStore.get_location) def get_location(self): diff --git a/spinn_front_end_common/interface/provenance/provenance_reader.py b/spinn_front_end_common/interface/provenance/provenance_reader.py index cb12669d80..e5f3c9cd03 100644 --- a/spinn_front_end_common/interface/provenance/provenance_reader.py +++ b/spinn_front_end_common/interface/provenance/provenance_reader.py @@ -20,7 +20,7 @@ from 
spinn_front_end_common.utilities.sqlite_db import SQLiteDB -class ProvenanceReader(object): +class ProvenanceReader(SQLiteDB): """ Provides a connection to a database containing provenance for the current run and some convenience methods for extracting provenance data from it. @@ -70,46 +70,12 @@ def __init__(self, provenance_data_path=None): self._provenance_data_path = provenance_data_path else: self._provenance_data_path = self.get_last_run_database_path() - - def get_database_handle(self, read_only=True, use_sqlite_rows=False): - """ - Gets a handle to the open database. - - You *should* use this as a Python context handler. A typical usage - pattern is this:: - - with reader.get_database_handler() as db: - with db.transaction() as cursor: - for row in cursor.execute(...): - # process row - - .. note:: - This method is mainly provided as a support method for the later - methods that return specific data. For new IntergationTests - please add a specific method rather than call this directly. - - .. warning:: - It is the callers responsibility to close the database. - The recommended usage is therefore a ``with`` statement - - :param bool read_only: If true will return a readonly database - :param bool use_sqlite_rows: - If ``True`` the results of :py:meth:`run_query` will be - :py:class:`~sqlite3.Row`\\ s. - If ``False`` the results of :py:meth:`run_query` will be - :py:class:`tuple`\\ s. - :return: an open sqlite3 connection - :rtype: SQLiteDB - """ if not os.path.exists(self._provenance_data_path): raise Exception(f"no such DB: {self._provenance_data_path}") - db = SQLiteDB(self._provenance_data_path, read_only=read_only, - row_factory=(sqlite3.Row if use_sqlite_rows else None), - text_factory=None) - return db + SQLiteDB.__init__(self, self._provenance_data_path, read_only=True, + row_factory=None, text_factory=None) - def run_query( - self, query, params=(), read_only=True, use_sqlite_rows=False): + def run_query(self, query, params=()): """ Opens a connection to the database, runs a query, extracts the results and closes the connection @@ -139,12 +105,9 @@ def run_query( statement :rtype: list(tuple or ~sqlite3.Row) """ - if not os.path.exists(self._provenance_data_path): - raise Exception("no such DB: " + self._provenance_data_path) results = [] - with self.get_database_handle(read_only, use_sqlite_rows) as db: - with db.transaction() as cur: - for row in cur.execute(query, params): + with self.transaction() as cur: + for row in cur.execute(query, params): results.append(row) return results @@ -448,25 +411,25 @@ def demo(): See also unittests/interface/provenance/test_provenance_database.py """ # This uses the example file in the same directory as this script - pr = ProvenanceReader(os.path.join( - os.path.dirname(__file__), "provenance.sqlite3")) - print("DIRECT QUERY:") - query = """ - SELECT x, y, the_value - FROM router_provenance - WHERE description = 'Local_P2P_Packets' - """ - results = pr.run_query(query) - for row in results: - print(row) - print("\nCORES WITH LATE SPIKES:") - print(pr.cores_with_late_spikes()) - print("\nRUN TIME OF BUFFER EXTRACTOR:") - print(pr.get_run_time_of_BufferExtractor()) - print("\nROUETER (0,0) PROVENANCE:") - print(pr.get_provenance_for_router(0, 0)) - print("\nCORES WITH PROVENACE") - print(pr.get_cores_with_provenace()) + with ProvenanceReader(os.path.join( + os.path.dirname(__file__), "provenance.sqlite3")) as pr: + print("DIRECT QUERY:") + query = """ + SELECT x, y, the_value + FROM router_provenance + WHERE description = 
'Local_P2P_Packets' + """ + results = pr.run_query(query) + for row in results: + print(row) + print("\nCORES WITH LATE SPIKES:") + print(pr.cores_with_late_spikes()) + print("\nRUN TIME OF BUFFER EXTRACTOR:") + print(pr.get_run_time_of_BufferExtractor()) + print("\nROUETER (0,0) PROVENANCE:") + print(pr.get_provenance_for_router(0, 0)) + print("\nCORES WITH PROVENACE") + print(pr.get_cores_with_provenace()) if __name__ == '__main__': diff --git a/spinn_front_end_common/utilities/report_functions/bit_field_compressor_report.py b/spinn_front_end_common/utilities/report_functions/bit_field_compressor_report.py index c5ea95ba8f..bd76079a60 100644 --- a/spinn_front_end_common/utilities/report_functions/bit_field_compressor_report.py +++ b/spinn_front_end_common/utilities/report_functions/bit_field_compressor_report.py @@ -84,24 +84,25 @@ def _merged_component(to_merge_per_chip, writer): to_merge_chips = set(to_merge_per_chip.keys()) found = False - for (x, y, merged) in ProvenanceReader().get_router_by_chip( - MERGED_NAME): - if (x, y) not in to_merge_per_chip: - continue - to_merge = to_merge_per_chip[x, y] - to_merge_chips.discard((x, y)) - found = True - writer.write( - "Chip {}:{} has {} bitfields out of {} merged into it." - " Which is {:.2%}\n".format( - x, y, merged, to_merge, merged / to_merge)) - total_bit_fields_merged += int(merged) - if merged > top_bit_field: - top_bit_field = merged - if merged < min_bit_field: - min_bit_field = merged - average_per_chip_merged += merged - n_chips += 1 + with ProvenanceReader() as db: + for (x, y, merged) in db.get_router_by_chip( + MERGED_NAME): + if (x, y) not in to_merge_per_chip: + continue + to_merge = to_merge_per_chip[x, y] + to_merge_chips.discard((x, y)) + found = True + writer.write( + "Chip {}:{} has {} bitfields out of {} merged into it." 
+ " Which is {:.2%}\n".format( + x, y, merged, to_merge, merged / to_merge)) + total_bit_fields_merged += int(merged) + if merged > top_bit_field: + top_bit_field = merged + if merged < min_bit_field: + min_bit_field = merged + average_per_chip_merged += merged + n_chips += 1 if found: average_per_chip_merged = ( diff --git a/spinn_front_end_common/utilities/report_functions/energy_report.py b/spinn_front_end_common/utilities/report_functions/energy_report.py index 43af0ddb65..74b4dbcca5 100644 --- a/spinn_front_end_common/utilities/report_functions/energy_report.py +++ b/spinn_front_end_common/utilities/report_functions/energy_report.py @@ -309,8 +309,8 @@ def _write_load_time_cost(power_used, f): """ # find time in milliseconds - reader = ProvenanceReader() - total_time_ms = reader.get_timer_sum_by_category(TimerCategory.LOADING) + with ProvenanceReader() as db: + total_time_ms = db.get_timer_sum_by_category(TimerCategory.LOADING) # handle active routers etc active_router_cost = ( @@ -336,9 +336,9 @@ def _write_data_extraction_time_cost(power_used, f): """ # find time - reader = ProvenanceReader() - total_time_ms = reader.get_timer_sum_by_algorithm( - FecTimer.APPLICATION_RUNNER) + with ProvenanceReader() as db: + total_time_ms = db.get_timer_sum_by_algorithm( + FecTimer.APPLICATION_RUNNER) # handle active routers etc energy_cost_of_active_router = ( diff --git a/unittests/interface/provenance/test_provenance_database.py b/unittests/interface/provenance/test_provenance_database.py index b2e781cf12..c04702420d 100644 --- a/unittests/interface/provenance/test_provenance_database.py +++ b/unittests/interface/provenance/test_provenance_database.py @@ -47,7 +47,8 @@ def test_version(self): with ProvenanceWriter() as db: db.insert_version("spinn_utilities_version", "1!6.0.1") db.insert_version("numpy_version", "1.17.4") - data = ProvenanceReader().run_query("select * from version_provenance") + with ProvenanceReader() as db: + data = db.run_query("select * from version_provenance") versions = [ (1, 'spinn_utilities_version', '1!6.0.1'), (2, 'numpy_version', '1.17.4')] @@ -57,9 +58,10 @@ def test_power(self): with ProvenanceWriter() as db: db.insert_power("num_cores", 34) db.insert_power("total time (seconds)", 6.81) - data = ProvenanceReader().run_query("select * from power_provenance") - power = [(1, 'num_cores', 34.0), (2, 'total time (seconds)', 6.81)] - self.assertListEqual(data, power) + with ProvenanceReader() as db: + data = db.run_query("select * from power_provenance") + power = [(1, 'num_cores', 34.0), (2, 'total time (seconds)', 6.81)] + self.assertListEqual(data, power) def test_timings(self): with ProvenanceWriter() as db: @@ -80,19 +82,19 @@ def test_timings(self): db.insert_timing( execute_id, "clear", TimerWork.OTHER, timedelta(milliseconds=4), None) - reader = ProvenanceReader() - data = reader.get_timer_sum_by_category(TimerCategory.MAPPING) - self.assertEqual(12 + 123, data) - data = reader.get_timer_sum_by_category(TimerCategory.RUN_LOOP) - self.assertEqual(134 + 344 + 4, data) - data = reader.get_timer_sum_by_category(TimerCategory.SHUTTING_DOWN) - self.assertEquals(0, data) - data = reader.get_timer_sum_by_algorithm("router_report") - self.assertEqual(123, data) - data = reader.get_timer_sum_by_algorithm("clear") - self.assertEqual(4, data) - data = reader.get_timer_sum_by_algorithm("junk") - self.assertEqual(0, data) + with ProvenanceReader() as db: + data = db.get_timer_sum_by_category(TimerCategory.MAPPING) + self.assertEqual(12 + 123, data) + data = 
db.get_timer_sum_by_category(TimerCategory.RUN_LOOP) + self.assertEqual(134 + 344 + 4, data) + data = db.get_timer_sum_by_category(TimerCategory.SHUTTING_DOWN) + self.assertEquals(0, data) + data = db.get_timer_sum_by_algorithm("router_report") + self.assertEqual(123, data) + data = db.get_timer_sum_by_algorithm("clear") + self.assertEqual(4, data) + data = db.get_timer_sum_by_algorithm("junk") + self.assertEqual(0, data) def test_category_timings(self): with ProvenanceWriter() as db: @@ -108,8 +110,8 @@ def test_category_timings(self): id = db.insert_category(TimerCategory.RUN_LOOP, False) db.insert_category_timing(id, timedelta(milliseconds=344)) - reader = ProvenanceReader() - data = reader.get_category_timer_sum(TimerCategory.MAPPING) + with ProvenanceReader() as db: + data = db.get_category_timer_sum(TimerCategory.MAPPING) self.assertEqual(12 + 123, data) def test_other(self): @@ -122,8 +124,8 @@ def test_gatherer(self): 1, 3, 1715886360, 80, 1, "Extraction_time", 00.234) db.insert_gatherer( 1, 3, 1715886360, 80, 1, "Lost Packets", 12) - reader = ProvenanceReader() - data = reader.run_query("Select * from gatherer_provenance") + with ProvenanceReader() as db: + data = db.run_query("Select * from gatherer_provenance") expected = [(1, 1, 3, 1715886360, 80, 1, 'Extraction_time', 0.234), (2, 1, 3, 1715886360, 80, 1, 'Lost Packets', 12.0)] self.assertListEqual(expected, data) @@ -135,12 +137,12 @@ def test_router(self): db.insert_router(1, 3, "des2", 67) db.insert_router(1, 3, "des1", 48) db.insert_router(5, 5, "des1", 48, False) - reader = ProvenanceReader() - data = set(reader.get_router_by_chip("des1")) - chip_set = {(1, 3, 34), (1, 2, 45), (1, 3, 48), (5, 5, 48)} - self.assertSetEqual(data, chip_set) - data = reader.get_router_by_chip("junk") - self.assertEqual(0, len(data)) + with ProvenanceReader() as db: + data = set(db.get_router_by_chip("des1")) + chip_set = {(1, 3, 34), (1, 2, 45), (1, 3, 48), (5, 5, 48)} + self.assertSetEqual(data, chip_set) + data = db.get_router_by_chip("junk") + self.assertEqual(0, len(data)) def test_monitor(self): with ProvenanceWriter() as db: @@ -148,12 +150,12 @@ def test_monitor(self): db.insert_monitor(1, 2, "des1", 45) db.insert_monitor(1, 3, "des2", 67) db.insert_monitor(1, 3, "des1", 48) - reader = ProvenanceReader() - data = set(reader.get_monitor_by_chip("des1")) - chip_set = {(1, 3, 34), (1, 2, 45), (1, 3, 48)} - self.assertSetEqual(data, chip_set) - data = reader.get_monitor_by_chip("junk") - self.assertEqual(0, len(data)) + with ProvenanceReader() as db: + data = set(db.get_monitor_by_chip("des1")) + chip_set = {(1, 3, 34), (1, 2, 45), (1, 3, 48)} + self.assertSetEqual(data, chip_set) + data = db.get_monitor_by_chip("junk") + self.assertEqual(0, len(data)) def test_cores(self): with ProvenanceWriter() as db: @@ -168,8 +170,8 @@ def test_core_name(self): db.add_core_name(1, 3, 3, "second_core") db.add_core_name(1, 3, 2, "first_core") db.add_core_name(1, 3, 2, "new_name is ignored") - reader = ProvenanceReader() - data = reader.run_query("Select * from core_mapping") + with ProvenanceReader() as db: + data = db.run_query("Select * from core_mapping") self.assertEqual(2, len(data)) def test_messages(self): @@ -182,15 +184,15 @@ def test_messages(self): db.insert_report("vier") self.assertEqual(3, len(lc.records)) - reader = ProvenanceReader() - data = reader.messages() + with ProvenanceReader() as db: + data = db.messages() self.assertEqual(4, len(data)) def test_connector(self): with ProvenanceWriter() as db: db.insert_connector("the pre", "A 
post", "OneToOne", "foo", 12) - reader = ProvenanceReader() - data = reader.run_query("Select * from connector_provenance") + with ProvenanceReader() as db: + data = db.run_query("Select * from connector_provenance") expected = [(1, 'the pre', 'A post', 'OneToOne', 'foo', 12)] self.assertListEqual(expected, data) diff --git a/unittests/utilities/test_fec_timer.py b/unittests/utilities/test_fec_timer.py index 4874d1be2e..03b7d22f68 100644 --- a/unittests/utilities/test_fec_timer.py +++ b/unittests/utilities/test_fec_timer.py @@ -74,17 +74,15 @@ def test_nested(self): FecTimer.end_category(TimerCategory.GET_MACHINE) FecTimer.end_category(TimerCategory.MAPPING) FecTimer.end_category(TimerCategory.RUN_OTHER) - on, off = ProvenanceReader().get_category_timer_sums( - TimerCategory.RUN_OTHER) - total = ProvenanceReader().get_category_timer_sum( - TimerCategory.RUN_OTHER) - self.assertGreater(on, 0) - self.assertGreater(off, 0) - self.assertEqual(total, on + off) - on, off = ProvenanceReader().get_category_timer_sums( - TimerCategory.MAPPING) - self.assertGreater(on, 0) - self.assertGreater(off, 0) + with ProvenanceReader() as db: + on, off = db.get_category_timer_sums(TimerCategory.RUN_OTHER) + total = db.get_category_timer_sum(TimerCategory.RUN_OTHER) + self.assertGreater(on, 0) + self.assertGreater(off, 0) + self.assertEqual(total, on + off) + on, off = db.get_category_timer_sums(TimerCategory.MAPPING) + self.assertGreater(on, 0) + self.assertGreater(off, 0) def test_repeat_middle(self): FecTimer.start_category(TimerCategory.WAITING) @@ -107,13 +105,14 @@ def test_repeat_stopped(self): FecTimer.start_category(TimerCategory.WAITING) FecTimer.start_category(TimerCategory.SHUTTING_DOWN) FecTimer.start_category(TimerCategory.SHUTTING_DOWN) - total = ProvenanceReader().get_category_timer_sum( - TimerCategory.SHUTTING_DOWN) - self.assertEqual(total, 0) - FecTimer.stop_category_timing() - total = ProvenanceReader().get_category_timer_sum( - TimerCategory.SHUTTING_DOWN) - self.assertGreater(total, 0) + with ProvenanceReader() as db: + total = db.get_category_timer_sum( + TimerCategory.SHUTTING_DOWN) + self.assertEqual(total, 0) + FecTimer.stop_category_timing() + total = db.get_category_timer_sum( + TimerCategory.SHUTTING_DOWN) + self.assertGreater(total, 0) def test_repeat_mess(self): FecTimer.start_category(TimerCategory.WAITING) @@ -136,38 +135,34 @@ def test_mess(self): def test_stop_category_timing_clean(self): FecTimer.start_category(TimerCategory.WAITING) FecTimer.start_category(TimerCategory.RUN_OTHER) - before = ProvenanceReader().get_category_timer_sum( - TimerCategory.WAITING) - FecTimer.start_category(TimerCategory.MAPPING) - FecTimer.end_category(TimerCategory.MAPPING) - FecTimer.end_category(TimerCategory.RUN_OTHER) - FecTimer.stop_category_timing() - total = ProvenanceReader().get_category_timer_sum( - TimerCategory.WAITING) - self.assertGreater(total, before) - other = ProvenanceReader().get_category_timer_sum( - TimerCategory.RUN_OTHER) - self.assertGreater(other, 0) + with ProvenanceReader() as db: + before = db.get_category_timer_sum(TimerCategory.WAITING) + FecTimer.start_category(TimerCategory.MAPPING) + FecTimer.end_category(TimerCategory.MAPPING) + FecTimer.end_category(TimerCategory.RUN_OTHER) + FecTimer.stop_category_timing() + total = db.get_category_timer_sum(TimerCategory.WAITING) + self.assertGreater(total, before) + other = db.get_category_timer_sum(TimerCategory.RUN_OTHER) + self.assertGreater(other, 0) def test_stop_category_timing_messy(self): 
FecTimer.start_category(TimerCategory.WAITING) FecTimer.start_category(TimerCategory.RUN_OTHER) - before = ProvenanceReader().get_category_timer_sum( - TimerCategory.WAITING) - FecTimer.start_category(TimerCategory.MAPPING) - FecTimer.start_category(TimerCategory.SHUTTING_DOWN) - FecTimer.end_category(TimerCategory.SHUTTING_DOWN) - FecTimer.stop_category_timing() - mapping = ProvenanceReader().get_category_timer_sum( - TimerCategory.MAPPING) - self.assertGreater(mapping, 0) - total = ProvenanceReader().get_category_timer_sum( - TimerCategory.WAITING) - # As we never ended RUN_OTHER we never got back to WAITING - self.assertEqual(total, before) - other = ProvenanceReader().get_category_timer_sum( - TimerCategory.RUN_OTHER) - self.assertGreater(other, 0) + with ProvenanceReader() as db: + before = db.get_category_timer_sum(TimerCategory.WAITING) + FecTimer.start_category(TimerCategory.MAPPING) + FecTimer.start_category(TimerCategory.SHUTTING_DOWN) + FecTimer.end_category(TimerCategory.SHUTTING_DOWN) + FecTimer.stop_category_timing() + mapping = db.get_category_timer_sum(TimerCategory.MAPPING) + self.assertGreater(mapping, 0) + total = db.get_category_timer_sum(TimerCategory.WAITING) + # As we never ended RUN_OTHER we never got back to WAITING + self.assertEqual(total, before) + other = ProvenanceReader().get_category_timer_sum( + TimerCategory.RUN_OTHER) + self.assertGreater(other, 0) def test_stop_last_category_blocked(self): FecTimer.start_category(TimerCategory.WAITING) From 96ff3e743fb516e9d1660b792fe68064f798ba2e Mon Sep 17 00:00:00 2001 From: "Christian Y. Brenninkmeijer" Date: Tue, 25 Oct 2022 10:27:10 +0100 Subject: [PATCH 24/49] flake8 --- .../interface/provenance/provenance_reader.py | 37 +++++++++---------- 1 file changed, 18 insertions(+), 19 deletions(-) diff --git a/spinn_front_end_common/interface/provenance/provenance_reader.py b/spinn_front_end_common/interface/provenance/provenance_reader.py index e5f3c9cd03..3ea58f4a10 100644 --- a/spinn_front_end_common/interface/provenance/provenance_reader.py +++ b/spinn_front_end_common/interface/provenance/provenance_reader.py @@ -14,7 +14,6 @@ # along with this program. If not, see . 
import os -import sqlite3 from spinn_front_end_common.data import FecDataView from spinn_front_end_common.utilities.constants import PROVENANCE_DB from spinn_front_end_common.utilities.sqlite_db import SQLiteDB @@ -108,7 +107,7 @@ def run_query(self, query, params=()): results = [] with self.transaction() as cur: for row in cur.execute(query, params): - results.append(row) + results.append(row) return results def cores_with_late_spikes(self): @@ -413,23 +412,23 @@ def demo(): # This uses the example file in the same directory as this script with ProvenanceReader(os.path.join( os.path.dirname(__file__), "provenance.sqlite3")) as pr: - print("DIRECT QUERY:") - query = """ - SELECT x, y, the_value - FROM router_provenance - WHERE description = 'Local_P2P_Packets' - """ - results = pr.run_query(query) - for row in results: - print(row) - print("\nCORES WITH LATE SPIKES:") - print(pr.cores_with_late_spikes()) - print("\nRUN TIME OF BUFFER EXTRACTOR:") - print(pr.get_run_time_of_BufferExtractor()) - print("\nROUETER (0,0) PROVENANCE:") - print(pr.get_provenance_for_router(0, 0)) - print("\nCORES WITH PROVENACE") - print(pr.get_cores_with_provenace()) + print("DIRECT QUERY:") + query = """ + SELECT x, y, the_value + FROM router_provenance + WHERE description = 'Local_P2P_Packets' + """ + results = pr.run_query(query) + for row in results: + print(row) + print("\nCORES WITH LATE SPIKES:") + print(pr.cores_with_late_spikes()) + print("\nRUN TIME OF BUFFER EXTRACTOR:") + print(pr.get_run_time_of_BufferExtractor()) + print("\nROUETER (0,0) PROVENANCE:") + print(pr.get_provenance_for_router(0, 0)) + print("\nCORES WITH PROVENACE") + print(pr.get_cores_with_provenace()) if __name__ == '__main__': From 58af9ed645094d4e3457ed9814a5615fc90a52fe Mon Sep 17 00:00:00 2001 From: "Christian Y. Brenninkmeijer" Date: Tue, 25 Oct 2022 12:49:53 +0100 Subject: [PATCH 25/49] global provenance --- .../interface/abstract_spinnaker_base.py | 4 +- .../compute_energy_used.py | 8 +- .../interface/provenance/__init__.py | 2 + .../interface/provenance/fec_timer.py | 13 +- .../interface/provenance/global_provenance.py | 416 ++++++++++++++++++ .../interface/provenance/log_store_db.py | 8 +- .../interface/provenance/provenance_reader.py | 185 -------- .../interface/provenance/provenance_writer.py | 98 ----- .../report_functions/energy_report.py | 6 +- .../provenance/test_provenance_database.py | 21 +- unittests/utilities/test_fec_timer.py | 13 +- 11 files changed, 452 insertions(+), 322 deletions(-) create mode 100644 spinn_front_end_common/interface/provenance/global_provenance.py diff --git a/spinn_front_end_common/interface/abstract_spinnaker_base.py b/spinn_front_end_common/interface/abstract_spinnaker_base.py index f11fac6a12..a23c4a448d 100644 --- a/spinn_front_end_common/interface/abstract_spinnaker_base.py +++ b/spinn_front_end_common/interface/abstract_spinnaker_base.py @@ -98,7 +98,7 @@ host_no_bitfield_router_compression import ( ordered_covering_compression, pair_compression) from spinn_front_end_common.interface.provenance import ( - FecTimer, ProvenanceWriter, TimerCategory, TimerWork) + FecTimer, GlobalProvenance, TimerCategory, TimerWork) from spinn_front_end_common.interface.splitter_selectors import ( splitter_selector) from spinn_front_end_common.interface.java_caller import JavaCaller @@ -721,7 +721,7 @@ def _get_machine(self): def _create_version_provenance(self): """ Add the version information to the provenance data at the start. 
""" - with ProvenanceWriter() as db: + with GlobalProvenance() as db: db.insert_version("spinn_utilities_version", spinn_utils_version) db.insert_version("spinn_machine_version", spinn_machine_version) db.insert_version("spalloc_version", spalloc_version) diff --git a/spinn_front_end_common/interface/interface_functions/compute_energy_used.py b/spinn_front_end_common/interface/interface_functions/compute_energy_used.py index c445a7cae6..15456051d0 100644 --- a/spinn_front_end_common/interface/interface_functions/compute_energy_used.py +++ b/spinn_front_end_common/interface/interface_functions/compute_energy_used.py @@ -17,7 +17,7 @@ from spinn_utilities.config_holder import (get_config_int, get_config_str) from spinn_front_end_common.data import FecDataView from spinn_front_end_common.interface.provenance import ( - ProvenanceReader, TimerCategory, TimerWork) + GlobalProvenance, ProvenanceReader, TimerCategory, TimerWork) from spinn_front_end_common.utilities.utility_objs import PowerUsed from spinn_front_end_common.utility_models import ( ChipPowerMonitorMachineVertex) @@ -71,7 +71,7 @@ def compute_energy_used(machine_allocation_controller=None): FecDataView.get_current_run_timesteps() * FecDataView.get_time_scale_factor()) machine = FecDataView.get_machine() - with ProvenanceReader() as db: + with GlobalProvenance() as db: dsg_time = db.get_category_timer_sum(TimerCategory.DATA_GENERATION) execute_time = db.get_category_timer_sum(TimerCategory.RUN_LOOP) # NOTE: this extraction time is part of the execution time; it does not @@ -360,7 +360,7 @@ def _calculate_loading_energy(machine, load_time_ms, n_monitors, n_frames): # pylint: disable=too-many-arguments # find time in milliseconds - with ProvenanceReader() as db: + with GlobalProvenance() as db: total_time_ms = db.get_timer_sum_by_category(TimerCategory.LOADING) # handle monitor core active cost @@ -406,7 +406,7 @@ def _calculate_data_extraction_energy(machine, n_monitors, n_frames): # find time # TODO is this what was desired total_time_ms = 0 - with ProvenanceReader() as db: + with GlobalProvenance() as db: buffer_time_ms = db.get_timer_sum_by_work(TimerWork.EXTRACT_DATA) energy_cost = 0 diff --git a/spinn_front_end_common/interface/provenance/__init__.py b/spinn_front_end_common/interface/provenance/__init__.py index 7139b535a6..96a307f0ec 100644 --- a/spinn_front_end_common/interface/provenance/__init__.py +++ b/spinn_front_end_common/interface/provenance/__init__.py @@ -18,6 +18,7 @@ from .abstract_provides_provenance_data_from_machine import ( AbstractProvidesProvenanceDataFromMachine) from .fec_timer import FecTimer +from .global_provenance import GlobalProvenance from .log_store_db import LogStoreDB from .provenance_reader import ProvenanceReader from .provides_provenance_data_from_machine_impl import ( @@ -27,6 +28,7 @@ from .timer_work import TimerWork __all__ = ["AbstractProvidesLocalProvenanceData", "FecTimer", + "GlobalProvenance", "AbstractProvidesProvenanceDataFromMachine", "LogStoreDB", "ProvenanceReader", "ProvenanceWriter", "ProvidesProvenanceDataFromMachineImpl", diff --git a/spinn_front_end_common/interface/provenance/fec_timer.py b/spinn_front_end_common/interface/provenance/fec_timer.py index 7056bc4095..3c20ce08ab 100644 --- a/spinn_front_end_common/interface/provenance/fec_timer.py +++ b/spinn_front_end_common/interface/provenance/fec_timer.py @@ -20,8 +20,7 @@ from spinn_utilities.config_holder import (get_config_bool) from spinn_utilities.log import FormatAdapter from spinn_front_end_common.data import 
FecDataView -from spinn_front_end_common.interface.provenance.provenance_writer import ( - ProvenanceWriter) +from .global_provenance import GlobalProvenance logger = FormatAdapter(logging.getLogger(__name__)) @@ -108,7 +107,7 @@ def _report(self, message): def skip(self, reason): message = f"{self._algorithm} skipped as {reason}" timedelta = self._stop_timer() - with ProvenanceWriter() as db: + with GlobalProvenance() as db: db.insert_timing(self._category_id, self._algorithm, self._work, timedelta, reason) self._report(message) @@ -157,7 +156,7 @@ def skip_if_cfgs_false(self, section, option1, option2): def error(self, reason): timedelta = self._stop_timer() message = f"{self._algorithm} failed after {timedelta} as {reason}" - with ProvenanceWriter() as db: + with GlobalProvenance() as db: db.insert_timing(self._category_id, self._algorithm, self._work, timedelta, reason) self._report(message) @@ -190,7 +189,7 @@ def __exit__(self, exc_type, exc_value, traceback): f"after {timedelta}" skip = f"Exception {ex}" - with ProvenanceWriter() as db: + with GlobalProvenance() as db: db.insert_timing(self._category_id, self._algorithm, self._work, timedelta, skip) self._report(message) @@ -205,7 +204,7 @@ def __stop_category(cls): """ time_now = _now() if cls._category_id: - with ProvenanceWriter() as db: + with GlobalProvenance() as db: diff = _convert_to_timedelta(time_now - cls._category_time) db.insert_category_timing(cls._category_id, diff) return time_now @@ -218,7 +217,7 @@ def _change_category(cls, category): :param TimerCategory category: Category to switch to """ time_now = cls.__stop_category() - with ProvenanceWriter() as db: + with GlobalProvenance() as db: cls._category_id = db.insert_category(category, cls._machine_on) cls._category = category cls._category_time = time_now diff --git a/spinn_front_end_common/interface/provenance/global_provenance.py b/spinn_front_end_common/interface/provenance/global_provenance.py new file mode 100644 index 0000000000..537b5b38af --- /dev/null +++ b/spinn_front_end_common/interface/provenance/global_provenance.py @@ -0,0 +1,416 @@ +# Copyright (c) 2017-2022 The University of Manchester +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . + +from datetime import datetime +import logging +import os +import re +from spinn_utilities.config_holder import get_config_int +from spinn_utilities.log import FormatAdapter +from spinn_front_end_common.data import FecDataView +from spinn_front_end_common.utilities.constants import ( + MICRO_TO_MILLISECOND_CONVERSION, PROVENANCE_DB) +from spinn_front_end_common.utilities.sqlite_db import SQLiteDB + +logger = FormatAdapter(logging.getLogger(__name__)) + +_DDL_FILE = os.path.join(os.path.dirname(__file__), "db.sql") +_RE = re.compile(r"(\d+)([_,:])(\d+)(?:\2(\d+))?") + + +class GlobalProvenance(SQLiteDB): + """ Specific implementation of the Database for SQLite 3. + + .. 
note:: + *Not thread safe on the same database file.* + Threads can access different DBs just fine. + + .. note:: + This totally relies on the way SQLite's type affinities function. + You can't port to a different database engine without a lot of work. + """ + + __slots__ = [ + "_database_file" + ] + + @classmethod + def get_global_provenace_path(cls): + """ Get the path of the current provenance database of the last run + + .. warning:: + Calling this method between start/reset and run may result in a + path to a database not yet created. + + :raises ValueError: + if the system is in a state where path can't be retrieved, + for example before run is called + """ + return os.path.join( + FecDataView.get_provenance_dir_path(), + "global_provenance.sqlite3") + + def __init__(self, database_file=None, memory=False): + """ + :param database_file: + The name of a file that contains (or will contain) an SQLite + database holding the data. + If omitted, either the default file path or an unshared in-memory + database will be used (suitable only for testing). + :type database_file: str or None + :param bool memory: + Flag to say unshared in-memory can be used. + Otherwise a None file will mean the default should be used + + """ + if database_file is None and not memory: + database_file = self.get_global_provenace_path() + self._database_file = database_file + SQLiteDB.__init__(self, database_file, ddl_file=_DDL_FILE, + row_factory=None, text_factory=None) + + def insert_version(self, description, the_value): + """ + Inserts data into the version_provenance table + + :param str description: The package for which the version applies + :param str the_value: The version to be recorded + """ + with self.transaction() as cur: + cur.execute( + """ + INSERT INTO version_provenance( + description, the_value) + VALUES(?, ?) + """, [description, the_value]) + + def insert_category(self, category, machine_on): + """ + Inserts category into the category_timer_provenance returning id + + :param TimerCategory category: Name of Category starting + :param bool machine_on: If the machine was done during all + or some of the time + """ + with self.transaction() as cur: + cur.execute( + """ + INSERT INTO category_timer_provenance( + category, machine_on, n_run, n_loop) + VALUES(?, ?, ?, ?) + """, + [category.category_name, machine_on, + FecDataView.get_run_number(), + FecDataView.get_run_step()]) + return cur.lastrowid + + def insert_category_timing(self, category_id, timedelta): + """ + Inserts run time into the category + + :param int category_id: id of the Category finished + :param ~datetime.timedelta timedelta: Time to be recorded + """ + time_taken = ( + (timedelta.seconds * MICRO_TO_MILLISECOND_CONVERSION) + + (timedelta.microseconds / MICRO_TO_MILLISECOND_CONVERSION)) + + with self.transaction() as cur: + cur.execute( + """ + UPDATE category_timer_provenance + SET + time_taken = ? + WHERE category_id = ? 
+                """, (time_taken, category_id))
+
+    def insert_timing(
+            self, category, algorithm, work, timedelta, skip_reason):
+        """
+        Inserts an algorithm's run time into the timer_provenance table
+
+        :param int category: Category Id of the Algorithm
+        :param str algorithm: Algorithm name
+        :param TimerWork work: Type of work being done
+        :param ~datetime.timedelta timedelta: Time to be recorded
+        :param skip_reason: The reason the algorithm was skipped, or None
+            if it was not skipped
+        :type skip_reason: str or None
+        """
+        time_taken = (
+            (timedelta.seconds * MICRO_TO_MILLISECOND_CONVERSION) +
+            (timedelta.microseconds / MICRO_TO_MILLISECOND_CONVERSION))
+        with self.transaction() as cur:
+            cur.execute(
+                """
+                INSERT INTO timer_provenance(
+                    category_id, algorithm, work, time_taken, skip_reason)
+                VALUES(?, ?, ?, ?, ?)
+                """,
+                [category, algorithm, work.work_name, time_taken, skip_reason])
+
+    def store_log(self, level, message, timestamp=None):
+        """
+        Stores a log message in the database
+
+        :param int level:
+        :param str message:
+        """
+        if timestamp is None:
+            timestamp = datetime.now()
+        with self.transaction() as cur:
+            cur.execute(
+                """
+                INSERT INTO p_log_provenance(
+                    timestamp, level, message)
+                VALUES(?, ?, ?)
+                """,
+                [timestamp, level, message])
+
+    def _test_log_locked(self, text):
+        """
+        THIS IS A TESTING METHOD.
+
+        This will lock the database and then try to write a log message.
+        """
+        with self.transaction() as cur:
+            # lock the database
+            cur.execute(
+                """
+                INSERT INTO reports(message)
+                VALUES(?)
+                """, [text])
+            cur.lastrowid
+            # try logging and storing while locked.
+            logger.warning(text)
+
+    def run_query(self, query, params=()):
+        """
+        Runs a query against the database and returns all matching rows
+
+        The results are returned as a list of tuples, one per row, with
+        the values looked up by index in the order of the columns in the
+        ``SELECT`` clause (use ``AS name`` in the query to make the
+        column order explicit).
+
+        This method is intended for read queries; changes to the database
+        should go through the specific ``insert_*`` and ``store_*`` methods.
+
+        .. note::
+            This method is mainly provided as a support method for the later
+            methods that return specific data. For new integration tests
+            please add a specific method rather than call this directly.
+
+        :param str query: The SQL query to be run. May include ``?`` wildcards
+        :param ~collections.abc.Iterable(str or int) params:
+            The values to replace the ``?`` wildcards with.
+            The number and types must match what the query expects
+        :return: A possibly empty list of tuples
+            (one for each row in the database)
+            where the number and type of the values correspond to the
+            columns in the ``SELECT`` statement
+        :rtype: list(tuple)
+        """
+        results = []
+        with self.transaction() as cur:
+            for row in cur.execute(query, params):
+                results.append(row)
+        return results
+
+    def get_timer_provenance(self, algorithm):
+        """
+        Gets the timer provenance item(s) from the last run
+
+        :param str algorithm:
+            The value to LIKE search for in the algorithm column.
+            Can be the full name, or have ``%`` and ``_`` wildcards.
+        :return:
+            A possibly multiline string with, for each row that matches
+            the LIKE pattern, a line ``algorithm: time_taken``
+        :rtype: str
+        """
+        query = """
+            SELECT algorithm, time_taken
+            FROM timer_provenance
+            WHERE algorithm LIKE ?
+ """ + return "\n".join( + f"{row[0]}: {row[1]}" + for row in self.run_query(query, [algorithm])) + + def get_run_times(self): + """ + Gets the algorithm running times from the last run. If an algorithm is + invoked multiple times in the run, its times are summed. + + :return: + A possibly multiline string with for each row which matches the + like a line ``description_name: time``. The times are in seconds. + :rtype: str + """ + # We know the database actually stores microseconds for durations + query = """ + SELECT description, SUM(time_taken) / 1000000.0 + FROM timer_provenance + GROUP BY description + ORDER BY the_value + """ + return "\n".join( + f"{row[0].replace('_', ' ')}: {row[1]} s" + for row in self.run_query(query)) + + def get_run_time_of_BufferExtractor(self): + """ + Gets the BufferExtractor provenance item(s) from the last run + + :return: + A possibly multiline string with for each row which matches the + like %BufferExtractor description_name: value + :rtype: str + """ + return self.get_timer_provenance("%BufferExtractor") + + def get_category_timer_sum(self, category): + """ + Get the total runtime for one category of algorithms + + :param TimerCategory category: + :return: total off all runtimes with this category + :rtype: int + """ + query = """ + SELECT sum(time_taken) + FROM category_timer_provenance + WHERE category = ? + """ + data = self.run_query(query, [category.category_name]) + try: + info = data[0][0] + if info is None: + return 0 + return info + except IndexError: + return 0 + + def get_category_timer_sums(self, category): + """ + Get the runtime for one category of algorithms + split machine on, machine off + + :param TimerCategory category: + :return: total on and off time of instances with this category + :rtype: int + """ + on = 0 + off = 0 + query = """ + SELECT sum(time_taken), machine_on + FROM category_timer_provenance + WHERE category = ? + GROUP BY machine_on + """ + try: + for data in self.run_query(query, [category.category_name]): + if data[1]: + on = data[0] + else: + off = data[0] + except IndexError: + pass + return on, off + + def get_timer_sum_by_category(self, category): + """ + Get the total runtime for one category of algorithms + + :param TimerCategory category: + :return: total off all runtimes with this category + :rtype: int + """ + query = """ + SELECT sum(time_taken) + FROM full_timer_view + WHERE category = ? + """ + data = self.run_query(query, [category.category_name]) + try: + info = data[0][0] + if info is None: + return 0 + return info + except IndexError: + return 0 + + def get_timer_sum_by_work(self, work): + """ + Get the total runtime for one work type of algorithms + + :param TimerWork work: + :return: total off all runtimes with this category + :rtype: int + """ + query = """ + SELECT sum(time_taken) + FROM full_timer_view + WHERE work = ? + """ + data = self.run_query(query, [work.work_name]) + try: + info = data[0][0] + if info is None: + return 0 + return info + except IndexError: + return 0 + + def get_timer_sum_by_algorithm(self, algorithm): + """ + Get the total runtime for one algorithm + + :param str algorithm: + :return: total off all runtimes with this algorithm + :rtype: int + """ + query = """ + SELECT sum(time_taken) + FROM timer_provenance + WHERE algorithm = ? 
+ """ + data = self.run_query(query, [algorithm]) + try: + info = data[0][0] + if info is None: + return 0 + return info + except IndexError: + return 0 + + def retreive_log_messages(self, min_level=0): + """ + Retrieves all log messages at or above the min_level + + :param int min_level: + :rtype: list(tuple(int, str)) + """ + query = """ + SELECT message + FROM p_log_provenance + WHERE level >= ? + """ + messages = self.run_query(query, [min_level]) + return list(map(lambda x: x[0], messages)) diff --git a/spinn_front_end_common/interface/provenance/log_store_db.py b/spinn_front_end_common/interface/provenance/log_store_db.py index 80dd449813..91fea177fb 100644 --- a/spinn_front_end_common/interface/provenance/log_store_db.py +++ b/spinn_front_end_common/interface/provenance/log_store_db.py @@ -16,7 +16,7 @@ import sqlite3 from spinn_utilities.log_store import LogStore from spinn_utilities.overrides import overrides -from .provenance_writer import ProvenanceWriter +from .global_provenance import GlobalProvenance from .provenance_reader import ProvenanceReader @@ -25,7 +25,7 @@ class LogStoreDB(LogStore): @overrides(LogStore.store_log) def store_log(self, level, message, timestamp=None): try: - with ProvenanceWriter() as db: + with GlobalProvenance() as db: db.store_log(level, message, timestamp) except sqlite3.OperationalError as ex: if "database is locked" in ex.args: @@ -37,9 +37,9 @@ def store_log(self, level, message, timestamp=None): @overrides(LogStore.retreive_log_messages) def retreive_log_messages(self, min_level=0): - with ProvenanceReader() as db: + with GlobalProvenance() as db: return db.retreive_log_messages(min_level) @overrides(LogStore.get_location) def get_location(self): - return ProvenanceReader.get_last_run_database_path() + return GlobalProvenance.get_global_provenace_path() diff --git a/spinn_front_end_common/interface/provenance/provenance_reader.py b/spinn_front_end_common/interface/provenance/provenance_reader.py index 3ea58f4a10..e9a9338d72 100644 --- a/spinn_front_end_common/interface/provenance/provenance_reader.py +++ b/spinn_front_end_common/interface/provenance/provenance_reader.py @@ -128,59 +128,6 @@ def cores_with_late_spikes(self): """ return self.run_query(query) - def get_timer_provenance(self, algorithm): - """ - Gets the timer provenance item(s) from the last run - - :param str algorithm: - The value to LIKE search for in the algorithm column. - Can be the full name, or have ``%`` and ``_`` wildcards. - :return: - A possibly multiline string with for each row which matches the - like a line ``algorithm: value`` - :rtype: str - """ - query = """ - SELECT algorithm, time_taken - FROM timer_provenance - WHERE algorithm LIKE ? - """ - return "\n".join( - f"{row[0]}: {row[1]}" - for row in self.run_query(query, [algorithm])) - - def get_run_times(self): - """ - Gets the algorithm running times from the last run. If an algorithm is - invoked multiple times in the run, its times are summed. - - :return: - A possibly multiline string with for each row which matches the - like a line ``description_name: time``. The times are in seconds. 
- :rtype: str - """ - # We know the database actually stores microseconds for durations - query = """ - SELECT description, SUM(time_taken) / 1000000.0 - FROM timer_provenance - GROUP BY description - ORDER BY the_value - """ - return "\n".join( - f"{row[0].replace('_', ' ')}: {row[1]} s" - for row in self.run_query(query)) - - def get_run_time_of_BufferExtractor(self): - """ - Gets the BufferExtractor provenance item(s) from the last run - - :return: - A possibly multiline string with for each row which matches the - like %BufferExtractor description_name: value - :rtype: str - """ - return self.get_timer_provenance("%BufferExtractor") - def get_provenance_for_router(self, x, y): """ Gets the provenance item(s) from the last run relating to a chip @@ -260,121 +207,6 @@ def get_monitor_by_chip(self, description): except IndexError: return None - def get_category_timer_sum(self, category): - """ - Get the total runtime for one category of algorithms - - :param TimerCategory category: - :return: total off all runtimes with this category - :rtype: int - """ - query = """ - SELECT sum(time_taken) - FROM category_timer_provenance - WHERE category = ? - """ - data = self.run_query(query, [category.category_name]) - try: - info = data[0][0] - if info is None: - return 0 - return info - except IndexError: - return 0 - - def get_category_timer_sums(self, category): - """ - Get the runtime for one category of algorithms - split machine on, machine off - - :param TimerCategory category: - :return: total on and off time of instances with this category - :rtype: int - """ - on = 0 - off = 0 - query = """ - SELECT sum(time_taken), machine_on - FROM category_timer_provenance - WHERE category = ? - GROUP BY machine_on - """ - try: - for data in self.run_query(query, [category.category_name]): - if data[1]: - on = data[0] - else: - off = data[0] - except IndexError: - pass - return on, off - - def get_timer_sum_by_category(self, category): - """ - Get the total runtime for one category of algorithms - - :param TimerCategory category: - :return: total off all runtimes with this category - :rtype: int - """ - query = """ - SELECT sum(time_taken) - FROM full_timer_view - WHERE category = ? - """ - data = self.run_query(query, [category.category_name]) - try: - info = data[0][0] - if info is None: - return 0 - return info - except IndexError: - return 0 - - def get_timer_sum_by_work(self, work): - """ - Get the total runtime for one work type of algorithms - - :param TimerWork work: - :return: total off all runtimes with this category - :rtype: int - """ - query = """ - SELECT sum(time_taken) - FROM full_timer_view - WHERE work = ? - """ - data = self.run_query(query, [work.work_name]) - try: - info = data[0][0] - if info is None: - return 0 - return info - except IndexError: - return 0 - - def get_timer_sum_by_algorithm(self, algorithm): - """ - Get the total runtime for one algorithm - - :param str algorithm: - :return: total off all runtimes with this algorithm - :rtype: int - """ - query = """ - SELECT sum(time_taken) - FROM timer_provenance - WHERE algorithm = ? 
- """ - data = self.run_query(query, [algorithm]) - try: - info = data[0][0] - if info is None: - return 0 - return info - except IndexError: - return 0 - def messages(self): """ List all the provenance messages @@ -388,21 +220,6 @@ def messages(self): """ return self.run_query(query, []) - def retreive_log_messages(self, min_level=0): - """ - Retrieves all log messages at or above the min_level - - :param int min_level: - :rtype: list(tuple(int, str)) - """ - query = """ - SELECT message - FROM p_log_provenance - WHERE level >= ? - """ - messages = self.run_query(query, [min_level]) - return list(map(lambda x: x[0], messages)) - @staticmethod def demo(): """ A demonstration of how to use this class. @@ -423,8 +240,6 @@ def demo(): print(row) print("\nCORES WITH LATE SPIKES:") print(pr.cores_with_late_spikes()) - print("\nRUN TIME OF BUFFER EXTRACTOR:") - print(pr.get_run_time_of_BufferExtractor()) print("\nROUETER (0,0) PROVENANCE:") print(pr.get_provenance_for_router(0, 0)) print("\nCORES WITH PROVENACE") diff --git a/spinn_front_end_common/interface/provenance/provenance_writer.py b/spinn_front_end_common/interface/provenance/provenance_writer.py index a2796b60cc..eed0d60ea4 100644 --- a/spinn_front_end_common/interface/provenance/provenance_writer.py +++ b/spinn_front_end_common/interface/provenance/provenance_writer.py @@ -65,21 +65,6 @@ def __init__(self, database_file=None, memory=False): self._database_file = database_file SQLiteDB.__init__(self, database_file, ddl_file=_DDL_FILE) - def insert_version(self, description, the_value): - """ - Inserts data into the version_provenance table - - :param str description: The package for which the version applies - :param str the_value: The version to be recorded - """ - with self.transaction() as cur: - cur.execute( - """ - INSERT INTO version_provenance( - description, the_value) - VALUES(?, ?) - """, [description, the_value]) - def insert_power(self, description, the_value): """ Inserts a general power value into the power_provenane table @@ -95,71 +80,6 @@ def insert_power(self, description, the_value): VALUES(?, ?) """, [description, the_value]) - def insert_category(self, category, machine_on): - """ - Inserts category into the category_timer_provenance returning id - - :param TimerCategory category: Name of Category starting - :param bool machine_on: If the machine was done during all - or some of the time - """ - with self.transaction() as cur: - cur.execute( - """ - INSERT INTO category_timer_provenance( - category, machine_on, n_run, n_loop) - VALUES(?, ?, ?, ?) - """, - [category.category_name, machine_on, - FecDataView.get_run_number(), - FecDataView.get_run_step()]) - return cur.lastrowid - - def insert_category_timing(self, category_id, timedelta): - """ - Inserts run time into the category - - :param int category_id: id of the Category finished - :param ~datetime.timedelta timedelta: Time to be recorded - """ - time_taken = ( - (timedelta.seconds * MICRO_TO_MILLISECOND_CONVERSION) + - (timedelta.microseconds / MICRO_TO_MILLISECOND_CONVERSION)) - - with self.transaction() as cur: - cur.execute( - """ - UPDATE category_timer_provenance - SET - time_taken = ? - WHERE category_id = ? 
- """, (time_taken, category_id)) - - def insert_timing( - self, category, algorithm, work, timedelta, skip_reason): - """ - Inserts algorithms run times into the timer_provenance table - - :param int category: Category Id of the Algorithm - :param str algorithm: Algorithm name - :param TimerWork work: Type of work being done - :param ~datetime.timedelta timedelta: Time to be recorded - :param skip_reason: The reason the algorthm was skipped or None if - it was not skipped - :tpye skip_reason: str or None - """ - time_taken = ( - (timedelta.seconds * MICRO_TO_MILLISECOND_CONVERSION) + - (timedelta.microseconds / MICRO_TO_MILLISECOND_CONVERSION)) - with self.transaction() as cur: - cur.execute( - """ - INSERT INTO timer_provenance( - category_id, algorithm, work, time_taken, skip_reason) - VALUES(?, ?, ?, ?, ?) - """, - [category, algorithm, work.work_name, time_taken, skip_reason]) - def insert_other(self, category, description, the_value): """ Insert unforeseen provenance into the other_provenace_table @@ -338,24 +258,6 @@ def insert_board_provenance(self, connections): """, ((x, y, ipaddress) for ((x, y), ipaddress) in connections.items())) - def store_log(self, level, message, timestamp=None): - """ - Stores log messages into the database - - :param int level: - :param str message: - """ - if timestamp is None: - timestamp = datetime.now() - with self.transaction() as cur: - cur.execute( - """ - INSERT INTO p_log_provenance( - timestamp, level, message) - VALUES(?, ?, ?) - """, - [timestamp, level, message]) - def _test_log_locked(self, text): """ THIS IS A TESTING METHOD. diff --git a/spinn_front_end_common/utilities/report_functions/energy_report.py b/spinn_front_end_common/utilities/report_functions/energy_report.py index 74b4dbcca5..f42beedc3b 100644 --- a/spinn_front_end_common/utilities/report_functions/energy_report.py +++ b/spinn_front_end_common/utilities/report_functions/energy_report.py @@ -20,7 +20,7 @@ from spinn_utilities.log import FormatAdapter from spinn_front_end_common.data import FecDataView from spinn_front_end_common.interface.provenance import ( - FecTimer, ProvenanceReader, TimerCategory) + FecTimer, GlobalProvenance, TimerCategory) from spinn_front_end_common.utility_models import ChipPowerMonitorMachineVertex from spinn_front_end_common.utilities.exceptions import ConfigurationException from spinn_front_end_common.interface.interface_functions.compute_energy_used\ @@ -309,7 +309,7 @@ def _write_load_time_cost(power_used, f): """ # find time in milliseconds - with ProvenanceReader() as db: + with GlobalProvenance() as db: total_time_ms = db.get_timer_sum_by_category(TimerCategory.LOADING) # handle active routers etc @@ -336,7 +336,7 @@ def _write_data_extraction_time_cost(power_used, f): """ # find time - with ProvenanceReader() as db: + with GlobalProvenance() as db: total_time_ms = db.get_timer_sum_by_algorithm( FecTimer.APPLICATION_RUNNER) diff --git a/unittests/interface/provenance/test_provenance_database.py b/unittests/interface/provenance/test_provenance_database.py index c04702420d..928c84a3f7 100644 --- a/unittests/interface/provenance/test_provenance_database.py +++ b/unittests/interface/provenance/test_provenance_database.py @@ -22,7 +22,7 @@ from spinn_utilities.config_holder import set_config from spinn_front_end_common.interface.config_setup import unittest_setup from spinn_front_end_common.interface.provenance import ( - LogStoreDB, ProvenanceWriter, ProvenanceReader, TimerCategory, TimerWork) + LogStoreDB, GlobalProvenance, ProvenanceWriter, 
ProvenanceReader, TimerCategory, TimerWork) logger = FormatAdapter(logging.getLogger(__name__)) @@ -44,15 +44,14 @@ def as_set(self, items): return results def test_version(self): - with ProvenanceWriter() as db: + with GlobalProvenance() as db: db.insert_version("spinn_utilities_version", "1!6.0.1") db.insert_version("numpy_version", "1.17.4") - with ProvenanceReader() as db: data = db.run_query("select * from version_provenance") - versions = [ - (1, 'spinn_utilities_version', '1!6.0.1'), - (2, 'numpy_version', '1.17.4')] - self.assertListEqual(data, versions) + versions = [ + (1, 'spinn_utilities_version', '1!6.0.1'), + (2, 'numpy_version', '1.17.4')] + self.assertListEqual(data, versions) def test_power(self): with ProvenanceWriter() as db: @@ -64,7 +63,7 @@ def test_power(self): self.assertListEqual(data, power) def test_timings(self): - with ProvenanceWriter() as db: + with GlobalProvenance() as db: mapping_id = db.insert_category(TimerCategory.MAPPING, False) db.insert_timing( mapping_id, "compressor", TimerWork.OTHER, @@ -82,7 +81,6 @@ def test_timings(self): db.insert_timing( execute_id, "clear", TimerWork.OTHER, timedelta(milliseconds=4), None) - with ProvenanceReader() as db: data = db.get_timer_sum_by_category(TimerCategory.MAPPING) self.assertEqual(12 + 123, data) data = db.get_timer_sum_by_category(TimerCategory.RUN_LOOP) @@ -97,7 +95,7 @@ def test_timings(self): self.assertEqual(0, data) def test_category_timings(self): - with ProvenanceWriter() as db: + with GlobalProvenance() as db: id = db.insert_category(TimerCategory.MAPPING, False) db.insert_category_timing(id, timedelta(milliseconds=12)) @@ -110,7 +108,6 @@ def test_category_timings(self): id = db.insert_category(TimerCategory.RUN_LOOP, False) db.insert_category_timing(id, timedelta(milliseconds=344)) - with ProvenanceReader() as db: data = db.get_category_timer_sum(TimerCategory.MAPPING) self.assertEqual(12 + 123, data) @@ -219,7 +216,7 @@ def test_database_locked(self): ls = LogStoreDB() logger.set_log_store(ls) logger.warning("this works") - with ProvenanceWriter() as db: + with GlobalProvenance() as db: db._test_log_locked("locked") logger.warning("not locked") logger.warning("this wis fine") diff --git a/unittests/utilities/test_fec_timer.py b/unittests/utilities/test_fec_timer.py index 03b7d22f68..20a32eb320 100644 --- a/unittests/utilities/test_fec_timer.py +++ b/unittests/utilities/test_fec_timer.py @@ -17,7 +17,7 @@ import unittest from testfixtures import LogCapture from spinn_front_end_common.interface.provenance import ( - FecTimer, ProvenanceReader, TimerCategory, TimerWork) + FecTimer, GlobalProvenance, TimerCategory, TimerWork) from spinn_front_end_common.interface.config_setup import unittest_setup @@ -74,7 +74,7 @@ def test_nested(self): FecTimer.end_category(TimerCategory.GET_MACHINE) FecTimer.end_category(TimerCategory.MAPPING) FecTimer.end_category(TimerCategory.RUN_OTHER) - with ProvenanceReader() as db: + with GlobalProvenance() as db: on, off = db.get_category_timer_sums(TimerCategory.RUN_OTHER) total = db.get_category_timer_sum(TimerCategory.RUN_OTHER) self.assertGreater(on, 0) @@ -105,7 +105,7 @@ def test_repeat_stopped(self): FecTimer.start_category(TimerCategory.WAITING) FecTimer.start_category(TimerCategory.SHUTTING_DOWN) FecTimer.start_category(TimerCategory.SHUTTING_DOWN) - with ProvenanceReader() as db: + with GlobalProvenance() as db: total = db.get_category_timer_sum( TimerCategory.SHUTTING_DOWN) self.assertEqual(total, 0) @@ -135,7 +135,7 @@ def test_mess(self): def 
test_stop_category_timing_clean(self): FecTimer.start_category(TimerCategory.WAITING) FecTimer.start_category(TimerCategory.RUN_OTHER) - with ProvenanceReader() as db: + with GlobalProvenance() as db: before = db.get_category_timer_sum(TimerCategory.WAITING) FecTimer.start_category(TimerCategory.MAPPING) FecTimer.end_category(TimerCategory.MAPPING) @@ -149,7 +149,7 @@ def test_stop_category_timing_clean(self): def test_stop_category_timing_messy(self): FecTimer.start_category(TimerCategory.WAITING) FecTimer.start_category(TimerCategory.RUN_OTHER) - with ProvenanceReader() as db: + with GlobalProvenance() as db: before = db.get_category_timer_sum(TimerCategory.WAITING) FecTimer.start_category(TimerCategory.MAPPING) FecTimer.start_category(TimerCategory.SHUTTING_DOWN) @@ -160,8 +160,7 @@ def test_stop_category_timing_messy(self): total = db.get_category_timer_sum(TimerCategory.WAITING) # As we never ended RUN_OTHER we never got back to WAITING self.assertEqual(total, before) - other = ProvenanceReader().get_category_timer_sum( - TimerCategory.RUN_OTHER) + other = db.get_category_timer_sum(TimerCategory.RUN_OTHER) self.assertGreater(other, 0) def test_stop_last_category_blocked(self): From 13a2ba32d8560c74349360323f17b8043f23e8fc Mon Sep 17 00:00:00 2001 From: "Christian Y. Brenninkmeijer" Date: Tue, 25 Oct 2022 13:13:28 +0100 Subject: [PATCH 26/49] NoProvenanceDatabaseException --- .../interface/provenance/provenance_reader.py | 5 ++++- spinn_front_end_common/utilities/exceptions.py | 5 +++++ .../bit_field_compressor_report.py | 14 ++++++++++---- 3 files changed, 19 insertions(+), 5 deletions(-) diff --git a/spinn_front_end_common/interface/provenance/provenance_reader.py b/spinn_front_end_common/interface/provenance/provenance_reader.py index e9a9338d72..5f423362da 100644 --- a/spinn_front_end_common/interface/provenance/provenance_reader.py +++ b/spinn_front_end_common/interface/provenance/provenance_reader.py @@ -16,6 +16,8 @@ import os from spinn_front_end_common.data import FecDataView from spinn_front_end_common.utilities.constants import PROVENANCE_DB +from spinn_front_end_common.utilities.exceptions import ( + NoProvenanceDatabaseException) from spinn_front_end_common.utilities.sqlite_db import SQLiteDB @@ -70,7 +72,8 @@ def __init__(self, provenance_data_path=None): else: self._provenance_data_path = self.get_last_run_database_path() if not os.path.exists(self._provenance_data_path): - raise Exception(f"no such DB: {self._provenance_data_path}") + raise NoProvenanceDatabaseException( + f"no such DB: {self._provenance_data_path}") SQLiteDB.__init__(self, self._provenance_data_path, read_only=True, row_factory=None, text_factory=None) diff --git a/spinn_front_end_common/utilities/exceptions.py b/spinn_front_end_common/utilities/exceptions.py index b7e5edf158..ad4b9af1e6 100644 --- a/spinn_front_end_common/utilities/exceptions.py +++ b/spinn_front_end_common/utilities/exceptions.py @@ -60,3 +60,8 @@ class BufferedRegionNotPresent(SpinnFrontEndException): class CantFindSDRAMToUseException(SpinnFrontEndException): """ Raised when malloc and sdram stealing cannot occur. """ + +class NoProvenanceDatabaseException(SpinnFrontEndException): + """ + Raised when the Provenance database has not yet been created. 
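+ + For example (illustrative): constructing a ProvenanceReader before any + run has created the database raises this exception rather than a + generic ``Exception``.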
+ """ \ No newline at end of file diff --git a/spinn_front_end_common/utilities/report_functions/bit_field_compressor_report.py b/spinn_front_end_common/utilities/report_functions/bit_field_compressor_report.py index bd76079a60..2b4ee03988 100644 --- a/spinn_front_end_common/utilities/report_functions/bit_field_compressor_report.py +++ b/spinn_front_end_common/utilities/report_functions/bit_field_compressor_report.py @@ -20,10 +20,12 @@ from spinn_utilities.log import FormatAdapter from spinn_front_end_common.abstract_models import AbstractHasAssociatedBinary from spinn_front_end_common.data import FecDataView -from spinn_front_end_common.interface.provenance import ProvenanceWriter +from spinn_front_end_common.interface.provenance import \ + (ProvenanceReader, ProvenanceWriter) from .bit_field_summary import BitFieldSummary +from spinn_front_end_common.utilities.exceptions import ( + NoProvenanceDatabaseException) from spinn_front_end_common.utilities.utility_objs import ExecutableType -from spinn_front_end_common.interface.provenance import ProvenanceReader logger = FormatAdapter(logging.getLogger(__name__)) _FILE_NAME = "bit_field_compressed_summary.rpt" @@ -56,11 +58,15 @@ def bitfield_compressor_report(): file_name = os.path.join(FecDataView.get_run_dir_path(), _FILE_NAME) try: with open(file_name, "w", encoding="utf-8") as f: - return _write_report(f) + _write_report(f) except IOError: logger.exception("Generate_placement_reports: Can't open file" " {} for writing.", _FILE_NAME) - return None + return + except NoProvenanceDatabaseException: + logger.exception( + "No proveance found to write bitfield_compressor_report") + return def _merged_component(to_merge_per_chip, writer): From a07f1bb0900f1430c662c97af9fb24581dbc126d Mon Sep 17 00:00:00 2001 From: "Christian Y. 
Brenninkmeijer" Date: Tue, 25 Oct 2022 14:00:48 +0100 Subject: [PATCH 27/49] flake8 --- .../interface/provenance/global_provenance.py | 5 ++- .../interface/provenance/log_store_db.py | 1 - .../interface/provenance/provenance_reader.py | 32 +++++++++---------- .../interface/provenance/provenance_writer.py | 4 +-- .../utilities/exceptions.py | 3 +- 5 files changed, 21 insertions(+), 24 deletions(-) diff --git a/spinn_front_end_common/interface/provenance/global_provenance.py b/spinn_front_end_common/interface/provenance/global_provenance.py index 537b5b38af..1f9d8aa9f2 100644 --- a/spinn_front_end_common/interface/provenance/global_provenance.py +++ b/spinn_front_end_common/interface/provenance/global_provenance.py @@ -17,11 +17,10 @@ import logging import os import re -from spinn_utilities.config_holder import get_config_int from spinn_utilities.log import FormatAdapter from spinn_front_end_common.data import FecDataView from spinn_front_end_common.utilities.constants import ( - MICRO_TO_MILLISECOND_CONVERSION, PROVENANCE_DB) + MICRO_TO_MILLISECOND_CONVERSION) from spinn_front_end_common.utilities.sqlite_db import SQLiteDB logger = FormatAdapter(logging.getLogger(__name__)) @@ -59,7 +58,7 @@ def get_global_provenace_path(cls): for example before run is called """ return os.path.join( - FecDataView.get_provenance_dir_path(), + FecDataView.get_provenance_dir_path(), "global_provenance.sqlite3") def __init__(self, database_file=None, memory=False): diff --git a/spinn_front_end_common/interface/provenance/log_store_db.py b/spinn_front_end_common/interface/provenance/log_store_db.py index 91fea177fb..8b330c0f1a 100644 --- a/spinn_front_end_common/interface/provenance/log_store_db.py +++ b/spinn_front_end_common/interface/provenance/log_store_db.py @@ -17,7 +17,6 @@ from spinn_utilities.log_store import LogStore from spinn_utilities.overrides import overrides from .global_provenance import GlobalProvenance -from .provenance_reader import ProvenanceReader class LogStoreDB(LogStore): diff --git a/spinn_front_end_common/interface/provenance/provenance_reader.py b/spinn_front_end_common/interface/provenance/provenance_reader.py index 5f423362da..95f7b6d672 100644 --- a/spinn_front_end_common/interface/provenance/provenance_reader.py +++ b/spinn_front_end_common/interface/provenance/provenance_reader.py @@ -231,22 +231,22 @@ def demo(): """ # This uses the example file in the same directory as this script with ProvenanceReader(os.path.join( - os.path.dirname(__file__), "provenance.sqlite3")) as pr: - print("DIRECT QUERY:") - query = """ - SELECT x, y, the_value - FROM router_provenance - WHERE description = 'Local_P2P_Packets' - """ - results = pr.run_query(query) - for row in results: - print(row) - print("\nCORES WITH LATE SPIKES:") - print(pr.cores_with_late_spikes()) - print("\nROUETER (0,0) PROVENANCE:") - print(pr.get_provenance_for_router(0, 0)) - print("\nCORES WITH PROVENACE") - print(pr.get_cores_with_provenace()) + os.path.dirname(__file__), "provenance.sqlite3")) as pr: + print("DIRECT QUERY:") + query = """ + SELECT x, y, the_value + FROM router_provenance + WHERE description = 'Local_P2P_Packets' + """ + results = pr.run_query(query) + for row in results: + print(row) + print("\nCORES WITH LATE SPIKES:") + print(pr.cores_with_late_spikes()) + print("\nROUETER (0,0) PROVENANCE:") + print(pr.get_provenance_for_router(0, 0)) + print("\nCORES WITH PROVENACE") + print(pr.get_cores_with_provenace()) if __name__ == '__main__': diff --git 
a/spinn_front_end_common/interface/provenance/provenance_writer.py b/spinn_front_end_common/interface/provenance/provenance_writer.py index eed0d60ea4..3fd22ba8f6 100644 --- a/spinn_front_end_common/interface/provenance/provenance_writer.py +++ b/spinn_front_end_common/interface/provenance/provenance_writer.py @@ -13,15 +13,13 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . -from datetime import datetime import logging import os import re from spinn_utilities.config_holder import get_config_int from spinn_utilities.log import FormatAdapter from spinn_front_end_common.data import FecDataView -from spinn_front_end_common.utilities.constants import ( - MICRO_TO_MILLISECOND_CONVERSION, PROVENANCE_DB) +from spinn_front_end_common.utilities.constants import (PROVENANCE_DB) from spinn_front_end_common.utilities.sqlite_db import SQLiteDB logger = FormatAdapter(logging.getLogger(__name__)) diff --git a/spinn_front_end_common/utilities/exceptions.py b/spinn_front_end_common/utilities/exceptions.py index ad4b9af1e6..29b6ba4a42 100644 --- a/spinn_front_end_common/utilities/exceptions.py +++ b/spinn_front_end_common/utilities/exceptions.py @@ -61,7 +61,8 @@ class CantFindSDRAMToUseException(SpinnFrontEndException): """ Raised when malloc and sdram stealing cannot occur. """ + class NoProvenanceDatabaseException(SpinnFrontEndException): """ Raised when the Provenance database has not yet been created. - """ \ No newline at end of file + """ From c8c7582bfa82eba1e865d0951b7e2a653ae1178a Mon Sep 17 00:00:00 2001 From: "Christian Y. Brenninkmeijer" Date: Tue, 25 Oct 2022 14:10:25 +0100 Subject: [PATCH 28/49] flake8 --- unittests/interface/provenance/test_provenance_database.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/unittests/interface/provenance/test_provenance_database.py b/unittests/interface/provenance/test_provenance_database.py index 928c84a3f7..dc7b1d4a14 100644 --- a/unittests/interface/provenance/test_provenance_database.py +++ b/unittests/interface/provenance/test_provenance_database.py @@ -22,7 +22,8 @@ from spinn_utilities.config_holder import set_config from spinn_front_end_common.interface.config_setup import unittest_setup from spinn_front_end_common.interface.provenance import ( - LogStoreDB, GlobalProvenance, ProvenanceWriter, ProvenanceReader, TimerCategory, TimerWork) + LogStoreDB, GlobalProvenance, ProvenanceWriter, ProvenanceReader, + TimerCategory, TimerWork) logger = FormatAdapter(logging.getLogger(__name__)) From 2df110daf87340dcf166378f4743937235544cbf Mon Sep 17 00:00:00 2001 From: "Christian Y. 
Brenninkmeijer" Date: Tue, 25 Oct 2022 15:10:31 +0100 Subject: [PATCH 29/49] dot use row factory for consitency --- .../interface/provenance/provenance_reader.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/spinn_front_end_common/interface/provenance/provenance_reader.py b/spinn_front_end_common/interface/provenance/provenance_reader.py index 95f7b6d672..9368f9b988 100644 --- a/spinn_front_end_common/interface/provenance/provenance_reader.py +++ b/spinn_front_end_common/interface/provenance/provenance_reader.py @@ -154,9 +154,8 @@ def get_provenance_for_router(self, x, y): ORDER BY description """ return "\n".join( - f"{ row['description'] }: { row['value'] }" - for row in self.run_query(query, [int(x), int(y)], - use_sqlite_rows=True)) + f"{ row[0] }: { row[1] }" + for row in self.run_query(query, [int(x), int(y)])) def get_cores_with_provenace(self): """ From 4b8674db535eb176280f0879e5624dd5f1a8f8cd Mon Sep 17 00:00:00 2001 From: "Christian Y. Brenninkmeijer" Date: Tue, 25 Oct 2022 15:30:17 +0100 Subject: [PATCH 30/49] write global provenance outside of the run directories --- .../interface/provenance/global_provenance.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/spinn_front_end_common/interface/provenance/global_provenance.py b/spinn_front_end_common/interface/provenance/global_provenance.py index 1f9d8aa9f2..cb0d2d8ead 100644 --- a/spinn_front_end_common/interface/provenance/global_provenance.py +++ b/spinn_front_end_common/interface/provenance/global_provenance.py @@ -58,7 +58,7 @@ def get_global_provenace_path(cls): for example before run is called """ return os.path.join( - FecDataView.get_provenance_dir_path(), + FecDataView.get_timestamp_dir_path(), "global_provenance.sqlite3") def __init__(self, database_file=None, memory=False): From 2f5ed9155dc7124f4cdbb40186d159f0d92eee98 Mon Sep 17 00:00:00 2001 From: "Christian Y. Brenninkmeijer" Date: Tue, 25 Oct 2022 15:46:25 +0100 Subject: [PATCH 31/49] split DDL files --- .../interface/provenance/global.sql | 86 +++++++++++++++++++ .../interface/provenance/global_provenance.py | 2 +- .../provenance/{db.sql => local.sql} | 76 ---------------- .../interface/provenance/provenance_writer.py | 2 +- 4 files changed, 88 insertions(+), 78 deletions(-) create mode 100644 spinn_front_end_common/interface/provenance/global.sql rename spinn_front_end_common/interface/provenance/{db.sql => local.sql} (70%) diff --git a/spinn_front_end_common/interface/provenance/global.sql b/spinn_front_end_common/interface/provenance/global.sql new file mode 100644 index 0000000000..23d59703ef --- /dev/null +++ b/spinn_front_end_common/interface/provenance/global.sql @@ -0,0 +1,86 @@ +-- Copyright (c) 2018-2022 The University of Manchester +-- +-- This program is free software: you can redistribute it and/or modify +-- it under the terms of the GNU General Public License as published by +-- the Free Software Foundation, either version 3 of the License, or +-- (at your option) any later version. +-- +-- This program is distributed in the hope that it will be useful, +-- but WITHOUT ANY WARRANTY; without even the implied warranty of +-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +-- GNU General Public License for more details. +-- +-- You should have received a copy of the GNU General Public License +-- along with this program. If not, see . 
+ +-- https://www.sqlite.org/pragma.html#pragma_synchronous +PRAGMA main.synchronous = OFF; + +-- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +-- A table holding the values for versions +CREATE TABLE IF NOT EXISTS version_provenance( + version_id INTEGER PRIMARY KEY AUTOINCREMENT, + description STRING NOT NULL, + the_value STRING NOT NULL); + +-- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +-- A table holding the values for algorithm timings +CREATE TABLE IF NOT EXISTS timer_provenance( + timer_id INTEGER PRIMARY KEY AUTOINCREMENT, + category_id INTEGER NOT NULL, + algorithm STRING NOT NULL, + work STRING NOT NULL, + time_taken INTEGER NOT NULL, + skip_reason STRING); + +CREATE VIEW IF NOT EXISTS full_timer_view AS + SELECT timer_id, category, algorithm, work, machine_on, timer_provenance.time_taken, n_run, n_loop + FROM timer_provenance ,category_timer_provenance + WHERE timer_provenance.category_id = category_timer_provenance.category_id + ORDER BY timer_id; + +CREATE VIEW IF NOT EXISTS timer_view AS + SELECT category, algorithm, work, machine_on, time_taken, n_run, n_loop + FROM full_timer_view + WHERE skip_reason is NULL + ORDER BY timer_id; + +-- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +-- A table holding the values for category timings +CREATE TABLE IF NOT EXISTS category_timer_provenance( + category_id INTEGER PRIMARY KEY AUTOINCREMENT, + category STRING NOT NULL, + time_taken INTEGER, + machine_on BOOL NOT NULL, + n_run INTEGER NOT NULL, + n_loop INTEGER); + +--------------------------------------------------------------------- +-- A table to store log.info +CREATE TABLE IF NOT EXISTS p_log_provenance( + log_id INTEGER PRIMARY KEY AUTOINCREMENT, + timestamp TIMESTAMP NOT NULL, + level INTEGER NOT NULL, + message STRING NOT NULL); + +CREATE TABLE IF NOT EXISTS log_level_names( + level INTEGER PRIMARY KEY NOT NULL, + name STRING NOT NULL); + +INSERT OR IGNORE INTO log_level_names + (level, name) +VALUES + (50, "CRITICAL"), + (40, "ERROR"), + (30, "WARNING"), + (20, "INFO"), + (10, "DEBUG"); + +CREATE VIEW IF NOT EXISTS p_log_view AS + SELECT + timestamp, + name, + message + FROM p_log_provenance left join log_level_names + ON p_log_provenance.level = log_level_names.level + ORDER BY p_log_provenance.log_id; diff --git a/spinn_front_end_common/interface/provenance/global_provenance.py b/spinn_front_end_common/interface/provenance/global_provenance.py index cb0d2d8ead..f8b8b16493 100644 --- a/spinn_front_end_common/interface/provenance/global_provenance.py +++ b/spinn_front_end_common/interface/provenance/global_provenance.py @@ -25,7 +25,7 @@ logger = FormatAdapter(logging.getLogger(__name__)) -_DDL_FILE = os.path.join(os.path.dirname(__file__), "db.sql") +_DDL_FILE = os.path.join(os.path.dirname(__file__), "global.sql") _RE = re.compile(r"(\d+)([_,:])(\d+)(?:\2(\d+))?") diff --git a/spinn_front_end_common/interface/provenance/db.sql b/spinn_front_end_common/interface/provenance/local.sql similarity index 70% rename from spinn_front_end_common/interface/provenance/db.sql rename to spinn_front_end_common/interface/provenance/local.sql index 1a83071739..355653cb34 100644 --- a/spinn_front_end_common/interface/provenance/db.sql +++ b/spinn_front_end_common/interface/provenance/local.sql @@ -16,13 +16,6 @@ -- https://www.sqlite.org/pragma.html#pragma_synchronous PRAGMA main.synchronous = OFF; --- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - --- A table holding the values for 
versions -CREATE TABLE IF NOT EXISTS version_provenance( - version_id INTEGER PRIMARY KEY AUTOINCREMENT, - description STRING NOT NULL, - the_value STRING NOT NULL); - -- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -- A table holding the values for power provenance -- Except for engery used by cores or routers @@ -31,45 +24,6 @@ CREATE TABLE IF NOT EXISTS power_provenance( description STRING NOT NULL, the_value FLOAT NOT NULL); --- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - --- A table holding the values for algorithm timings -CREATE TABLE IF NOT EXISTS timer_provenance( - timer_id INTEGER PRIMARY KEY AUTOINCREMENT, - category_id INTEGER NOT NULL, - algorithm STRING NOT NULL, - work STRING NOT NULL, - time_taken INTEGER NOT NULL, - skip_reason STRING); - -CREATE VIEW IF NOT EXISTS full_timer_view AS - SELECT timer_id, category, algorithm, work, machine_on, timer_provenance.time_taken, n_run, n_loop - FROM timer_provenance ,category_timer_provenance - WHERE timer_provenance.category_id = category_timer_provenance.category_id - ORDER BY timer_id; - -CREATE VIEW IF NOT EXISTS timer_view AS - SELECT category, algorithm, work, machine_on, time_taken, n_run, n_loop - FROM full_timer_view - WHERE skip_reason is NULL - ORDER BY timer_id; - --- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - --- A table holding the values for category timings -CREATE TABLE IF NOT EXISTS category_timer_provenance( - category_id INTEGER PRIMARY KEY AUTOINCREMENT, - category STRING NOT NULL, - time_taken INTEGER, - machine_on BOOL NOT NULL, - n_run INTEGER NOT NULL, - n_loop INTEGER); - --- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - --- A table holding the values for uncategorised general provenance -CREATE TABLE IF NOT EXISTS other_provenance( - other_id INTEGER PRIMARY KEY AUTOINCREMENT, - category STRING NOT NULL, - description STRING NOT NULL, - the_value STRING NOT NULL); -- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -- A table holding the values for data speed up packet gathers @@ -224,33 +178,3 @@ CREATE TABLE IF NOT EXISTS boards_provenance( ip_addres STRING NOT NULL, ethernet_x INTEGER NOT NULL, ethernet_y INTEGER NOT NULL); - ---------------------------------------------------------------------- --- A table to store log.info -CREATE TABLE IF NOT EXISTS p_log_provenance( - log_id INTEGER PRIMARY KEY AUTOINCREMENT, - timestamp TIMESTAMP NOT NULL, - level INTEGER NOT NULL, - message STRING NOT NULL); - -CREATE TABLE IF NOT EXISTS log_level_names( - level INTEGER PRIMARY KEY NOT NULL, - name STRING NOT NULL); - -INSERT OR IGNORE INTO log_level_names - (level, name) -VALUES - (50, "CRITICAL"), - (40, "ERROR"), - (30, "WARNING"), - (20, "INFO"), - (10, "DEBUG"); - -CREATE VIEW IF NOT EXISTS p_log_view AS - SELECT - timestamp, - name, - message - FROM p_log_provenance left join log_level_names - ON p_log_provenance.level = log_level_names.level - ORDER BY p_log_provenance.log_id; diff --git a/spinn_front_end_common/interface/provenance/provenance_writer.py b/spinn_front_end_common/interface/provenance/provenance_writer.py index 3fd22ba8f6..6ddd7feecd 100644 --- a/spinn_front_end_common/interface/provenance/provenance_writer.py +++ b/spinn_front_end_common/interface/provenance/provenance_writer.py @@ -24,7 +24,7 @@ logger = FormatAdapter(logging.getLogger(__name__)) -_DDL_FILE = os.path.join(os.path.dirname(__file__), "db.sql") +_DDL_FILE = os.path.join(os.path.dirname(__file__), 
"local.sql") _RE = re.compile(r"(\d+)([_,:])(\d+)(?:\2(\d+))?") From 9d93884e438c88165b18e60277ce4e1d51618427 Mon Sep 17 00:00:00 2001 From: "Christian Y. Brenninkmeijer" Date: Tue, 25 Oct 2022 15:58:41 +0100 Subject: [PATCH 32/49] removed other as never used. --- .../interface/provenance/global_provenance.py | 7 ++++--- .../interface/provenance/provenance_writer.py | 19 ------------------- .../provenance/test_provenance_database.py | 4 ---- 3 files changed, 4 insertions(+), 26 deletions(-) diff --git a/spinn_front_end_common/interface/provenance/global_provenance.py b/spinn_front_end_common/interface/provenance/global_provenance.py index f8b8b16493..7c1f97af5b 100644 --- a/spinn_front_end_common/interface/provenance/global_provenance.py +++ b/spinn_front_end_common/interface/provenance/global_provenance.py @@ -188,9 +188,10 @@ def _test_log_locked(self, text): # lock the database cur.execute( """ - INSERT INTO reports(message) - VALUES(?) - """, [text]) + INSERT INTO version_provenance( + description, the_value) + VALUES("foo", "bar") + """) cur.lastrowid # try logging and storing while locked. logger.warning(text) diff --git a/spinn_front_end_common/interface/provenance/provenance_writer.py b/spinn_front_end_common/interface/provenance/provenance_writer.py index 6ddd7feecd..cc3ed8e45d 100644 --- a/spinn_front_end_common/interface/provenance/provenance_writer.py +++ b/spinn_front_end_common/interface/provenance/provenance_writer.py @@ -78,25 +78,6 @@ def insert_power(self, description, the_value): VALUES(?, ?) """, [description, the_value]) - def insert_other(self, category, description, the_value): - """ - Insert unforeseen provenance into the other_provenace_table - - This allows to add provenance that does not readily fit into any of - the other categerogies - - :param str category: grouping from this provenance - :param str description: Specific provenance being saved - :param ste the_value: Data - """ - with self.transaction() as cur: - cur.execute( - """ - INSERT INTO other_provenance( - category, description, the_value) - VALUES(?, ?, ?) - """, [category, description, the_value]) - def insert_gatherer(self, x, y, address, bytes_read, run, description, the_value): """ diff --git a/unittests/interface/provenance/test_provenance_database.py b/unittests/interface/provenance/test_provenance_database.py index dc7b1d4a14..3e9d100f63 100644 --- a/unittests/interface/provenance/test_provenance_database.py +++ b/unittests/interface/provenance/test_provenance_database.py @@ -112,10 +112,6 @@ def test_category_timings(self): data = db.get_category_timer_sum(TimerCategory.MAPPING) self.assertEqual(12 + 123, data) - def test_other(self): - with ProvenanceWriter() as db: - db.insert_other("foo", "bar", 12) - def test_gatherer(self): with ProvenanceWriter() as db: db.insert_gatherer( From 7a5b89a4607c3326b1171c3a2a70746853b00c27 Mon Sep 17 00:00:00 2001 From: "Christian Y. 
Brenninkmeijer" Date: Tue, 25 Oct 2022 16:13:09 +0100 Subject: [PATCH 33/49] flake8 --- .../buffer_management/storage_objects/base_database.py | 1 - .../buffer_management/storage_objects/buffer_database.py | 2 -- 2 files changed, 3 deletions(-) diff --git a/spinn_front_end_common/interface/buffer_management/storage_objects/base_database.py b/spinn_front_end_common/interface/buffer_management/storage_objects/base_database.py index d87615c906..12d604289f 100644 --- a/spinn_front_end_common/interface/buffer_management/storage_objects/base_database.py +++ b/spinn_front_end_common/interface/buffer_management/storage_objects/base_database.py @@ -14,7 +14,6 @@ # along with this program. If not, see . import os -import sqlite3 import time from spinn_utilities.abstract_context_manager import AbstractContextManager from spinn_front_end_common.data import FecDataView diff --git a/spinn_front_end_common/interface/buffer_management/storage_objects/buffer_database.py b/spinn_front_end_common/interface/buffer_management/storage_objects/buffer_database.py index 9c0862f7f6..5a57af85f2 100644 --- a/spinn_front_end_common/interface/buffer_management/storage_objects/buffer_database.py +++ b/spinn_front_end_common/interface/buffer_management/storage_objects/buffer_database.py @@ -16,7 +16,6 @@ import os import sqlite3 import time -from spinn_front_end_common.data import FecDataView from .base_database import BaseDatabase _DDL_FILE = os.path.join(os.path.dirname(__file__), "db.sql") @@ -46,7 +45,6 @@ class BufferDatabase(BaseDatabase): __slots__ = [] - def clear_region(self, x, y, p, region): """ Clears the data for a single region. From 4e7950499332e94cf771ecf2cc9557052f92dec6 Mon Sep 17 00:00:00 2001 From: "Christian Y. Brenninkmeijer" Date: Wed, 26 Oct 2022 07:43:12 +0100 Subject: [PATCH 34/49] skip_reason in view --- spinn_front_end_common/interface/provenance/global.sql | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/spinn_front_end_common/interface/provenance/global.sql b/spinn_front_end_common/interface/provenance/global.sql index 23d59703ef..63b1471df1 100644 --- a/spinn_front_end_common/interface/provenance/global.sql +++ b/spinn_front_end_common/interface/provenance/global.sql @@ -34,7 +34,7 @@ CREATE TABLE IF NOT EXISTS timer_provenance( skip_reason STRING); CREATE VIEW IF NOT EXISTS full_timer_view AS - SELECT timer_id, category, algorithm, work, machine_on, timer_provenance.time_taken, n_run, n_loop + SELECT timer_id, category, algorithm, work, machine_on, timer_provenance.time_taken, n_run, n_loop, skip_reason FROM timer_provenance ,category_timer_provenance WHERE timer_provenance.category_id = category_timer_provenance.category_id ORDER BY timer_id; From db5e8811b4323e406cc2ce273955020a1428c4d6 Mon Sep 17 00:00:00 2001 From: "Christian Y. 
Brenninkmeijer" Date: Wed, 26 Oct 2022 09:54:32 +0100 Subject: [PATCH 35/49] combine buffer.sqlite and provenance.sqlite3 into data.sqlite3 --- .../storage_objects/buffer_database.py | 4 +- .../buffer_management/storage_objects/db.sql | 68 ------------------- .../interface/provenance/provenance_reader.py | 13 +--- .../interface/provenance/provenance_writer.py | 21 ++---- .../base_database.py | 22 +++--- .../provenance/local.sql => utilities/db.sql} | 53 ++++++++++++++- spinn_front_end_common/utilities/sqlite_db.py | 2 +- 7 files changed, 75 insertions(+), 108 deletions(-) delete mode 100644 spinn_front_end_common/interface/buffer_management/storage_objects/db.sql rename spinn_front_end_common/{interface/buffer_management/storage_objects => utilities}/base_database.py (80%) rename spinn_front_end_common/{interface/provenance/local.sql => utilities/db.sql} (74%) diff --git a/spinn_front_end_common/interface/buffer_management/storage_objects/buffer_database.py b/spinn_front_end_common/interface/buffer_management/storage_objects/buffer_database.py index 5a57af85f2..c27f891e92 100644 --- a/spinn_front_end_common/interface/buffer_management/storage_objects/buffer_database.py +++ b/spinn_front_end_common/interface/buffer_management/storage_objects/buffer_database.py @@ -13,12 +13,10 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . -import os import sqlite3 import time -from .base_database import BaseDatabase +from spinn_front_end_common.utilities.base_database import BaseDatabase -_DDL_FILE = os.path.join(os.path.dirname(__file__), "db.sql") _SECONDS_TO_MICRO_SECONDS_CONVERSION = 1000 #: Name of the database in the data folder diff --git a/spinn_front_end_common/interface/buffer_management/storage_objects/db.sql b/spinn_front_end_common/interface/buffer_management/storage_objects/db.sql deleted file mode 100644 index b75bd93cb6..0000000000 --- a/spinn_front_end_common/interface/buffer_management/storage_objects/db.sql +++ /dev/null @@ -1,68 +0,0 @@ --- Copyright (c) 2018-2019 The University of Manchester --- --- This program is free software: you can redistribute it and/or modify --- it under the terms of the GNU General Public License as published by --- the Free Software Foundation, either version 3 of the License, or --- (at your option) any later version. --- --- This program is distributed in the hope that it will be useful, --- but WITHOUT ANY WARRANTY; without even the implied warranty of --- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the --- GNU General Public License for more details. --- --- You should have received a copy of the GNU General Public License --- along with this program. If not, see . - --- https://www.sqlite.org/pragma.html#pragma_synchronous -PRAGMA main.synchronous = OFF; - --- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - --- A table describing the cores. -CREATE TABLE IF NOT EXISTS core( - core_id INTEGER PRIMARY KEY AUTOINCREMENT, - x INTEGER NOT NULL, - y INTEGER NOT NULL, - processor INTEGER NOT NULL); --- Every processor has a unique ID -CREATE UNIQUE INDEX IF NOT EXISTS coreSanity ON core( - x ASC, y ASC, processor ASC); - - --- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - --- A table describing recording regions. 
-CREATE TABLE IF NOT EXISTS region( - region_id INTEGER PRIMARY KEY AUTOINCREMENT, - core_id INTEGER NOT NULL - REFERENCES core(core_id) ON DELETE RESTRICT, - local_region_index INTEGER NOT NULL, - address INTEGER, - content BLOB NOT NULL DEFAULT '', - content_len INTEGER DEFAULT 0, - fetches INTEGER NOT NULL DEFAULT 0, - append_time INTEGER); --- Every recording region has a unique vertex and index -CREATE UNIQUE INDEX IF NOT EXISTS regionSanity ON region( - core_id ASC, local_region_index ASC); - --- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - --- A table containing the data which doesn't fit in the content column of the --- region table; care must be taken with this to not exceed 1GB! We actually --- store one per auto-pause-resume cycle as that is more efficient. -CREATE TABLE IF NOT EXISTS region_extra( - extra_id INTEGER PRIMARY KEY ASC AUTOINCREMENT, - region_id INTEGER NOT NULL - REFERENCES region(region_id) ON DELETE RESTRICT, - content BLOB NOT NULL DEFAULT '', - content_len INTEGER DEFAULT 0); - -CREATE VIEW IF NOT EXISTS region_view AS - SELECT core_id, region_id, x, y, processor, local_region_index, address, - content, content_len, fetches, append_time, - (fetches > 1) AS have_extra -FROM core NATURAL JOIN region; - -CREATE VIEW IF NOT EXISTS extra_view AS - SELECT core_id, region_id, extra_id, x, y, processor, local_region_index, - address, append_time, region_extra.content AS content, - region_extra.content_len AS content_len -FROM core NATURAL JOIN region NATURAL JOIN region_extra; diff --git a/spinn_front_end_common/interface/provenance/provenance_reader.py b/spinn_front_end_common/interface/provenance/provenance_reader.py index 9368f9b988..f5640fecae 100644 --- a/spinn_front_end_common/interface/provenance/provenance_reader.py +++ b/spinn_front_end_common/interface/provenance/provenance_reader.py @@ -18,10 +18,10 @@ from spinn_front_end_common.utilities.constants import PROVENANCE_DB from spinn_front_end_common.utilities.exceptions import ( NoProvenanceDatabaseException) -from spinn_front_end_common.utilities.sqlite_db import SQLiteDB +from spinn_front_end_common.utilities.base_database import BaseDatabase -class ProvenanceReader(SQLiteDB): +class ProvenanceReader(BaseDatabase): """ Provides a connection to a database containing provenance for the current run and some convenience methods for extracting provenance data from it. @@ -67,14 +67,7 @@ def __init__(self, provenance_data_path=None): :param provenance_data_path: Path to the provenance database to wrap :type provenance_data_path: None or str """ - if provenance_data_path: - self._provenance_data_path = provenance_data_path - else: - self._provenance_data_path = self.get_last_run_database_path() - if not os.path.exists(self._provenance_data_path): - raise NoProvenanceDatabaseException( - f"no such DB: {self._provenance_data_path}") - SQLiteDB.__init__(self, self._provenance_data_path, read_only=True, + super().__init__(provenance_data_path, read_only=True, row_factory=None, text_factory=None) def run_query(self, query, params=()): diff --git a/spinn_front_end_common/interface/provenance/provenance_writer.py b/spinn_front_end_common/interface/provenance/provenance_writer.py index cc3ed8e45d..502f6ba624 100644 --- a/spinn_front_end_common/interface/provenance/provenance_writer.py +++ b/spinn_front_end_common/interface/provenance/provenance_writer.py @@ -14,21 +14,14 @@ # along with this program. If not, see . 
import logging -import os -import re from spinn_utilities.config_holder import get_config_int from spinn_utilities.log import FormatAdapter -from spinn_front_end_common.data import FecDataView -from spinn_front_end_common.utilities.constants import (PROVENANCE_DB) -from spinn_front_end_common.utilities.sqlite_db import SQLiteDB +from spinn_front_end_common.utilities.base_database import BaseDatabase logger = FormatAdapter(logging.getLogger(__name__)) -_DDL_FILE = os.path.join(os.path.dirname(__file__), "local.sql") -_RE = re.compile(r"(\d+)([_,:])(\d+)(?:\2(\d+))?") - -class ProvenanceWriter(SQLiteDB): +class ProvenanceWriter(BaseDatabase): """ Specific implementation of the Database for SQLite 3. .. note:: @@ -40,9 +33,7 @@ class ProvenanceWriter(SQLiteDB): You can't port to a different database engine without a lot of work. """ - __slots__ = [ - "_database_file" - ] + __slots__ = [] def __init__(self, database_file=None, memory=False): """ @@ -57,11 +48,7 @@ def __init__(self, database_file=None, memory=False): Otherwise a None file will mean the default should be used """ - if database_file is None and not memory: - database_file = os.path.join( - FecDataView.get_provenance_dir_path(), PROVENANCE_DB) - self._database_file = database_file - SQLiteDB.__init__(self, database_file, ddl_file=_DDL_FILE) + super().__init__(database_file) def insert_power(self, description, the_value): """ diff --git a/spinn_front_end_common/interface/buffer_management/storage_objects/base_database.py b/spinn_front_end_common/utilities/base_database.py similarity index 80% rename from spinn_front_end_common/interface/buffer_management/storage_objects/base_database.py rename to spinn_front_end_common/utilities/base_database.py index 12d604289f..bec70ab580 100644 --- a/spinn_front_end_common/interface/buffer_management/storage_objects/base_database.py +++ b/spinn_front_end_common/utilities/base_database.py @@ -14,12 +14,14 @@ # along with this program. If not, see . import os +import sqlite3 import time from spinn_utilities.abstract_context_manager import AbstractContextManager from spinn_front_end_common.data import FecDataView from spinn_front_end_common.utilities.sqlite_db import SQLiteDB -_DDL_FILE = os.path.join(os.path.dirname(__file__), "db.sql") +_DDL_FILE = os.path.join(os.path.dirname(__file__), + "db.sql") _SECONDS_TO_MICRO_SECONDS_CONVERSION = 1000 #: Name of the database in the data folder @@ -44,26 +46,30 @@ class BaseDatabase(SQLiteDB, AbstractContextManager): Threads can access different DBs just fine. """ - __slots__ = [] + __slots__ = ["_database_file"] - def __init__(self, database_file=None): + def __init__(self, database_file=None, *, read_only=False, + row_factory=sqlite3.Row, text_factory=memoryview): """ :param str database_file: The name of a file that contains (or will contain) an SQLite database holding the data. If omitted the default location will be used. 
""" - if database_file is None: - database_file = self.default_database_file() - - super().__init__(database_file, ddl_file=_DDL_FILE) + if database_file: + self._database_file = database_file + else: + self._database_file = self.default_database_file() + super().__init__( + self._database_file, read_only=read_only, row_factory=row_factory, + text_factory=text_factory, ddl_file=_DDL_FILE) @classmethod def default_database_file(cls): if FecDataView.get_reset_number(): return os.path.join( FecDataView.get_run_dir_path(), - f"buffer{FecDataView.get_reset_number()}.sqlite3") + f"data{FecDataView.get_reset_number()}.sqlite3") return os.path.join( FecDataView.get_run_dir_path(), "buffer.sqlite3") diff --git a/spinn_front_end_common/interface/provenance/local.sql b/spinn_front_end_common/utilities/db.sql similarity index 74% rename from spinn_front_end_common/interface/provenance/local.sql rename to spinn_front_end_common/utilities/db.sql index 355653cb34..6dd23971a8 100644 --- a/spinn_front_end_common/interface/provenance/local.sql +++ b/spinn_front_end_common/utilities/db.sql @@ -1,4 +1,4 @@ --- Copyright (c) 2018-2022 The University of Manchester +-- Copyright (c) 2018-2019 The University of Manchester -- -- This program is free software: you can redistribute it and/or modify -- it under the terms of the GNU General Public License as published by @@ -16,6 +16,57 @@ -- https://www.sqlite.org/pragma.html#pragma_synchronous PRAGMA main.synchronous = OFF; +-- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +-- A table describing the cores. +CREATE TABLE IF NOT EXISTS core( + core_id INTEGER PRIMARY KEY AUTOINCREMENT, + x INTEGER NOT NULL, + y INTEGER NOT NULL, + processor INTEGER NOT NULL); +-- Every processor has a unique ID +CREATE UNIQUE INDEX IF NOT EXISTS coreSanity ON core( + x ASC, y ASC, processor ASC); + + +-- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +-- A table describing recording regions. +CREATE TABLE IF NOT EXISTS region( + region_id INTEGER PRIMARY KEY AUTOINCREMENT, + core_id INTEGER NOT NULL + REFERENCES core(core_id) ON DELETE RESTRICT, + local_region_index INTEGER NOT NULL, + address INTEGER, + content BLOB NOT NULL DEFAULT '', + content_len INTEGER DEFAULT 0, + fetches INTEGER NOT NULL DEFAULT 0, + append_time INTEGER); +-- Every recording region has a unique vertex and index +CREATE UNIQUE INDEX IF NOT EXISTS regionSanity ON region( + core_id ASC, local_region_index ASC); + +-- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +-- A table containing the data which doesn't fit in the content column of the +-- region table; care must be taken with this to not exceed 1GB! We actually +-- store one per auto-pause-resume cycle as that is more efficient. 
+CREATE TABLE IF NOT EXISTS region_extra(
+    extra_id INTEGER PRIMARY KEY ASC AUTOINCREMENT,
+    region_id INTEGER NOT NULL
+        REFERENCES region(region_id) ON DELETE RESTRICT,
+    content BLOB NOT NULL DEFAULT '',
+    content_len INTEGER DEFAULT 0);
+
+CREATE VIEW IF NOT EXISTS region_view AS
+    SELECT core_id, region_id, x, y, processor, local_region_index, address,
+        content, content_len, fetches, append_time,
+        (fetches > 1) AS have_extra
+FROM core NATURAL JOIN region;
+
+CREATE VIEW IF NOT EXISTS extra_view AS
+    SELECT core_id, region_id, extra_id, x, y, processor, local_region_index,
+        address, append_time, region_extra.content AS content,
+        region_extra.content_len AS content_len
+FROM core NATURAL JOIN region NATURAL JOIN region_extra;
+
 -- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
 -- A table holding the values for power provenance
 -- Except for energy used by cores or routers
diff --git a/spinn_front_end_common/utilities/sqlite_db.py b/spinn_front_end_common/utilities/sqlite_db.py
index 89259517b9..010d24a87f 100644
--- a/spinn_front_end_common/utilities/sqlite_db.py
+++ b/spinn_front_end_common/utilities/sqlite_db.py
@@ -112,7 +112,7 @@ def __init__(self, database_file=None, *, read_only=False, ddl_file=None,
         if text_factory:
             self.__db.text_factory = text_factory
 
-        if ddl_file:
+        if not read_only and ddl_file:
             with open(ddl_file, encoding="utf-8") as f:
                 sql = f.read()
             self.__db.executescript(sql)

From a435b437697fb52dfe66c4eed1218adc767d9408 Mon Sep 17 00:00:00 2001
From: "Christian Y. Brenninkmeijer"
Date: Wed, 26 Oct 2022 10:02:57 +0100
Subject: [PATCH 36/49] flake8

---
 .../interface/provenance/provenance_reader.py | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/spinn_front_end_common/interface/provenance/provenance_reader.py b/spinn_front_end_common/interface/provenance/provenance_reader.py
index f5640fecae..7b34ca953c 100644
--- a/spinn_front_end_common/interface/provenance/provenance_reader.py
+++ b/spinn_front_end_common/interface/provenance/provenance_reader.py
@@ -16,8 +16,6 @@
 import os
 from spinn_front_end_common.data import FecDataView
 from spinn_front_end_common.utilities.constants import PROVENANCE_DB
-from spinn_front_end_common.utilities.exceptions import (
-    NoProvenanceDatabaseException)
 from spinn_front_end_common.utilities.base_database import BaseDatabase
 
 
@@ -68,7 +66,7 @@ def __init__(self, provenance_data_path=None):
         :type provenance_data_path: None or str
         """
         super().__init__(provenance_data_path, read_only=True,
-            row_factory=None, text_factory=None)
+                         row_factory=None, text_factory=None)

From cfda518dc51f7f65f0f827d589526d64b2f4b051 Mon Sep 17 00:00:00 2001
From: "Christian Y. Brenninkmeijer"
Date: Wed, 26 Oct 2022 10:50:41 +0100
Subject: [PATCH 37/49] use data.sqlite3 not buffer.sqlite3

---
 spinn_front_end_common/utilities/base_database.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/spinn_front_end_common/utilities/base_database.py b/spinn_front_end_common/utilities/base_database.py
index bec70ab580..8af6fe8b02 100644
--- a/spinn_front_end_common/utilities/base_database.py
+++ b/spinn_front_end_common/utilities/base_database.py
@@ -71,7 +71,7 @@ def default_database_file(cls):
                 FecDataView.get_run_dir_path(),
                 f"data{FecDataView.get_reset_number()}.sqlite3")
         return os.path.join(
-            FecDataView.get_run_dir_path(), "buffer.sqlite3")
+            FecDataView.get_run_dir_path(), "data.sqlite3")
 
     @staticmethod
     def _get_core_id(cursor, x, y, p):

From ce2a6528d51aa09ece13528020f51be88f8274a1 Mon Sep 17 00:00:00 2001
From: "Christian Y. Brenninkmeijer"
Date: Wed, 26 Oct 2022 15:53:38 +0100
Subject: [PATCH 38/49] merge core_mapping table into cores

---
 .../interface/abstract_spinnaker_base.py      | 11 +++++
 .../storage_objects/buffer_database.py        | 46 +++++++++++++++++++
 .../energy_provenance_reporter.py             |  2 -
 .../placements_provenance_gatherer.py         |  3 --
 .../interface/provenance/provenance_writer.py | 20 --------
 .../utilities/base_database.py                |  5 +-
 spinn_front_end_common/utilities/db.sql       | 15 ++----
 ...speed_up_packet_gatherer_machine_vertex.py |  2 -
 .../test_buffered_database.py                 | 23 ++++++++++
 .../provenance/test_provenance_database.py    | 10 ----
 10 files changed, 85 insertions(+), 52 deletions(-)

diff --git a/spinn_front_end_common/interface/abstract_spinnaker_base.py b/spinn_front_end_common/interface/abstract_spinnaker_base.py
index 8402e3a906..48a051c069 100644
--- a/spinn_front_end_common/interface/abstract_spinnaker_base.py
+++ b/spinn_front_end_common/interface/abstract_spinnaker_base.py
@@ -68,6 +68,8 @@
     AbstractVertexWithEdgeToDependentVertices, AbstractCanReset)
 from spinn_front_end_common.interface.buffer_management import BufferManager
+from spinn_front_end_common.interface.buffer_management.storage_objects \
+    import BufferDatabase
 from spinn_front_end_common.interface.config_handler import ConfigHandler
 from spinn_front_end_common.interface.interface_functions import (
     application_finisher, application_runner,
@@ -439,6 +441,9 @@ def __run(self, run_time, sync_time):
 
         self._do_mapping(total_run_time)
 
+        if not self._data_writer.is_ran_last():
+            self._execute_record_core_names()
+
         # Check if anything has per-timestep SDRAM usage
         is_per_timestep_sdram = self._is_per_timestep_sdram()
 
@@ -888,6 +893,12 @@ def _do_placer(self, system_placements):
             raise ConfigurationException(
                 f"Unexpected cfg setting placer: {name}")
 
+    def _execute_record_core_names(self):
+        with FecTimer(
+                "Record core names to database", TimerWork.REPORT) as timer:
+            with BufferDatabase() as db:
+                db.store_vertex_labels()
+
     def _execute_system_multicast_routing_generator(self):
         """ Runs, times and logs the SystemMulticastRoutingGenerator if required
diff --git a/spinn_front_end_common/interface/buffer_management/storage_objects/buffer_database.py b/spinn_front_end_common/interface/buffer_management/storage_objects/buffer_database.py
index c27f891e92..3ea15bca23 100644
--- a/spinn_front_end_common/interface/buffer_management/storage_objects/buffer_database.py
+++ b/spinn_front_end_common/interface/buffer_management/storage_objects/buffer_database.py
@@ -15,6 +15,7 @@
 import sqlite3
 import time
+from spinn_front_end_common.data import FecDataView
 from spinn_front_end_common.utilities.base_database import BaseDatabase
 
 _SECONDS_TO_MICRO_SECONDS_CONVERSION = 1000
@@ -236,3 +237,48 @@ def get_region_data(self, x, y, p, region):
                 return data, False
         except LookupError:
             return memoryview(b''), True
+
+    def _set_core_name(self, cursor, x, y, p, core_name):
+        """
+        :param ~sqlite3.Cursor cursor:
+        :param int x:
+        :param int y:
+        :param int p:
+        :param str core_name:
+
+        """
+        try:
+            cursor.execute(
+                """
+                INSERT INTO core (x, y, processor, core_name)
+                VALUES (?, ?, ? ,?)
+                """, (x, y, p, core_name))
+        except sqlite3.IntegrityError:
+            cursor.execute(
+                """
+                UPDATE core SET core_name = ?
+                WHERE x = ? AND y = ? and processor = ?
+
+                """, (core_name, x, y, p))
+
+    def store_vertex_labels(self):
+        with self.transaction() as cursor:
+            for placement in FecDataView.iterate_placemements():
+                self._set_core_name(cursor, placement.x, placement.y,
+                                    placement.p, placement.vertex.label)
+            for chip in FecDataView.get_machine().chips:
+                for processor in chip.processors:
+                    if processor.is_monitor:
+                        self._set_core_name(
+                            cursor, chip.x, chip.y, processor.processor_id,
+                            f"SCAMP(OS)_{chip.x}:{chip.y}")
+
+    def get_core_name(self, x, y, p):
+        with self.transaction() as cursor:
+            for row in cursor.execute(
+                    """
+                    SELECT core_name
+                    FROM core
+                    WHERE x = ? AND y = ? and processor = ?
+                    """, (x, y, p)):
+                return str(row["core_name"], 'utf8')
diff --git a/spinn_front_end_common/interface/interface_functions/energy_provenance_reporter.py b/spinn_front_end_common/interface/interface_functions/energy_provenance_reporter.py
index 90726dfc63..0f0cdc56ff 100644
--- a/spinn_front_end_common/interface/interface_functions/energy_provenance_reporter.py
+++ b/spinn_front_end_common/interface/interface_functions/energy_provenance_reporter.py
@@ -47,8 +47,6 @@ def energy_provenance_reporter(power_used):
             db.insert_core(
                 x, y, p, "Energy (Joules)",
                 power_used.get_core_active_energy_joules(x, y, p))
-            if p == 0:
-                db.add_core_name(x, y, p, "SCAMP(OS)")
     for x, y in power_used.active_routers:
         db.insert_router(
             x, y, "Energy (Joules)",
diff --git a/spinn_front_end_common/interface/interface_functions/placements_provenance_gatherer.py b/spinn_front_end_common/interface/interface_functions/placements_provenance_gatherer.py
index 40fa932abb..fb9f65e2b5 100644
--- a/spinn_front_end_common/interface/interface_functions/placements_provenance_gatherer.py
+++ b/spinn_front_end_common/interface/interface_functions/placements_provenance_gatherer.py
@@ -56,9 +56,6 @@ def _add_placement_provenance(placement, errors):
     # get data
     try:
         placement.vertex.get_provenance_data_from_machine(placement)
-        with ProvenanceWriter() as db:
-            db.add_core_name(placement.x, placement.y, placement.p,
-                             placement.vertex.label)
     except Exception:  # pylint: disable=broad-except
         errors.append(traceback.format_exc())
 
diff --git a/spinn_front_end_common/interface/provenance/provenance_writer.py b/spinn_front_end_common/interface/provenance/provenance_writer.py
index 502f6ba624..12e20b191b 100644
--- a/spinn_front_end_common/interface/provenance/provenance_writer.py
+++ b/spinn_front_end_common/interface/provenance/provenance_writer.py
@@ -140,26 +140,6 @@ def insert_core(self, x, y, p, description, the_value):
                 VALUES(?, ?, ?, ?, ?)
                 """, [x, y, p, description, the_value])
 
-    def add_core_name(self, x, y, p, core_name):
-        """
-        Adds a vertex or similar name for the core to the core_mapping table
-
-        A second call to the same core is silently ignored even if the name
-        is different.
- - :param int x: X coordinate of the chip - :param int y: Y coordinate of the chip - :param int p: id of the core - :param str core_name: Name to assign - """ - with self.transaction() as cur: - cur.execute( - """ - INSERT OR IGNORE INTO core_mapping( - x, y, p, core_name) - VALUES(?, ?, ?, ?) - """, [x, y, p, core_name]) - def insert_report(self, message): """ Save and if applicable logs a message to the report_table diff --git a/spinn_front_end_common/utilities/base_database.py b/spinn_front_end_common/utilities/base_database.py index 8af6fe8b02..57a48c94e2 100644 --- a/spinn_front_end_common/utilities/base_database.py +++ b/spinn_front_end_common/utilities/base_database.py @@ -73,8 +73,7 @@ def default_database_file(cls): return os.path.join( FecDataView.get_run_dir_path(), "data.sqlite3") - @staticmethod - def _get_core_id(cursor, x, y, p): + def _get_core_id(self, cursor, x, y, p): """ :param ~sqlite3.Cursor cursor: :param int x: @@ -84,7 +83,7 @@ def _get_core_id(cursor, x, y, p): """ for row in cursor.execute( """ - SELECT core_id FROM region_view + SELECT core_id FROM core WHERE x = ? AND y = ? AND processor = ? LIMIT 1 """, (x, y, p)): diff --git a/spinn_front_end_common/utilities/db.sql b/spinn_front_end_common/utilities/db.sql index 6dd23971a8..9a9625fe4d 100644 --- a/spinn_front_end_common/utilities/db.sql +++ b/spinn_front_end_common/utilities/db.sql @@ -22,7 +22,8 @@ CREATE TABLE IF NOT EXISTS core( core_id INTEGER PRIMARY KEY AUTOINCREMENT, x INTEGER NOT NULL, y INTEGER NOT NULL, - processor INTEGER NOT NULL); + processor INTEGER NOT NULL, + core_name STRING); -- Every processor has a unique ID CREATE UNIQUE INDEX IF NOT EXISTS coreSanity ON core( x ASC, y ASC, processor ASC); @@ -167,21 +168,11 @@ CREATE TABLE IF NOT EXISTS core_provenance( description STRING NOT NULL, the_value INTEGER NOT NULL); --- A table holding the mapping from vertex name to core x, y, p -CREATE TABLE IF NOT EXISTS core_mapping( - core_name STRING NOT NULL, - x INTEGER, - y INTEGER, - p INTEGER); - --- Every core has a unique x,y,p location. 
-CREATE UNIQUE INDEX IF NOT EXISTS core_sanity ON core_mapping( - x ASC, y ASC, p ASC); -- Create a view combining core name and data CREATE VIEW IF NOT EXISTS core_provenance_view AS SELECT core_name, x, y, p, description, the_value - FROM core_provenance NATURAL JOIN core_mapping; + FROM core_provenance NATURAL JOIN core; -- Compute some basic statistics per core over the provenance CREATE VIEW IF NOT EXISTS core_stats_view AS diff --git a/spinn_front_end_common/utility_models/data_speed_up_packet_gatherer_machine_vertex.py b/spinn_front_end_common/utility_models/data_speed_up_packet_gatherer_machine_vertex.py index 5c9dc89457..375e1e8dda 100644 --- a/spinn_front_end_common/utility_models/data_speed_up_packet_gatherer_machine_vertex.py +++ b/spinn_front_end_common/utility_models/data_speed_up_packet_gatherer_machine_vertex.py @@ -1479,8 +1479,6 @@ def get_provenance_data_from_machine(self, placement): n_sdp_sent, n_sdp_recvd, n_in_streams, n_out_streams = ( _FOUR_WORDS.unpack_from(data)) with ProvenanceWriter() as db: - db.add_core_name( - placement.x, placement.y, placement.p, placement.vertex.label) db.insert_core( placement.x, placement.y, placement.p, "Sent_SDP_Packets", n_sdp_sent) diff --git a/unittests/interface/buffer_management/test_buffered_database.py b/unittests/interface/buffer_management/test_buffered_database.py index e644f22201..0f7c1b7008 100644 --- a/unittests/interface/buffer_management/test_buffered_database.py +++ b/unittests/interface/buffer_management/test_buffered_database.py @@ -15,6 +15,9 @@ import unittest import os +from pacman.model.graphs.machine import SimpleMachineVertex +from pacman.model.placements import Placement, Placements +from spinn_front_end_common.data.fec_data_writer import FecDataWriter from spinn_front_end_common.interface.buffer_management.storage_objects \ import BufferDatabase from spinn_front_end_common.interface.config_setup import unittest_setup @@ -45,3 +48,23 @@ def test_use_database(self): self.assertEqual(bytes(data), b"abcdef") self.assertTrue(os.path.isfile(f), "DB still exists") + + def test_placements(self): + writer = FecDataWriter.mock() + info = Placements([]) + p1 = Placement(SimpleMachineVertex(None, label="V1"), 1, 2, 3) + info.add_placement(p1) + v2 = SimpleMachineVertex(None, label="V2") + p2 = Placement(v2, 1, 2, 5) + info.add_placement(p2) + info.add_placement(Placement(SimpleMachineVertex(None), 2, 2, 3)) + writer.set_placements(info) + with BufferDatabase() as db: + db.store_data_in_region_buffer(1, 2, 3, 0, False, b"abc") + db.store_vertex_labels() + label = db.get_core_name(1, 2, 3) + self.assertEqual("V1", label) + label = db.get_core_name(1, 2, 5) + self.assertEqual("V2", label) + label = db.get_core_name(4, 3, 0) + self.assertEqual("SCAMP(OS)_4:3", label) \ No newline at end of file diff --git a/unittests/interface/provenance/test_provenance_database.py b/unittests/interface/provenance/test_provenance_database.py index 3e9d100f63..49fbd0e501 100644 --- a/unittests/interface/provenance/test_provenance_database.py +++ b/unittests/interface/provenance/test_provenance_database.py @@ -158,16 +158,6 @@ def test_cores(self): db.insert_core(1, 3, 2, "des2", 67) db.insert_core(1, 3, 1, "des1", 48) - def test_core_name(self): - with ProvenanceWriter() as db: - db.add_core_name(1, 3, 2, "first_core") - db.add_core_name(1, 3, 3, "second_core") - db.add_core_name(1, 3, 2, "first_core") - db.add_core_name(1, 3, 2, "new_name is ignored") - with ProvenanceReader() as db: - data = db.run_query("Select * from core_mapping") - 
self.assertEqual(2, len(data)) - def test_messages(self): set_config("Reports", "provenance_report_cutoff", 3) with LogCapture() as lc: From 17b005b005e0cf9d1394f57b2605998b81da3e03 Mon Sep 17 00:00:00 2001 From: "Christian Y. Brenninkmeijer" Date: Wed, 26 Oct 2022 16:08:30 +0100 Subject: [PATCH 39/49] normalise core_provenance table --- .../interface/provenance/provenance_writer.py | 7 ++++--- spinn_front_end_common/utilities/db.sql | 9 ++++----- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/spinn_front_end_common/interface/provenance/provenance_writer.py b/spinn_front_end_common/interface/provenance/provenance_writer.py index 12e20b191b..d5d15cb083 100644 --- a/spinn_front_end_common/interface/provenance/provenance_writer.py +++ b/spinn_front_end_common/interface/provenance/provenance_writer.py @@ -133,12 +133,13 @@ def insert_core(self, x, y, p, description, the_value): :param int the_value: data """ with self.transaction() as cur: + core_id = self._get_core_id(cur, x, y, p) cur.execute( """ INSERT INTO core_provenance( - x, y, p, description, the_value) - VALUES(?, ?, ?, ?, ?) - """, [x, y, p, description, the_value]) + core_id, description, the_value) + VALUES(?, ?, ?) + """, [core_id, description, the_value]) def insert_report(self, message): """ diff --git a/spinn_front_end_common/utilities/db.sql b/spinn_front_end_common/utilities/db.sql index 9a9625fe4d..76ca567c6d 100644 --- a/spinn_front_end_common/utilities/db.sql +++ b/spinn_front_end_common/utilities/db.sql @@ -161,17 +161,16 @@ CREATE VIEW IF NOT EXISTS router_summary_view AS -- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -- A table holding the values for each core CREATE TABLE IF NOT EXISTS core_provenance( - core_id INTEGER PRIMARY KEY AUTOINCREMENT, - x INTEGER NOT NULL, - y INTEGER NOT NULL, - p INTEGER NOT NULL, + cp_id INTEGER PRIMARY KEY AUTOINCREMENT, + core_id INTEGER NOT NULL + REFERENCES core(core_id) ON DELETE RESTRICT, description STRING NOT NULL, the_value INTEGER NOT NULL); -- Create a view combining core name and data CREATE VIEW IF NOT EXISTS core_provenance_view AS - SELECT core_name, x, y, p, description, the_value + SELECT core_name, x, y, processor as p, description, the_value FROM core_provenance NATURAL JOIN core; -- Compute some basic statistics per core over the provenance From 018d5eb8410db1bdd68613655077ce6cca3db839 Mon Sep 17 00:00:00 2001 From: "Christian Y. 
Date: Wed, 26 Oct 2022 16:19:07 +0100
Subject: [PATCH 40/49] flake8

---
 spinn_front_end_common/interface/abstract_spinnaker_base.py  | 4 ++--
 .../buffer_management/storage_objects/buffer_database.py     | 1 -
 .../interface_functions/placements_provenance_gatherer.py    | 2 +-
 .../interface/buffer_management/test_buffered_database.py    | 2 +-
 4 files changed, 4 insertions(+), 5 deletions(-)

diff --git a/spinn_front_end_common/interface/abstract_spinnaker_base.py b/spinn_front_end_common/interface/abstract_spinnaker_base.py
index 48a051c069..7debc26658 100644
--- a/spinn_front_end_common/interface/abstract_spinnaker_base.py
+++ b/spinn_front_end_common/interface/abstract_spinnaker_base.py
@@ -895,8 +895,8 @@ def _do_placer(self, system_placements):
 
     def _execute_record_core_names(self):
         with FecTimer(
-                "Record core names to database", TimerWork.REPORT) as timer:
-            with BufferDatabase() as db:
+                "Record core names to database", TimerWork.REPORT):
+            with BufferDatabase() as db:
                 db.store_vertex_labels()
 
diff --git a/spinn_front_end_common/interface/buffer_management/storage_objects/buffer_database.py b/spinn_front_end_common/interface/buffer_management/storage_objects/buffer_database.py
index 3ea15bca23..c569c10992 100644
--- a/spinn_front_end_common/interface/buffer_management/storage_objects/buffer_database.py
+++ b/spinn_front_end_common/interface/buffer_management/storage_objects/buffer_database.py
@@ -258,7 +258,6 @@ def _set_core_name(self, cursor, x, y, p, core_name):
                 """
                 UPDATE core SET core_name = ?
                 WHERE x = ? AND y = ? and processor = ?
-
                 """, (core_name, x, y, p))
 
diff --git a/spinn_front_end_common/interface/interface_functions/placements_provenance_gatherer.py b/spinn_front_end_common/interface/interface_functions/placements_provenance_gatherer.py
index fb9f65e2b5..41ef888afc 100644
--- a/spinn_front_end_common/interface/interface_functions/placements_provenance_gatherer.py
+++ b/spinn_front_end_common/interface/interface_functions/placements_provenance_gatherer.py
@@ -18,7 +18,7 @@
 from spinn_utilities.log import FormatAdapter
 from spinn_utilities.progress_bar import ProgressBar
 from spinn_front_end_common.interface.provenance import (
-    AbstractProvidesProvenanceDataFromMachine, ProvenanceWriter)
+    AbstractProvidesProvenanceDataFromMachine)
 
diff --git a/unittests/interface/buffer_management/test_buffered_database.py b/unittests/interface/buffer_management/test_buffered_database.py
index 0f7c1b7008..3733b89b29 100644
--- a/unittests/interface/buffer_management/test_buffered_database.py
+++ b/unittests/interface/buffer_management/test_buffered_database.py
@@ -67,4 +67,4 @@ def test_placements(self):
         label = db.get_core_name(1, 2, 5)
         self.assertEqual("V2", label)
         label = db.get_core_name(4, 3, 0)
-        self.assertEqual("SCAMP(OS)_4:3", label)
\ No newline at end of file
+        self.assertEqual("SCAMP(OS)_4:3", label)

From 28430cf981620eff36c33c6c439d2b0eabec2f91 Mon Sep 17 00:00:00 2001
From: "Christian Y. Brenninkmeijer"
Date: Thu, 27 Oct 2022 11:45:31 +0100
Subject: [PATCH 41/49] fixes after merge

---
 .../interface/abstract_spinnaker_base.py       |  1 -
 .../storage_objects/buffer_database.py         | 58 +++++++++++++++++++++-
 .../report_functions/chip_active_report.py     |  2 +-
 unittests/utilities/buffer.sqlite3             | Bin 32768 -> 73728 bytes
 4 files changed, 58 insertions(+), 3 deletions(-)

diff --git a/spinn_front_end_common/interface/abstract_spinnaker_base.py b/spinn_front_end_common/interface/abstract_spinnaker_base.py
index f0d82455e0..e222cdfbc2 100644
--- a/spinn_front_end_common/interface/abstract_spinnaker_base.py
+++ b/spinn_front_end_common/interface/abstract_spinnaker_base.py
@@ -2288,7 +2288,6 @@ def _execute_prepare_chip_power(self):
             if timer.skip_if_virtual_board():
                 return
             db = BufferDatabase()
-            db.store_placements()
             db.store_chip_power_monitors()
             db.close()
 
diff --git a/spinn_front_end_common/interface/buffer_management/storage_objects/buffer_database.py b/spinn_front_end_common/interface/buffer_management/storage_objects/buffer_database.py
index 1a2262e154..a752d5b898 100644
--- a/spinn_front_end_common/interface/buffer_management/storage_objects/buffer_database.py
+++ b/spinn_front_end_common/interface/buffer_management/storage_objects/buffer_database.py
@@ -280,4 +280,60 @@ def get_core_name(self, x, y, p):
                     FROM core
                     WHERE x = ? AND y = ? and processor = ?
                     """, (x, y, p)):
-                return str(row["core_name"], 'utf8')
+                if row["core_name"]:
+                    return str(row["core_name"], 'utf8')
+                else:
+                    return None
+
+    def store_chip_power_monitors(self):
+        # delayed import due to circular references
+        from spinn_front_end_common.utility_models. \
+            chip_power_monitor_machine_vertex import (
+            ChipPowerMonitorMachineVertex)
+
+        with self.transaction() as cursor:
+            for _ in cursor.execute(
+                    """
+                    SELECT name FROM sqlite_master
+                    WHERE type='table' AND name='chip_power_monitor'
+                    """):
+                # Already exists so no need to run again
+                return
+
+            cursor.execute(
+                """
+                CREATE TABLE chip_power_monitors(
+                    cpm_id INTEGER PRIMARY KEY autoincrement,
+                    core_id INTEGER NOT NULL
+                        REFERENCES core(core_id) ON DELETE RESTRICT,
+                    sampling_frequency FLOAT NOT NULL)
+                """)
+
+            cursor.execute(
+                """
+                CREATE VIEW chip_power_monitors_view AS
+                    SELECT core_id, x, y, processor, sampling_frequency
+                    FROM core NATURAL JOIN chip_power_monitors
+                """)
+
+            for placement in FecDataView.iterate_placements_by_vertex_type(
+                    ChipPowerMonitorMachineVertex):
+                core_id = self._get_core_id(
+                    cursor, placement.x, placement.y, placement.p)
+                cursor.execute(
+                    """
+                    INSERT INTO chip_power_monitors(
+                        core_id, sampling_frequency)
+                    VALUES (?, ?)
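+                    -- one row per placed power-monitor core; the bound
+                    -- frequency comes from placement.vertex.sampling_frequency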
+                    """, (core_id, placement.vertex.sampling_frequency))
+                assert cursor.rowcount == 1
+
+    def iterate_chip_power_monitor_cores(self):
+        with self.transaction() as cursor:
+            for row in cursor.execute(
+                    """
+                    SELECT x, y, processor, sampling_frequency
+                    FROM chip_power_monitors_view
+                    ORDER BY core_id
+                    """):
+                yield row
diff --git a/spinn_front_end_common/utilities/report_functions/chip_active_report.py b/spinn_front_end_common/utilities/report_functions/chip_active_report.py
index 6b4d872c41..3566e1e2b3 100644
--- a/spinn_front_end_common/utilities/report_functions/chip_active_report.py
+++ b/spinn_front_end_common/utilities/report_functions/chip_active_report.py
@@ -81,7 +81,7 @@ def __write_report(f, buffer_path):
             energy_factor = time_for_recorded_sample * milliwatts
 
             for core in range(0, 18):
-                label = db.get_label(row["x"], row["y"], core)
+                label = db.get_core_name(row["x"], row["y"], core)
                 if (active_sums[core] > 0) or label:
                     f.write(
                         f"processor {row['x']}:{row['y']}:{core}({label})"
diff --git a/unittests/utilities/buffer.sqlite3 b/unittests/utilities/buffer.sqlite3
index c36eac0b3c4c44e26dd85f05342ac4af426cf732..ceb648e1248c608908d5719b2febcb20ee1147ee 100644
GIT binary patch
literal 73728
[base85-encoded binary payload not reproduced]

delta 96
[base85-encoded binary payload not reproduced]

From ecc956d0ad2e4ea2cf31190c84388f91df638e67 Mon Sep 17 00:00:00 2001
From: "Christian Y. Brenninkmeijer"
Date: Thu, 27 Oct 2022 12:11:47 +0100
Subject: [PATCH 42/49] get_reset_str

---
 spinn_front_end_common/data/fec_data_view.py | 22 +++++++++++++++++++
 .../utilities/base_database.py               |  7 ++----
 unittests/data/test_simulator_data.py        |  2 ++
 3 files changed, 26 insertions(+), 5 deletions(-)

diff --git a/spinn_front_end_common/data/fec_data_view.py b/spinn_front_end_common/data/fec_data_view.py
index f0d179312b..db9230bff5 100644
--- a/spinn_front_end_common/data/fec_data_view.py
+++ b/spinn_front_end_common/data/fec_data_view.py
@@ -436,6 +436,28 @@ def get_reset_number(cls):
             raise cls._exception("run_number")
         return cls.__fec_data._reset_number
 
+    @classmethod
+    def get_reset_str(cls):
+        """
+        Get the number of times a reset has happened, as a string. Zero as ""
+
+        Only counts the first reset after each run.
+
+        So resets that are first soft then hard are ignored.
+        Double reset calls without a run and resets before run are ignored.
+
+        Reset numbers start at zero
+
+        :raises ~spinn_utilities.exceptions.SpiNNUtilsException:
+            If the run_number is currently unavailable
+        """
+        if cls.__fec_data._reset_number is None:
+            raise cls._exception("run_number")
+        if cls.__fec_data._reset_number:
+            return str(cls.__fec_data._reset_number)
+        else:
+            return ""
+
     # run number
 
     @classmethod
diff --git a/spinn_front_end_common/utilities/base_database.py b/spinn_front_end_common/utilities/base_database.py
index 57a48c94e2..611868c993 100644
--- a/spinn_front_end_common/utilities/base_database.py
+++ b/spinn_front_end_common/utilities/base_database.py
@@ -66,12 +66,9 @@ def __init__(self, database_file=None, *, read_only=False,
 
     @classmethod
     def default_database_file(cls):
-        if FecDataView.get_reset_number():
-            return os.path.join(
+        return os.path.join(
                 FecDataView.get_run_dir_path(),
-                f"data{FecDataView.get_reset_number()}.sqlite3")
-        return os.path.join(
-            FecDataView.get_run_dir_path(), "data.sqlite3")
+                f"data{FecDataView.get_reset_str()}.sqlite3")
diff --git a/unittests/data/test_simulator_data.py b/unittests/data/test_simulator_data.py
index 51695d31cf..9d29214a3e 100644
--- a/unittests/data/test_simulator_data.py
+++ b/unittests/data/test_simulator_data.py
@@ -248,6 +248,7 @@ def test_directories_reset(self):
         run_dir = FecDataView.get_run_dir_path()
         self.assertIn("run_1", run_dir)
         self.assertEqual(0, writer.get_reset_number())
+        self.assertEqual("", writer.get_reset_str())
         writer.start_run()
         run_dir = FecDataView.get_run_dir_path()
         self.assertIn("run_1", run_dir)
@@ -263,6 +264,7 @@ def test_directories_reset(self):
         self.assertEqual(0, writer.get_reset_number())
         writer.hard_reset()
         self.assertEqual(1, writer.get_reset_number())
+        self.assertEqual("1", writer.get_reset_str())
         run_dir = FecDataView.get_run_dir_path()
         self.assertIn("run_3", run_dir)
         writer.start_run()

From df44eeaacb194b274f121efc6f42a01cab505c32 Mon Sep 17 00:00:00 2001
From: "Christian Y.
Brenninkmeijer" Date: Thu, 27 Oct 2022 12:23:32 +0100 Subject: [PATCH 43/49] new report each reset --- .../utilities/report_functions/chip_active_report.py | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/spinn_front_end_common/utilities/report_functions/chip_active_report.py b/spinn_front_end_common/utilities/report_functions/chip_active_report.py index 3566e1e2b3..e67e342067 100644 --- a/spinn_front_end_common/utilities/report_functions/chip_active_report.py +++ b/spinn_front_end_common/utilities/report_functions/chip_active_report.py @@ -30,9 +30,6 @@ #: converter between joules to kilowatt hours JOULES_TO_KILOWATT_HOURS = 3600000 -# energy report file name -CHIP_ACTIVE_FILENAME = "chip_active_report.rpt" - def write_chip_active_report(report_path=None, buffer_path=None): """ Writes the report. @@ -46,12 +43,12 @@ def write_chip_active_report(report_path=None, buffer_path=None): """ if report_path is None: try: - report_dir = FecDataView.get_run_dir_path() report_path = os.path.join( - report_dir, CHIP_ACTIVE_FILENAME) + FecDataView.get_run_dir_path(), + f"chip_active_report{FecDataView.get_reset_str()}.rpt") except SpiNNUtilsException: report_path = os.path.join( - os.path.curdir, CHIP_ACTIVE_FILENAME) + os.path.curdir, "chip_active_report.rpt") logger.warning(f"no report_path so writing to {report_path}") # create detailed report From d2b334d8cbcbb37b770fe01b9893f2e66b2d3055 Mon Sep 17 00:00:00 2001 From: "Christian Y. Brenninkmeijer" Date: Thu, 27 Oct 2022 13:53:43 +0100 Subject: [PATCH 44/49] flake8 --- spinn_front_end_common/utilities/base_database.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/spinn_front_end_common/utilities/base_database.py b/spinn_front_end_common/utilities/base_database.py index 611868c993..2a6beccb17 100644 --- a/spinn_front_end_common/utilities/base_database.py +++ b/spinn_front_end_common/utilities/base_database.py @@ -66,9 +66,8 @@ def __init__(self, database_file=None, *, read_only=False, @classmethod def default_database_file(cls): - return os.path.join( - FecDataView.get_run_dir_path(), - f"data{FecDataView.get_reset_str()}.sqlite3") + return os.path.join(FecDataView.get_run_dir_path(), + f"data{FecDataView.get_reset_str()}.sqlite3") def _get_core_id(self, cursor, x, y, p): """ From 5ba870a6ddd6d4d9727776783cb11ac141e95e4a Mon Sep 17 00:00:00 2001 From: "Christian Y. 
Brenninkmeijer"
Date: Thu, 27 Oct 2022 15:57:30 +0100
Subject: [PATCH 45/49] replace buffer.sqlite3 with data.sqlite3 in tests

---
 .../{buffer.sqlite3 => data.sqlite3}    | Bin 73728 -> 73728 bytes
 unittests/utilities/test_chip_active.py |   4 ++--
 2 files changed, 2 insertions(+), 2 deletions(-)
 rename unittests/utilities/{buffer.sqlite3 => data.sqlite3} (89%)

diff --git a/unittests/utilities/buffer.sqlite3 b/unittests/utilities/data.sqlite3
similarity index 89%
rename from unittests/utilities/buffer.sqlite3
rename to unittests/utilities/data.sqlite3
index ceb648e1248c608908d5719b2febcb20ee1147ee..3d85d3ef85a212dffebe9f227e4e1415c3ff7f7e 100644
GIT binary patch
delta 2612
[base85-encoded binary payload not reproduced]
delta 96
[base85-encoded binary payload not reproduced]

diff --git a/unittests/utilities/test_chip_active.py b/unittests/utilities/test_chip_active.py
index 422396e9c1..04dc86546e 100644
--- a/unittests/utilities/test_chip_active.py
+++ b/unittests/utilities/test_chip_active.py
@@ -43,7 +43,7 @@ def test_db_only(self):
             writer.set_run_dir_path("THIS DIRECTORY DOES NOT EXIST")
         except InvalidDirectory:
             pass
-        db_path = os.path.join(os.path.dirname(__file__), "buffer.sqlite3")
+        db_path = os.path.join(os.path.dirname(__file__), "data.sqlite3")
         write_chip_active_report(buffer_path=db_path)
 
     def test_all_params(self):
@@ -53,7 +53,7 @@ def test_all_params(self):
             writer.set_run_dir_path("THIS DIRECTORY DOES NOT EXIST")
         except InvalidDirectory:
             pass
-        db_path = os.path.join(os.path.dirname(__file__), "buffer.sqlite3")
+        db_path = os.path.join(os.path.dirname(__file__), "data.sqlite3")
         report = os.path.join(os.path.dirname(__file__), "my_active.rpt")
         write_chip_active_report(report_path=report, buffer_path=db_path)

From 85a28e02e4918422e53ef16c2802f3a457ef0c2a Mon Sep 17 00:00:00 2001
From: "Christian Y. Brenninkmeijer"
Date: Thu, 27 Oct 2022 16:03:15 +0100
Subject: [PATCH 46/49] flake8

---
 .../buffer_management/storage_objects/buffer_database.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/spinn_front_end_common/interface/buffer_management/storage_objects/buffer_database.py b/spinn_front_end_common/interface/buffer_management/storage_objects/buffer_database.py
index a752d5b898..fb071bf5b7 100644
--- a/spinn_front_end_common/interface/buffer_management/storage_objects/buffer_database.py
+++ b/spinn_front_end_common/interface/buffer_management/storage_objects/buffer_database.py
@@ -289,7 +289,7 @@ def store_chip_power_monitors(self):
         # delayed import due to circular references
         from spinn_front_end_common.utility_models. \
             chip_power_monitor_machine_vertex import (
-            ChipPowerMonitorMachineVertex)
+                ChipPowerMonitorMachineVertex)

From 0a1f4d6c144fe429fc54c7c4012cc9fe38caa11d Mon Sep 17 00:00:00 2001
From: "Christian Y. Brenninkmeijer"
Date: Fri, 28 Oct 2022 08:20:57 +0100
Subject: [PATCH 47/49] pass in Class type to avoid ugly import

---
 .../interface/abstract_spinnaker_base.py      |  4 +-
 .../storage_objects/buffer_database.py        | 47 ++++++++++++-------
 2 files changed, 32 insertions(+), 19 deletions(-)

diff --git a/spinn_front_end_common/interface/abstract_spinnaker_base.py b/spinn_front_end_common/interface/abstract_spinnaker_base.py
index e222cdfbc2..5471ff35c3 100644
--- a/spinn_front_end_common/interface/abstract_spinnaker_base.py
+++ b/spinn_front_end_common/interface/abstract_spinnaker_base.py
@@ -116,7 +116,7 @@
 from spinn_front_end_common.utilities.iobuf_extractor import IOBufExtractor
 from spinn_front_end_common.utilities.utility_objs import ExecutableType
 from spinn_front_end_common.utility_models import (
-    DataSpeedUpPacketGatherMachineVertex)
+    DataSpeedUpPacketGatherMachineVertex, ChipPowerMonitorMachineVertex)
 from spinn_front_end_common.utilities.report_functions.reports import (
     generate_comparison_router_report, partitioner_report,
     placer_reports_with_application_graph,
@@ -2288,7 +2288,7 @@ def _execute_prepare_chip_power(self):
             if timer.skip_if_virtual_board():
                 return
             db = BufferDatabase()
-            db.store_chip_power_monitors()
+            db.store_chip_power_monitors(ChipPowerMonitorMachineVertex)
             db.close()
 
diff --git a/spinn_front_end_common/interface/buffer_management/storage_objects/buffer_database.py b/spinn_front_end_common/interface/buffer_management/storage_objects/buffer_database.py
index fb071bf5b7..c18dd8ce90 100644
--- a/spinn_front_end_common/interface/buffer_management/storage_objects/buffer_database.py
+++ b/spinn_front_end_common/interface/buffer_management/storage_objects/buffer_database.py
@@ -261,6 +261,10 @@ def _set_core_name(self, cursor, x, y, p, core_name):
                 """, (core_name, x, y, p))
 
     def store_vertex_labels(self):
+        """
+        Stores the names of all cores, including monitors
+
+        """
        with self.transaction() as cursor:
            for placement in FecDataView.iterate_placemements():
                self._set_core_name(cursor, placement.x, placement.y,
@@ -273,6 +277,15 @@ def store_vertex_labels(self):
                             f"SCAMP(OS)_{chip.x}:{chip.y}")
 
     def get_core_name(self, x, y, p):
+        """
+        Returns the name of the Vertex or monitor running on the core
+
+        :param int x:
+        :param int y:
+        :param int p:
+        :return: The Vertex name, a monitor name or None if nothing running
+        :rtype: str or None
+        """
         with self.transaction() as cursor:
             for row in cursor.execute(
                 """
@@ -285,24 +298,17 @@ def get_core_name(self, x, y, p):
                 else:
                     return None
 
-    def store_chip_power_monitors(self):
+    def store_chip_power_monitors(self, monitor_class):
+        """
+        Store the existence and sampling frequency of all chip power monitors
 
+        :param Class monitor_class: The Class of chip power monitors.
+            This is a parameter to avoid circular or ugly importing here
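+
+        A minimal usage sketch (mirroring the updated call in
+        ``_execute_prepare_chip_power`` above)::
+
+            db = BufferDatabase()
+            db.store_chip_power_monitors(ChipPowerMonitorMachineVertex)
+            db.close()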
+        """
         with self.transaction() as cursor:
             cursor.execute(
                 """
-                CREATE TABLE chip_power_monitors(
+                CREATE TABLE IF NOT EXISTS chip_power_monitors(
                     cpm_id INTEGER PRIMARY KEY autoincrement,
                     core_id INTEGER NOT NULL
                         REFERENCES core(core_id) ON DELETE RESTRICT,
@@ -311,24 +317,31 @@ def store_chip_power_monitors(self):
 
             cursor.execute(
                 """
-                CREATE VIEW chip_power_monitors_view AS
+                CREATE VIEW IF NOT EXISTS chip_power_monitors_view AS
                     SELECT core_id, x, y, processor, sampling_frequency
                     FROM core NATURAL JOIN chip_power_monitors
                 """)
 
             for placement in FecDataView.iterate_placements_by_vertex_type(
-                    ChipPowerMonitorMachineVertex):
+                    monitor_class):
                 core_id = self._get_core_id(
                     cursor, placement.x, placement.y, placement.p)
                 cursor.execute(
                     """
-                    INSERT INTO chip_power_monitors(
+                    REPLACE INTO chip_power_monitors(
                         core_id, sampling_frequency)
                     VALUES (?, ?)
                     """, (core_id, placement.vertex.sampling_frequency))
                 assert cursor.rowcount == 1
 
     def iterate_chip_power_monitor_cores(self):
+        """
+        Iterates over the chip power monitors
+
+        :return: iterates of dict-like objects containing "x", "y", "processor"
+        and "sampling_frequency" fields
+        :rtype: sqlite3.Row
+        """
         with self.transaction() as cursor:
             for row in cursor.execute(
                 """

From 3cb34f3adaa3ffea3c54653f76fac84f884dfef8 Mon Sep 17 00:00:00 2001
From: "Christian Y. Brenninkmeijer"
Date: Fri, 28 Oct 2022 08:31:37 +0100
Subject: [PATCH 48/49] doc spacing

---
 .../buffer_management/storage_objects/buffer_database.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/spinn_front_end_common/interface/buffer_management/storage_objects/buffer_database.py b/spinn_front_end_common/interface/buffer_management/storage_objects/buffer_database.py
index c18dd8ce90..a10ee0044f 100644
--- a/spinn_front_end_common/interface/buffer_management/storage_objects/buffer_database.py
+++ b/spinn_front_end_common/interface/buffer_management/storage_objects/buffer_database.py
@@ -245,7 +245,6 @@ def _set_core_name(self, cursor, x, y, p, core_name):
         :param int y:
         :param int p:
         :param str core_name:
-
         """
         try:
             cursor.execute(

From 0b27d38e50c66ef7c676a0e797f4b727dca8f897 Mon Sep 17 00:00:00 2001
From: "Christian Y. Brenninkmeijer"
Date: Fri, 28 Oct 2022 08:41:50 +0100
Subject: [PATCH 49/49] doc spacing

---
 .../buffer_management/storage_objects/buffer_database.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/spinn_front_end_common/interface/buffer_management/storage_objects/buffer_database.py b/spinn_front_end_common/interface/buffer_management/storage_objects/buffer_database.py
index a10ee0044f..497944d893 100644
--- a/spinn_front_end_common/interface/buffer_management/storage_objects/buffer_database.py
+++ b/spinn_front_end_common/interface/buffer_management/storage_objects/buffer_database.py
@@ -338,7 +338,7 @@ def iterate_chip_power_monitor_cores(self):
         Iterates over the chip power monitors
 
         :return: iterates of dict-like objects containing "x", "y", "processor"
-        and "sampling_frequency" fields
+            and "sampling_frequency" fields
         :rtype: sqlite3.Row
         """
         with self.transaction() as cursor:
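With the series applied, BufferDatabase is the one place that records core names and
chip power monitor placements. A minimal read-back sketch of the resulting API,
assuming FecDataView has been populated as test_buffered_database.py does (via
FecDataWriter.mock() and set_placements; names as defined in the patches above):

    from pacman.model.graphs.machine import SimpleMachineVertex
    from pacman.model.placements import Placement, Placements
    from spinn_front_end_common.data.fec_data_writer import FecDataWriter
    from spinn_front_end_common.interface.buffer_management.storage_objects \
        import BufferDatabase

    # Mocked data view with a single placed vertex, as in the unit tests
    writer = FecDataWriter.mock()
    placements = Placements([])
    placements.add_placement(
        Placement(SimpleMachineVertex(None, label="V1"), 1, 2, 3))
    writer.set_placements(placements)

    with BufferDatabase() as db:
        db.store_vertex_labels()          # vertex labels plus SCAMP monitors
        print(db.get_core_name(1, 2, 3))  # -> "V1"

(iterate_chip_power_monitor_cores likewise yields rows only after
store_chip_power_monitors has been called, since that call creates the table.)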