Add initial data dict
This commit is contained in:
parent fb28c6c5c5
commit 36fcb5f260
19 changed files with 784 additions and 116 deletions
@@ -1,3 +1,14 @@
@techreport{barrieroInformationManagementProcess2010,
  title = {Information {{Management Process Description Guideline}}},
  author = {Barriero, Amy},
  year = 2010,
  number = {PDG01-2010},
  institution = {NIRMA},
  url = {https://international.anl.gov/training/materials/6H/Gilbert/PDG02%20Documents%20and%20Records%20Process%20Description.pdf},
  langid = {american},
  file = {/pool/Reading/Nuclear/institutions/nirma/PDG01 Information Management Process Description.pdf}
}

@misc{cahillDesignPhilosophyBrief2025,
  title = {Design {{Philosophy Brief}}},
  author = {Cahill, William},
@@ -6,6 +17,44 @@
  url = {https://maritimesai.kiteworks.com/#/file/8b92c7cb-4444-4a3e-aba6-fdf328f7d2f8?currentPage=1}
}

@techreport{cloverDocumentControlRecords2010,
  title = {Document {{Control}} and {{Records Management Process Description}}},
  author = {Clover, Bill},
  year = 2010,
  number = {PDG02-2010},
  institution = {NIRMA},
  url = {https://international.anl.gov/training/materials/6H/Gilbert/PDG02%20Documents%20and%20Records%20Process%20Description.pdf},
  langid = {american},
  file = {/pool/Reading/Nuclear/institutions/nirma/PDG02 Documents and Records Process Description.pdf}
}

@techreport{fleerReactorTechnologyStudy25,
  title = {Reactor {{Technology Study}}},
  author = {Fleer, D and Edens, A and Ciocco, S and Jacqueline, K},
  year = 2025,
  month = nov,
  number = {B4M-ES-121043},
  institution = {BWXT},
  url = {https://kiteworks.bwxt.com/web/file/416b69b9-4c5c-44c9-9605-40a25e181493?currentPage=1},
  copyright = {Export Controlled},
  file = {/home/nick/pool/Users/Nick/Documents/2025/What is Nuclear LLC/jobs/Marine/AMS docs/B4M-ES-121043_Rev001.pdf}
}

@techreport{halpinInformationManagementNuclear1978d,
  title = {Information Management for Nuclear Power Stations: Project Description},
  shorttitle = {Information Management for Nuclear Power Stations},
  author = {Halpin, D. W.},
  year = 1978,
  month = mar,
  number = {ORO-5270-1},
  institution = {Georgia Inst. of Tech., Atlanta (USA). School of Civil Engineering},
  doi = {10.2172/6543303},
  url = {https://www.osti.gov/biblio/6543303},
  abstract = {A study of the information management structure required to support nuclear power plant construction was performed by a joint university-industry group under the sponsorship of the Department of Energy (DOE), formerly the Energy Research and Development Administration (ERDA). The purpose of this study was (1) to study methods for the control of information during the construction and start-up of nuclear power plants, and (2) identify those data elements intrinsic to nuclear power plants which must be maintained in a structured format for quick access and retrieval. Maintenance of the massive amount of data needed for control of a nuclear project during design, procurement, construction, start-up/testing, and operational phases requires a structuring which allows immediate update and retrieval based on a wide variety of access criteria. The objective of the research described has been to identify design concepts which support the development of an information control system responsive to these requirements. A conceptual design of a Management Information Data Base System which can meet the project control and information exchange needs of today's large nuclear power plant construction projects has been completed and an approach recommended for development and implementation of a complete operational system.},
  langid = {english},
  file = {/pool/Reading/Nuclear/process/configuration management/Information Management for Nuclear Power Stations 1978/Halpin - 1978 - Information management for nuclear power stations project description.pdf}
}

@misc{imoCodeSafetyNuclear1982,
  title = {Code of {{Safety}} for {{Nuclear Merchant Ships}}},
  author = {IMO},
@@ -15,3 +64,27 @@
  publisher = {International Maritime Organization},
  url = {https://wwwcdn.imo.org/localresources/en/KnowledgeCentre/IndexofIMOResolutions/AssemblyDocuments/A.491(12).pdf}
}

@techreport{renuartAdvancedNuclearTechnology2014,
  title = {Advanced {{Nuclear Technology}}: {{Data-Centric Configuration Management}} for {{Efficiency}} and {{Cost Reduction}}: {{An Economic Basis}} for {{Implementation}}},
  author = {Renuart, R.},
  year = 2014,
  month = dec,
  number = {3002003126},
  pages = {170},
  institution = {EPRI},
  url = {https://www.epri.com/research/products/3002003126},
  abstract = {The Electric Power Research Institute (EPRI) Advanced Nuclear Technology (ANT) Program has been working on defining the tools that can be a part of an effective configuration management (CM) system. This includes the potential use of modern digital data management tools that can be useful not only across the plant life cycle, including engineering, procurement, construction (EPC), and decommissioning, but also for the management of plant configuration—control of the licensing basis, plant operation, and input and control of many plant programs.},
  langid = {american},
  file = {/home/nick/pool/Reading/Nuclear/process/configuration management/Advanced Nuclear Technology:
Data-Centric Configuration Management for
Efficiency and Cost Reduction 000000003002003126.pdf}
}

@misc{SQLModel,
  title = {{{SQLModel}}},
  url = {https://sqlmodel.tiangolo.com/},
  abstract = {SQLModel, SQL databases in Python, designed for simplicity, compatibility, and robustness.},
  langid = {english},
  file = {/home/nick/Zotero/storage/MA9HAJ52/sqlmodel.tiangolo.com.html}
}
@@ -1,5 +1,7 @@
---
rmdc-systems:
  # This file contains a listing of specific software systems
  # used to implement information management. Data from this file
  # is brought into procedures as appropriate.
  RMDC:
    - name: NukeVault
      description: Specialized commercial records management system
      use-cases: Storing Documents and Records generated during design of Project X
@@ -8,3 +10,5 @@ rmdc-systems:
      description: A place where our suppliers can get documents
      use-cases: External suppliers send documents/records to us
      location: Online
  Data Management:
    - name: Data Dictionary
@@ -18,7 +18,8 @@ sys.path.insert(0, SRC)
import datetime

# -- Project information -----------------------------------------------------
company_name = "Applied Maritime Sciences"
company_name = "Applied Maritime Sciences, LLC"
project_name = "Project 1959"
project = f"{company_name} Governing Documents"
author = company_name
release = "1.0"
@@ -1,4 +1,4 @@
- name: Component Cooling Water System
name: Component Cooling Water System
abbrev: CCW
functions:
  - >
@@ -1,4 +1,4 @@
- name: Reactor Compartment Ventilation System
name: Reactor Compartment Ventilation System
abbrev: RCV
safety class: II
functions:
@@ -1,4 +1,4 @@
- name: Fuel handling equipment
name: Fuel handling equipment
params:
  - name: Quantity
    val: 1
@@ -1,49 +0,0 @@
- name: Reactor vessel and closure head
  params:
    - name: Quantity
      val: 1
  tags: INTERFACE
- name: Primary coolant pump
  desc: Includes motors, coolers, valves, and piping
  params:
    - name: Quantity
      val: 4
  tags: INTERFACE
- name: Reactor vessel internals
  params:
    - name: Quantity
      val: 1
  tags: INTERFACE
- name: Steam generator
  params:
    - name: Quantity
      val: 12
  tags: INTERFACE
- name: Pressurizer
  desc: Pressurizer with spray and surge line
  params:
    - name: Quantity
      val: 1
  tags: INTERFACE
- name: Nuclear steam plant supports and restraints
  params:
    - name: Quantity
      val: 1
  tags: INTERFACE
- name: Control rod drive service structure
  params:
    - name: Quantity
      val: 1
  tags: INTERFACE
- name: Reactor coolant system insulation
  desc: >
    Includes insulation for:

    * Reactor vessel and closure head
    * Pressurizer
    * Surge and spray line piping
    * Reactor coolant pumps
  params:
    - name: Quantity
      val: 1
  tags: INTERFACE
@@ -0,0 +1,5 @@
Primary Coolant System
======================

.. datatemplate:yaml:: pcs.yaml
   :template: system.tmpl
@@ -0,0 +1,55 @@
name: Primary Coolant System
abbrev: PCS
functions:
  - Remove heat from the core during normal operation
  - Generate steam
equipment:
  - name: Reactor vessel and closure head
    params:
      - name: Quantity
        val: 1
    tags: INTERFACE
  - name: Primary coolant pump
    desc: Includes motors, coolers, valves, and piping
    params:
      - name: Quantity
        val: 4
    tags: INTERFACE
  - name: Reactor vessel internals
    params:
      - name: Quantity
        val: 1
    tags: INTERFACE
  - name: Steam generator
    params:
      - name: Quantity
        val: 12
    tags: INTERFACE
  - name: Pressurizer
    desc: Pressurizer with spray and surge line
    params:
      - name: Quantity
        val: 1
    tags: INTERFACE
  - name: Nuclear steam plant supports and restraints
    params:
      - name: Quantity
        val: 1
    tags: INTERFACE
  - name: Control rod drive service structure
    params:
      - name: Quantity
        val: 1
    tags: INTERFACE
  - name: Reactor coolant system insulation
    desc: >
      Includes insulation for:

      * Reactor vessel and closure head
      * Pressurizer
      * Surge and spray line piping
      * Reactor coolant pumps
    params:
      - name: Quantity
        val: 1
    tags: INTERFACE
@@ -1,5 +1,4 @@

- name: Control rod drive mechanisms
name: Control rod drive mechanisms
params:
  - name: Quantity
    val: 37
@@ -1,3 +1,5 @@
name: Shielding System
equipment:
  - name: Primary biological shielding
    desc: >
      Consists of shielding water tanks at
@@ -1,3 +1,4 @@
equipment:
  - name: Collision barrier
    params:
      - name: Quantity
@@ -9,3 +9,9 @@ Plant
   Ship/index
   Shipyard/index
   *


.. plant-data-table:: plant
   :columns: PBS, Value, Abbrev, Description, Tags
   :max-depth: 4
   :hide-empty:
127 documents/procedures/administration/information_management.rst Normal file
@@ -0,0 +1,127 @@

Information Management Plan
===========================

Purpose
-------
This plan is the highest-level description of how information is
managed at |inst|. It defines the information management requirements
and explains the chosen processes and tools that meet the requirements.

Scope
-----
This plan applies to the creation, storage, exchange, and retirement of project
information related to |project|. This includes plant configuration management
data as defined in :cite:p:`barrieroInformationManagementProcess2010`. The plan
is not limited to information affecting quality; it also includes business
information.


Background
----------
The potential benefits of the digital transformation are well known across all
business sectors. Numerous commercial nuclear information management studies
have further suggested that properly implemented information management can improve
efficiency and quality while reducing costs
:cite:p:`halpinInformationManagementNuclear1978d,agencyInformationTechnologyNuclear2010,barrieroInformationManagementProcess2010,renuartAdvancedNuclearTechnology2014`.
In addition, management of information related to product quality is subject
to nuclear quality regulations in all jurisdictions.

Requirements
------------

.. req:: Quality-related information shall be managed in accordance with 10 CFR 50 Appendix B
   :id: R_INFO_APPB
   :links: R_10CFR50_APPB
   :tags: quality
   :basis: Compliance with Appendix B is necessary for licensing

   Note that non-quality-related information is not necessarily subject
   to this requirement.

.. req:: A data dictionary shall be maintained defining controlled data
   :id: R_DATA_DICT
   :basis: It will provide a central reference for all project members to
      find specific, precise, and up-to-date data definitions to enable
      unambiguous communications and collaboration.

   The dictionary shall define data types, data fields, constraints on the
   fields, relationships between the data, source, sensitivity, usage,
   owner/steward, sample values, and transformation logic, as applicable. It
   shall be revision controlled such that changes can be clearly seen and
   remembered.

.. req:: Data shall be managed such that data exchanges and transformations between
   parties and systems can be readily automated
   :id: R_DATA_EXCHANGE
   :basis: Over the project life, numerous parties and systems will ramp up
      and down due to changing relationships and technologies. Automated data
      exchanges are expected to improve the ease, cost, speed, and quality of
      the inevitable exchanges and transformations.

   This effectively requires rich data import and export capabilities
   in each tool used to manage data.

.. req:: Data shall be subject to role-based access controls (RBAC) or stronger
   :id: R_DATA_ACCESS
   :basis: Role-based access control (RBAC) is a strong standard
      covering the needs of commercial nuclear information
      from export control and business sensitivity perspectives.

   More sensitive data, such as Security Related Information,
   may use stronger access controls such as ABAC or MAC.

Implementation
--------------
This section defines the specific implementation of the requirements.

General principles
^^^^^^^^^^^^^^^^^^
A hub data architecture has been chosen for this project, based on
arguments and experiences in :cite:p:`agencyInformationTechnologyNuclear2010`.

.. figure:: /_static/data-hub.png

   Hub architecture, from :cite:p:`agencyInformationTechnologyNuclear2010`

This is designed to enable rapid integration of a wide variety of partner
organizations, specialized information management tools, and engineering/design
tools while striving to future-proof the multi-decade project.

The underlying data layer consists of:

* Structured text (e.g. YAML, XML, JSON) controlled in version-controlled repositories
* Databases (e.g. Postgres)
* Documents/drawings (PDFs, native files, HTML) stored on corporate drives and managed
  by the Records Management/Document Control system
* Technical data (3D models, simulation input/output, laser scans, schedule dumps) stored
  on corporate drives, managed by the Technical Data Management system

Above the data layer sits the data authoring and manipulation layer, which includes:

* Office tools: word processors, spreadsheets, text editors, IDEs, etc., including
  online collaboration tools
* PM tools: Primavera P6, HR tools
* Engineering tools: SolidWorks, ANSYS, CASMO, MCNP, Intergraph, Revit
* Construction tools
* Maintenance tools

One-way or bidirectional data exchanges between tools and institutions occur
through the API, which reads the data layer and presents data representations to
authorized users or services in clearly-defined formats over the network.
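
As a rough illustration of such an exchange endpoint (a minimal sketch only: the
route, the ``System`` model, and the in-memory data below are hypothetical
placeholders rather than the project's actual API surface, and access controls
are omitted):

.. code-block:: python

   from fastapi import FastAPI
   from pydantic import BaseModel


   class System(BaseModel):
       """Stand-in for a data dictionary model such as SSC."""

       name: str
       abbrev: str


   app = FastAPI()
   _SYSTEMS = [System(name="Primary Coolant System", abbrev="PCS")]


   @app.get("/systems", response_model=list[System])
   def list_systems() -> list[System]:
       # Present data-layer content to callers in a well-defined format.
       return _SYSTEMS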

.. _info-mgmt-data-dict:

Data Dictionary
^^^^^^^^^^^^^^^
The data dictionary is defined and maintained as described in
:need:`I_DATA_DICT`.

The data dictionary itself is located at :ref:`data-dict`.


.. insert render of the data dictionary table here.

Technology stack
^^^^^^^^^^^^^^^^
.. insert render of the IT systems table here.
12 documents/schedule/index.rst Normal file
@@ -0,0 +1,12 @@
.. raw:: latex

   \begin{landscape}

Schedule
########

.. schedule:: _data/schedule.yaml

.. raw:: latex

   \end{landscape}
228 src/nrsk/models.py Normal file
@@ -0,0 +1,228 @@
"""
Define the Data Dictionary.

Implementation of Data Dictionary
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

.. impl:: Maintain the Data Dictionary base data using Pydantic
   :id: I_DATA_DICT
   :links: R_DATA_DICT

   The data dictionary is managed using Pydantic. Pydantic allows for
   concise Python code to richly define data models and their fields. From a single
   class definition, it provides data validation, automatic rich documentation (via
   a Sphinx plugin), an integration with FastAPI for data exchange, and
   relatively easy integration with SQLAlchemy for database persistence. Changes to
   the schema can be managed and controlled via the revision control system, and
   changes to a single source (the Python code) will automatically propagate to the
   rendered documentation and, potentially, to the database (e.g. using *alembic*).

   Using SQLAlchemy as the database engine enables wide flexibility in underlying
   database technology, including PostgreSQL, MySQL, SQLite, Oracle, and MS SQL
   Server. Pydantic models allow us to validate data loaded from a database,
   directly from a structured text file, or from JSON data delivered via the network.
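
A minimal sketch of that round trip (the ``Pump`` model and values below are
illustrative placeholders, not part of the controlled schema):

.. code-block:: python

   from pydantic import BaseModel


   class Pump(BaseModel):
       name: str
       quantity: int


   # The same class validates a dict parsed from YAML, a database row,
   # or JSON text received over the network.
   pump = Pump.model_validate({"name": "Primary coolant pump", "quantity": 4})
   print(pump.model_dump_json())  # {"name":"Primary coolant pump","quantity":4}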

Analysis of Alternatives
^^^^^^^^^^^^^^^^^^^^^^^^
SQLModel :cite:p:`SQLModel` was considered as the data layer base, but it was
determined to be less mature than pydantic and sqlalchemy, with inadequate
documentation related to field validation. Pydantic was therefore chosen
directly for schema definitions.

.. _data-dict:

Data Dictionary
^^^^^^^^^^^^^^^
This is the official Data Dictionary discussed in :ref:`the Information
Management Plan <info-mgmt-data-dict>`.
"""

import re
import uuid
from datetime import date, datetime
from typing import Annotated, Optional

from pydantic import (
    AnyUrl,
    BaseModel,
    EmailStr,
    Field,
    FieldValidationInfo,
    PositiveInt,
    ValidationError,
    field_validator,
)

ALL_CAPS = re.compile("^[A-Z]+$")
UUID_PK = Annotated[
    uuid.UUID,
    Field(
        default_factory=uuid.uuid4,
        description="The unique ID of this object. Used as a primary key in the database.",
        examples=["3fa85f64-5717-4562-b3fc-2c963f66afa6"],
        frozen=True,
        primary_key=True,
    ),
]


class User(BaseModel):
    """A person involved in the Project."""

    uuid: UUID_PK
    given_name: str
    family_name: str
    preferred_name: Optional[str] = None
    email: EmailStr
    joined_on: Optional[datetime]
    deactivated_on: Optional[datetime]


class OpenItem(BaseModel):
    uuid: UUID_PK
    name: str
    status: str
    created_on: datetime
    closed_on: Optional[datetime] = None


class SSC(BaseModel):
    """
    A Structure, System, or Component in the plant.

    This is a generic hierarchical object that can represent plants, units,
    buildings and their structures, systems, subsystems, components,
    subcomponents, etc.

    A physical tree of buildings/structures/rooms may have overlapping
    contents in terms of systems/components/equipment/parts.
    """

    uuid: UUID_PK
    name: str
    pbs_code: Optional[str] = Field(
        description="An integer sequence that determines the 'system number' and also the ordering in printouts",
        examples=["1.2.3", "20.5.11"],
        default="",
    )
    """PBS code is tied closely to the structure of the PBS, obviously. If 1.2
    is a category level, that's ok, but that doesn't imply that the second level
    of PBS 2 is also a category level; it may be systems.
    Since this can change in major PBS reorganizations, it should not be used
    for cross-referencing (use UUID).
    """

    abbrev: str = Field(
        description="A human-friendly abbreviation uniquely defining the system"
    )
    parent: Optional["SSC"] = None
    functions: Optional[list[str]] = Field(
        description="Functions of this system", default=None
    )

    @field_validator("abbrev")
    @classmethod
    def abbrev_must_be_all_caps(cls, v: str, info: FieldValidationInfo) -> str:  # noqa: D102
        assert re.match(ALL_CAPS, v), f"{info.field_name} must be all CAPS"
        return v

    @field_validator("pbs_code")
    @classmethod
    def pbs_must_be_int_sequence(cls, v: str, info: FieldValidationInfo) -> str:  # noqa: D102
        assert not v or re.match(r"^(\d+\.?)+$", v), (
            f"{info.field_name} must be an integer sequence, like 1.2.3"
        )
        return v


class SystemsList(BaseModel):
    """A flat list of Systems in the plant.

    Can be used e.g. to render a snapshot of the Master Systems List.

    Does not include categories like "Nuclear Island" or "Primary Systems".

    We may want another structure that represents the whole tree in a
    well-defined manner, or we may want to add a 'path' attr
    to systems that defines where they live.
    """

    systems: list[SSC]


class ParamDef(BaseModel):
    """A parameter class defining an aspect of plant design."""

    uuid: UUID_PK
    name: str = Field(
        description="Name of parameter class", examples=["Nominal gross power"]
    )
    description: str = Field(
        description="Detailed description of what parameters of this type represent"
    )
    valid_units: Optional[list[str]] = Field(
        description="List of units allowed", examples=["MW", "W", "shp"], default=None
    )


class ParamVal(BaseModel):
    """A particular value of a Parameter, assigned to a particular SSC."""

    ssc: SSC
    pdef: ParamDef
    value: str
    units: Optional[str] = None
    pedigree: str = Field(
        description="Indication of how well it is known (rough estimate, final design, as-built)."
    )
    source: str = Field(description="Where this version of the value came from")


class ITSystem(BaseModel):
    """An IT system used by the project."""

    uuid: UUID_PK
    name: str
    vendor: str
    version: Optional[str] = None
    use_cases: list[str] = Field(
        description="One or more use cases this system is used for.",
        examples=[
            [
                "Document management",
            ]
        ],
    )
    physical_location: str = Field(description="Where the system is physically located")
    url: Optional[AnyUrl] = Field(description="Full URL to the system", default=None)
    custodian: Optional[User] = Field(
        description="Person currently in charge of system", default=None
    )
    launched_on: Optional[datetime] = None
    retired_on: Optional[datetime] = None
    quality_related: bool


class Document(BaseModel):
    uuid: UUID_PK
    title: str = Field(
        description="Descriptive title explaining the contents",
        examples=["CNSG Development and Status 1966-1977"],
    )
    """
    .. impl:: Document title

       This is how doc titles are done.
    """
    revision: str = Field(
        description="Revision number",
        examples=["0", "1", "1a", "A"],
    )
    type: str
    originators: list[str]
    status: str

    @field_validator("type")
    @classmethod
    def type_must_be_valid(cls, v: str, info: FieldValidationInfo) -> str:
        assert v in ["CALC", "PROC"], (
            f"{info.field_name} must be within the list of doctypes"
        )
        return v
0 src/nrsk/plant/__init__.py Normal file
66 src/nrsk/plant/load_plant_data.py Normal file
@@ -0,0 +1,66 @@
"""
Read plant information like systems, equipment, & params from a folder structure.

This reads it into the standard data structures defined via Pydantic,
which can then be used for any other purpose (reporting, etc.).

The structure here is path/to/system where the folders define the
functional hierarchy (i.e. plant, 'island', system, subsystem).

Some files can exist in the hierarchy:

* System data files: *.yaml
* System documents: *.rst

The documents often make use of the data in the yaml file through
system-level (or other) ``datatemplate`` directives, e.g. to print
out a list of System Functions or Parameters.

This module parses the directory tree and YAML files, combining them into one
big tree of data.

Future considerations:

* It may make sense to have ``system.yaml`` (or ``equipment.yaml``) and
  ``parameters.yaml`` in each of these folders for longer-term efficient
  loading of just the Systems List vs. the entire Equipment List (which
  will end up being more efficient in a proper database). Alternatively,
  we could just statically render everything, which would be reasonably
  performant during reads. Maybe just keep system, equipment, and param
  info in the yaml file.
"""
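
# Illustrative example only (folder and file names below are hypothetical):
#
#   Plant/
#       Ship/
#           pcs.yaml   # e.g. name/abbrev/functions/equipment for the PCS
#
# load_yaml_tree("Plant") would then return something like:
#
#   {"Ship": {"name": "Primary Coolant System", "abbrev": "PCS", ...}}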

import logging
from pathlib import Path

from ruamel.yaml import YAML

logger = logging.getLogger(__name__)


def load_yaml_tree(root_path: str | Path) -> dict:
    """Load a directory tree of files to represent the Plant systems and params."""
    root_path = Path(root_path)
    yaml = YAML(typ="safe")
    tree = {}

    for root, dirs, files in root_path.walk():
        # Ensure empty folders get included in tree.
        current = tree
        rel = root.relative_to(root_path)
        parts = rel.parts
        logger.info(f"loading {parts}")
        # drill into the part of the tree where we are
        for part in parts:
            if part not in current:
                current[part] = {}
            current = current[part]
        for file in files:
            if file.endswith(".yaml"):
                data = yaml.load(root / file)
                current.update(data)
        if parts and not current:
            current.update({"name": parts[-1]})
        logger.info(f"{current}")
    return tree
138 src/nrsk/plant/plant_data_table.py Normal file
@@ -0,0 +1,138 @@
"""Sphinx directive that makes tables of Plant Data from the PBS tree.

Since individual system-level data can be nicely handled with datatemplates,
this custom directive just looks at the whole tree and makes the PBS
structure.

This is somewhat duplicative of the TOC directive in the Plant folder,
but the automatic sphinx numbering and lack of abbrev is a bit sad.
"""

import os
from pathlib import Path

from docutils import nodes
from docutils.parsers.rst.directives.tables import Table
from sphinx.util import logging

from nrsk.plant.load_plant_data import load_yaml_tree

logger = logging.getLogger("[plant_data_table]")


class PlantBreakdownStructureTable(Table):
    """Plant Breakdown Structure Table."""

    has_content = False
    required_arguments = 1
    optional_arguments = 0
    option_spec = {
        "start-node": str,
        "columns": lambda x: [c.strip() for c in x.split(",")],
        "max-depth": int,
        "hide-empty": lambda x: True,
    }

    def get_default_columns(self):
        return ["Path", "Value", "Tags"]

    def run(self):
        env = self.state.document.settings.env
        pbs_path = Path(env.srcdir) / Path(self.arguments[0])
        logger.info(f"[plant-data-table] Loading data from: {pbs_path}")

        if not pbs_path.exists():
            logger.warning(f"Input data not found: {pbs_path}")
            return [nodes.paragraph(text=f"PBS data not found: {pbs_path}")]

        data = load_yaml_tree(pbs_path)

        # Drill down to the optional start-node key path
        if "start-node" in self.options:
            keys = self.options["start-node"].split(".")
            logger.info(f"Using subkey: {keys}")
            for k in keys:
                data = data[k]

        max_depth = int(self.options.get("max-depth", 10))
        hide_empty = "hide-empty" in self.options
        columns = self.options.get("columns")
        if not columns:
            columns = self.get_default_columns()

        # Build table
        table_node = nodes.table()
        classes = table_node.get("classes", [])  # want table wider: this doesn't work
        classes.append("full-width")
        table_node["classes"] = classes
        tgroup = nodes.tgroup(cols=len(columns))
        table_node += tgroup

        # Header
        for _ in columns:
            tgroup += nodes.colspec(colwidth=10)
        head = nodes.thead()
        tgroup += head
        row = nodes.row()
        for col in columns:
            row += nodes.entry("", nodes.paragraph(text=col))
        head += row

        # Body
        tbody = nodes.tbody()
        tgroup += tbody

        def walk(obj, path="", depth=0):
            if depth >= max_depth:
                return
            if not isinstance(obj, dict):
                return
            for k, v in obj.items():
                current_path = f"{path}.{k}" if path else k
                if hide_empty and self.is_empty(v):
                    continue
                if not isinstance(v, dict):
                    continue
                self.add_row(tbody, columns, current_path, v, depth)
                if "functions" not in obj:
                    # stop if you hit a system with functions
                    walk(v, current_path, depth + 1)

        walk(data)

        return [table_node]

    def is_empty(self, value):
        return value in ({}, [], "", None)

    def add_row(self, tbody, columns, path, value, depth):
        """Add a row to the table."""
        row = nodes.row()
        indent = " " * depth * 2  # indentation to show nesting depth

        cols = []
        cols.append(path)  # path
        cols.append(indent + value.get("name", "(noname)"))
        cols.append(value.get("abbrev", ""))
        cols.append(value.get("desc", ""))
        cols.append(value.get("tags", ""))

        for col in cols:
            entry = nodes.entry()
            para = nodes.paragraph()
            para += nodes.Text(col)
            entry += para
            row += entry

        tbody += row


def setup(app):
    """Setup for sphinx extension."""
    app.add_directive("plant-data-table", PlantBreakdownStructureTable)

    return {
        "version": "0.1",
        "parallel_read_safe": True,
        "parallel_write_safe": True,
    }