[General] Add API docs and pydantic for Data Dict
Fix schedule for latex
parent e474c140ee
commit fb28c6c5c5
11 changed files with 186 additions and 57 deletions
.gitignore (vendored): 7 changes

@@ -1,4 +1,7 @@
# project stuff
documents/api
**/generated_assets/

# Typical Python stuff:
# Byte-compiled / optimized / DLL files
__pycache__/

@@ -160,3 +163,7 @@ cython_debug/
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/

# Sphinx
**/_build
**/_generated

.vscode/settings.json (vendored): 18 changes

@@ -5,4 +5,22 @@
"source.organizeImports": "explicit"
}
},
"files.watcherExclude": {
"**/_build/**": true,
"**/__pycache__": true
},
"search.exclude": {
"**/__pycache__": true,
"**/.pytest_cache": true,
"**/.mypy_cache": true,
"**/build": true,
"**/dist": true,
"**/_build": true,
"**/_build/**": true
},
"files.exclude": {
"**/_build": true,
"**/*.egg-info": true,
"**/__pycache__": true
}
}

documents/_static/data-hub.png: new binary file (128 KiB), not shown

@@ -10,9 +10,11 @@
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
import os
import sys

SRC = os.path.abspath("../src")
sys.path.insert(0, SRC)
import datetime

# -- Project information -----------------------------------------------------

@@ -39,11 +41,19 @@ extensions = [
"sphinx.ext.imgmath",
"sphinxcontrib.datatemplates",
"sphinxcontrib.mermaid",
"sphinxcontrib.apidoc",
"sphinx.ext.graphviz",
# "sphinx.ext.imgconverter", # SVG to png but rasterizes and bad
"sphinxcontrib.inkscapeconverter", # SVG to pdf without rasterizing
"sphinx_timeline",
"sphinx.ext.autodoc",
"sphinx.ext.napoleon",
"sphinx_autodoc_typehints",
"sphinx.ext.autosummary",
"sphinxcontrib.autodoc_pydantic",
"sphinx.ext.intersphinx",
"nrsk.schedule.load_schedule",
"nrsk.plant.plant_data_table",
]

# Add any paths that contain templates here, relative to this directory.

@@ -84,6 +94,28 @@ html_css_files = [
# https://sphinx-needs.readthedocs.io/en/latest/installation.html#plantuml-support
plantuml = "java -Djava.awt.headless=true -jar /usr/share/plantuml/plantuml.jar"

latex_engine = "xelatex"
latex_elements = {
# "fontenc": r"\usepackage[T2A]{fontenc}",
# "babel": r"\usepackage[english,russian]{babel}",
# "fontpkg": r"""
# \setmainfont{DejaVu Serif}
# \setsansfont{DejaVu Sans}
# \setmonofont{DejaVu Sans Mono}
# """,
"figure_align": "H",
"extraclassoptions": "openany",
#'\makeatletter\@openrightfalse\makeatother'
"extrapackages": r"""
\usepackage{fancyhdr}
\usepackage{etoolbox}
\usepackage{pdflscape}
\usepackage{tabulary}
""",
"preamble": r"""
\AtBeginEnvironment{figure}{\pretocmd{\hyperlink}{\protect}{}{}}
""",
}
# LaTeX document generation options
# doesn't work with sphinx-needs
latex_documents = [

@@ -123,6 +155,7 @@ latex_documents = [
# ]
rst_prolog = f"""
.. |inst| replace:: **{company_name}**
.. |project| replace:: **{project_name}**
"""

# will need to move relevant refs somewhere

@@ -138,3 +171,29 @@ mermaid_version = "10.6.1"
# Sphinx Needs config
needs_include_needs = True # turn off to hide all needs (e.g. for working docs)
needs_extra_options = ["basis"]

autodoc_typehints = "description"
autodoc_typehints_description_target = "all"
autodoc_default_options = {
"members": True,
"private-members": False,
"undoc-members": True,
"ignore-module-all": True,
}
autodoc_member_order = "bysource"

apidoc_module_dir = SRC
apidoc_module_first = True
apidoc_output_dir = "api"
apidoc_separate_modules = True

autodoc_pydantic_model_show_field_summary = True
autodoc_pydantic_model_show_validator_summary = True
autodoc_pydantic_field_doc_policy = "both"

set_type_checking_flag = True

intersphinx_mapping = {
"pydantic": ("https://docs.pydantic.dev/latest", None),
"python": ("https://docs.python.org/3", None),
}
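
The autodoc, apidoc, and autodoc_pydantic settings above are what render the Data Dict models into the API pages. As a rough illustration only (the class and field names below are invented, not taken from the nrsk source), a pydantic model documented under this configuration would have both its Field descriptions and any docstrings pulled into the docs, since autodoc_pydantic_field_doc_policy is set to "both"; EmailStr works because email-validator is added to the dependencies in pyproject.toml below.

from pydantic import BaseModel, EmailStr, Field


class PlantComponent(BaseModel):
    """One illustrative entry in the data dictionary (hypothetical model)."""

    tag: str = Field(description="Unique component tag, e.g. 'PMP-001'.")
    system: str = Field(description="Parent system the component belongs to.")
    owner_email: EmailStr = Field(description="Responsible engineer's email address.")
    design_pressure_mpa: float | None = Field(
        default=None, description="Design pressure in MPa, if applicable."
    )

With apidoc_output_dir = "api" (relative to the documentation root), the generated module stubs land under documents/api, which is presumably why that path was added to .gitignore in this commit.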

@@ -10,6 +10,39 @@ Glossary

.. glossary::

Configuration Management
The process of identifying and documenting the characteristics of a
facility's structures, systems and components (including computer
systems and software), and of ensuring that changes to these
characteristics are properly incorporated into the facility
documentation. :cite:p:`agencyInformationTechnologyNuclear2010`

Controlled document
Documents whose content is maintained uniform among the copies by an
administrative control system. The goal of controlling documents is to
ensure that work is performed using approved current information, not
obsolete information. Important documents to be controlled are uniquely
identified (including revision number, date, and specific copy number),
and distribution is formally controlled. Revisions to controlled
documents are uniquely tracked and implemented, including mandatory page
replacements and receipt acknowledgment. Controlled documents typically
include procedures for operations, surveillance, and maintenance, and
safety basis documents such as the SAR, and hazard and accident
analyses. :cite:p:`agencyInformationTechnologyNuclear2010`

Design basis
The range of conditions and events taken explicitly into account in the
design of a facility, according to established criteria, such that the
facility can withstand them without exceeding authorized limits by the
planned operation of safety systems.
:cite:p:`agencyInformationTechnologyNuclear2010`

Design control
Measures established to ensure that the information from design input
and design process documents for structures, systems, and components is
correctly translated into the final design.
:cite:p:`agencyInformationTechnologyNuclear2010`

Document
A written collection of information, instructions, drawings,
specifications, etc. that is *maintained* throughout the

@@ -9,10 +9,12 @@
purpose/index
organization/index
procedures/index
project/index
plant/index
schedule/index
bibliography
requirements/index
glossary
api/nrsk

@@ -5,7 +5,7 @@ This procedure governs the creation, maintenance, and retention of
:term:`Records <Record>` and :term:`Documents <Document>`.

.. impl:: Define processes for lifetime records
:links: R_GDC_1_4
:links: R_GDC_01_04

.. impl:: Define processes for Document Control
:links: R_APPB_45

@@ -96,3 +96,6 @@ satisfy the needs of a lower-level procedure.
data,
title='Record/Document types',
) }}

See Also
^^^^^^^^

@@ -24,7 +24,7 @@ and `RG 1.232 <https://www.nrc.gov/docs/ML1732/ML17325A611.pdf>`_.

.. needtable:: Appendix A summary
:filter: id.startswith("R_GDC")
:columns: id
:columns: id, title

.. include:: /generated_assets/10-cfr-50-app-a-list.rst

@@ -9,7 +9,7 @@ description = """\
and tools supporting efficient nuclear energy endeavors.\
"""
readme = "README.md"
requires-python = ">=3.9"
requires-python = ">=3.12"
dependencies = [
"openpyxl",
"pyyaml",

@@ -24,7 +24,7 @@ dependencies = [
"sphinxcontrib-bibtex >= 2.6.1",
"sphinxcontrib-glossaryused @ git+https://github.com/partofthething/glossaryused@bb321e6581b4c0618cd6dc4f1fd8355d314aee4d",
"sphinx-autobuild",
"sphinxcontrib.datatemplates",
"sphinxcontrib-datatemplates",
"sphinxcontrib-mermaid",
"sphinxcontrib-svg2pdfconverter",
"sphinx-timeline",

@@ -32,6 +32,12 @@ dependencies = [
"matplotlib",
"pandas",
"jpype1",
"ruamel-yaml>=0.18.16",
"pydantic>=2.12.5",
"sphinx-autodoc-typehints>=3.5.2",
"email-validator>=2.3.0",
"sphinxcontrib-apidoc>=0.6.0",
"autodoc-pydantic>=2.2.0",
]
classifiers = [
"Programming Language :: Python :: 3",

@@ -81,3 +87,8 @@ include_trailing_comma = true
force_grid_wrap = 0
line_length = 88
profile = "black"

[dependency-groups]
dev = [
"ipython>=8.18.1",
]

src/nrsk/schedule/__init__.py: new empty file

@@ -9,13 +9,11 @@ import logging
import os
import re
from datetime import datetime
from glob import glob
from pathlib import Path

import jpype
import jpype.imports
import matplotlib.dates as mdates
import matplotlib.pyplot as plt
import mpl_toolkits.axisartist as axisartist
import pandas as pd
import yaml
from docutils import nodes

@@ -29,18 +27,12 @@ logger.setLevel(logging.DEBUG)
# Start JVM with MPXJ jar
jpype.startJVM(classpath=["/home/nick/repos/mpxj/mpxj-lib/*"])

from java.io import File
from java.io import File # noqa: E402
from java.time import LocalDateTime # noqa: E402
from org.mpxj import ( # noqa: E402
Availability,
Duration,
FieldType,
ProjectFile,
Relation,
RelationType,
Resource,
TaskField,
TaskType,
TimeUnit,
)
from org.mpxj.cpm import MicrosoftScheduler, PrimaveraScheduler # noqa: E402

@@ -196,7 +188,7 @@ def _preprocess_plot(project):
return df, df_deps


def plot_schedule(
def plot_schedule( # noqa: C901
input_fname: str = "scheduled.xml", project=None, output_fname: str = "schedule.svg"
):
"""Generate plot of schedule."""

@@ -270,7 +262,7 @@ def plot_schedule(
plt.title("AMS High-Level Schedule")
# plt.tight_layout()
plt.savefig(output_fname)
plt.show()
# plt.show()


class ScheduleDirective(Directive):

@@ -281,65 +273,69 @@ class ScheduleDirective(Directive):

def run(self): # noqa: D102
env = self.state.document.settings.env
builder = env.app.builder
schedule_data = self.arguments[0]

schedule_data_abs = os.path.join(env.srcdir, schedule_data)
schedule_data_abs = Path(env.srcdir) / schedule_data

if not os.path.exists(schedule_data_abs):
if not schedule_data_abs.exists():
logger.error(f"Schedule file not found: {schedule_data_abs}")
return []

# Image output directory
gen_dir = os.path.join(env.app.srcdir, "generated_assets")
# put image within _static so html builder knows to copy it over.
gen_dir = Path(env.app.srcdir) / "_static" / "generated_assets"
ensuredir(gen_dir)
ensuredir(os.path.join(env.app.outdir, "_downloads"))

# Name of the generated file
base = os.path.splitext(os.path.basename(schedule_data))[0]
out_image = os.path.join(gen_dir, f"{base}.svg")
out_image = gen_dir / f"{base}.svg"

start_date = datetime(2026, 1, 1)
proj = load_from_yaml(fname=schedule_data)
solve_schedule(proj, start_date)
plot_schedule(project=proj, output_fname=out_image)
writer = UniversalProjectWriter(FileFormat.MSPDI)
writer.write(proj, os.path.join("_build", "_downloads", f"{base}_mspdi.xml"))
writer.write(proj, gen_dir / f"{base}_mspdi.xml")

env.note_dependency(schedule_data_abs)
rel = str(os.path.relpath(out_image, env.app.srcdir))
# trying to mock /generated_assets/schedule.svg for the build folder
# but it ends up in _images actually.
# somewhat hacky but works in subfolders
abs_rel = os.path.join("/", rel)
image_node = nodes.image(uri=abs_rel)
uri = builder.get_relative_uri(env.docname, "_images/" + f"{base}.svg")
uri = uri.replace(".html", "")

ref_node = nodes.reference("", "", refuri=uri)
ref_node += image_node
ref_node["target"] = "_blank"
ref_node["rel"] = "noopener"

uri_dl1 = builder.get_relative_uri(
env.docname, "_downloads/" + f"{base}_mspdi.xml"
)
uri_dl1 = uri_dl1.replace(".html", "")
download1 = nodes.reference(
text="Download schedule in MS Project XML format",
refuri=uri_dl1,
classes=["download-link"],
)

uri = f"/_static/generated_assets/{base}.svg"
image_node = nodes.image(uri=uri)
paragraph = nodes.paragraph()
paragraph += ref_node
paragraph += download1

# download link only makes sense in web env, not PDF
builder_name = self.state.document.settings.env.app.builder.name
if builder_name not in ("html", "singlehtml", "dirhtml"):
paragraph += image_node
else:
# add hyperlink to image. Since this may be called from a subdir we need
# relative paths that walk up appropriately.
docname = env.docname # subdir/mydoc
relative_root_path = "../" * docname.count(os.sep)
hyperlink_uri = relative_root_path + uri[1:]

# Result when docname is 'subdir/mydoc':
# hyperlink_uri will be: ../_static/generated_assets/my_diagram.svg
ref_node = nodes.reference("", "", refuri=hyperlink_uri)
ref_node += image_node
ref_node["target"] = "_blank"
ref_node["rel"] = "noopener"
paragraph += ref_node

# and hyperlink to schedule data
hyperlink_uri = (
relative_root_path + f"_static/generated_assets/{base}_mspdi.xml"
)
download1 = nodes.reference(
text="Download schedule in MS Project XML format",
refuri=hyperlink_uri,
classes=["download-link"],
)
paragraph += download1

return [paragraph]


def setup(app):
"""Setup for sphinx extension."""
def setup(app): # noqa: D103
app.add_directive("schedule", ScheduleDirective)

return {
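
For context on the builder-specific handling in ScheduleDirective.run() above: HTML-family builders get the image wrapped in a reference plus a download link, while other builders (notably latex) embed the image node directly. The snippet below is a standalone sketch, not part of the commit, restating the "../" walk-up logic the HTML branch uses to link assets from pages in subdirectories; the helper name is made up.

import os


def walk_up_prefix(docname: str) -> str:
    """Return the '../' prefix needed to reach the doc root from a page."""
    # Mirrors: relative_root_path = "../" * docname.count(os.sep)
    return "../" * docname.count(os.sep)


# A page at 'schedule/index' sits one level below the root, so its links to
# generated assets need a single '../' prefix:
assert walk_up_prefix("schedule/index") == "../"
assert walk_up_prefix("index") == ""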