Skip to content

Commit 2ff14e7

Browse files
authored
✨ Add ids option for needimport (#1292)
A more performant alternative to the `filter` option. This PR also adds some other performance optimisations: caching the schema load and not reading the import file twice (during validation). It also adds to and improves the `needimport` tests.
1 parent 44d7db9 commit 2ff14e7

File tree

9 files changed

+2503
-98
lines changed

9 files changed

+2503
-98
lines changed

docs/directives/needimport.rst

+13-1
Original file line numberDiff line numberDiff line change
@@ -56,13 +56,25 @@ In most cases this should be the latest available version.
5656
tags
5757
~~~~
5858

59-
You can attach tags to existing tags of imported needs using the ``:tags:`` option.
59+
You can attach tags to existing tags of imported needs using the ``:tags:`` option
60+
(as a comma-separated list).
6061
This may be useful to mark easily imported needs and to create specialised filters for them.
6162

63+
ids
64+
~~~
65+
66+
.. versionadded:: 3.1.0
67+
68+
You can use the ``:ids:`` option to import only the needs with the given ids
69+
(as a comma-separated list).
70+
This is useful if you want to import only a subset of the needs from the JSON file.
71+
6272
filter
6373
~~~~~~
6474

6575
You can use the ``:filter:`` option to import only the needs which pass the filter criteria.
76+
This is a string that is evaluated as a Python expression,
77+
it is less performant than the ``:ids:`` option, but more flexible.
6678

6779
Please read :ref:`filter` for more information.
6880

sphinx_needs/directives/needimport.py

+24-15
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,7 @@
1919
from sphinx_needs.defaults import string_to_boolean
2020
from sphinx_needs.filter_common import filter_single_need
2121
from sphinx_needs.logging import log_warning
22-
from sphinx_needs.needsfile import check_needs_file
22+
from sphinx_needs.needsfile import SphinxNeedsFileException, check_needs_data
2323
from sphinx_needs.utils import add_doc, import_prefix_link_edit, logger
2424

2525

@@ -37,6 +37,7 @@ class NeedimportDirective(SphinxDirective):
3737
"version": directives.unchanged_required,
3838
"hide": directives.flag,
3939
"collapse": string_to_boolean,
40+
"ids": directives.unchanged_required,
4041
"filter": directives.unchanged_required,
4142
"id_prefix": directives.unchanged_required,
4243
"tags": directives.unchanged_required,
@@ -56,10 +57,6 @@ def run(self) -> Sequence[nodes.Node]:
5657
filter_string = self.options.get("filter")
5758
id_prefix = self.options.get("id_prefix", "")
5859

59-
tags = self.options.get("tags", [])
60-
if len(tags) > 0:
61-
tags = [tag.strip() for tag in re.split("[;,]", tags)]
62-
6360
need_import_path = self.arguments[0]
6461

6562
# check if given argument is downloadable needs.json path
@@ -115,21 +112,21 @@ def run(self) -> Sequence[nodes.Node]:
115112
f"Could not load needs import file {correct_need_import_path}"
116113
)
117114

118-
errors = check_needs_file(correct_need_import_path)
115+
try:
116+
with open(correct_need_import_path) as needs_file:
117+
needs_import_list = json.load(needs_file)
118+
except (OSError, json.JSONDecodeError) as e:
119+
# TODO: Add exception handling
120+
raise SphinxNeedsFileException(correct_need_import_path) from e
121+
122+
errors = check_needs_data(needs_import_list)
119123
if errors.schema:
120124
logger.info(
121125
f"Schema validation errors detected in file {correct_need_import_path}:"
122126
)
123127
for error in errors.schema:
124128
logger.info(f' {error.message} -> {".".join(error.path)}')
125129

126-
try:
127-
with open(correct_need_import_path) as needs_file:
128-
needs_import_list = json.load(needs_file)
129-
except json.JSONDecodeError as e:
130-
# TODO: Add exception handling
131-
raise e
132-
133130
if version is None:
134131
try:
135132
version = needs_import_list["current_version"]
@@ -146,6 +143,13 @@ def run(self) -> Sequence[nodes.Node]:
146143

147144
needs_config = NeedsSphinxConfig(self.config)
148145
data = needs_import_list["versions"][version]
146+
147+
if ids := self.options.get("ids"):
148+
id_list = [i.strip() for i in ids.split(",") if i.strip()]
149+
data["needs"] = {
150+
key: data["needs"][key] for key in id_list if key in data["needs"]
151+
}
152+
149153
# TODO this is not exactly NeedsInfoType, because the export removes/adds some keys
150154
needs_list: dict[str, NeedsInfoType] = data["needs"]
151155
if schema := data.get("needs_schema"):
@@ -184,8 +188,13 @@ def run(self) -> Sequence[nodes.Node]:
184188
needs_list = needs_list_filtered
185189

186190
# tags update
187-
for need in needs_list.values():
188-
need["tags"] = need["tags"] + tags
191+
if tags := [
192+
tag.strip()
193+
for tag in re.split("[;,]", self.options.get("tags", ""))
194+
if tag.strip()
195+
]:
196+
for need in needs_list.values():
197+
need["tags"] = need["tags"] + tags
189198

190199
import_prefix_link_edit(needs_list, id_prefix, needs_config.extra_links)
191200

sphinx_needs/needsfile.py

+24-6
Original file line numberDiff line numberDiff line change
@@ -11,6 +11,7 @@
1111
import sys
1212
from copy import deepcopy
1313
from datetime import datetime
14+
from functools import lru_cache
1415
from typing import Any, Iterable
1516

1617
from jsonschema import Draft7Validator
@@ -242,21 +243,38 @@ def check_needs_file(path: str) -> Errors:
242243
:param path: File path to a needs.json file
243244
:return: Dict, with error reports
244245
"""
245-
schema_path = os.path.join(os.path.dirname(__file__), "needsfile.json")
246-
with open(schema_path) as schema_file:
247-
needs_schema = json.load(schema_file)
248-
249246
with open(path) as needs_file:
250247
try:
251-
needs_data = json.load(needs_file)
248+
data = json.load(needs_file)
252249
except json.JSONDecodeError as e:
253250
raise SphinxNeedsFileException(
254251
f'Problems loading json file "{path}". '
255252
f"Maybe it is empty or has an invalid json format. Original exception: {e}"
256253
)
254+
return check_needs_data(data)
255+
256+
257+
@lru_cache
258+
def _load_schema() -> dict[str, Any]:
259+
schema_path = os.path.join(os.path.dirname(__file__), "needsfile.json")
260+
with open(schema_path) as schema_file:
261+
return json.load(schema_file) # type: ignore[no-any-return]
262+
263+
264+
def check_needs_data(data: Any) -> Errors:
265+
"""
266+
Checks given needs data (already loaded from a needs.json file), if it passes our needs.json structure tests.
267+
268+
Current checks:
269+
* Schema validation
270+
271+
:param data: Loaded needs.json file
272+
:return: Dict, with error reports
273+
"""
274+
needs_schema = _load_schema()
257275

258276
validator = Draft7Validator(needs_schema)
259-
schema_errors = list(validator.iter_errors(needs_data))
277+
schema_errors = list(validator.iter_errors(data))
260278

261279
# In future there may be additional types of validations.
262280
# So lets already use a class for all errors

0 commit comments

Comments
 (0)