19
19
from sphinx_needs .defaults import string_to_boolean
20
20
from sphinx_needs .filter_common import filter_single_need
21
21
from sphinx_needs .logging import log_warning
22
- from sphinx_needs .needsfile import check_needs_file
22
+ from sphinx_needs .needsfile import SphinxNeedsFileException , check_needs_data
23
23
from sphinx_needs .utils import add_doc , import_prefix_link_edit , logger
24
24
25
25
@@ -37,6 +37,7 @@ class NeedimportDirective(SphinxDirective):
37
37
"version" : directives .unchanged_required ,
38
38
"hide" : directives .flag ,
39
39
"collapse" : string_to_boolean ,
40
+ "ids" : directives .unchanged_required ,
40
41
"filter" : directives .unchanged_required ,
41
42
"id_prefix" : directives .unchanged_required ,
42
43
"tags" : directives .unchanged_required ,
@@ -56,10 +57,6 @@ def run(self) -> Sequence[nodes.Node]:
56
57
filter_string = self .options .get ("filter" )
57
58
id_prefix = self .options .get ("id_prefix" , "" )
58
59
59
- tags = self .options .get ("tags" , [])
60
- if len (tags ) > 0 :
61
- tags = [tag .strip () for tag in re .split ("[;,]" , tags )]
62
-
63
60
need_import_path = self .arguments [0 ]
64
61
65
62
# check if given argument is a downloadable needs.json path
@@ -115,21 +112,21 @@ def run(self) -> Sequence[nodes.Node]:
115
112
f"Could not load needs import file { correct_need_import_path } "
116
113
)
117
114
118
- errors = check_needs_file (correct_need_import_path )
115
+ try :
116
+ with open (correct_need_import_path ) as needs_file :
117
+ needs_import_list = json .load (needs_file )
118
+ except (OSError , json .JSONDecodeError ) as e :
119
+ # TODO: Add exception handling
120
+ raise SphinxNeedsFileException (correct_need_import_path ) from e
121
+
122
+ errors = check_needs_data (needs_import_list )
119
123
if errors .schema :
120
124
logger .info (
121
125
f"Schema validation errors detected in file { correct_need_import_path } :"
122
126
)
123
127
for error in errors .schema :
124
128
logger .info (f' { error .message } -> { "." .join (error .path )} ' )
125
129
126
- try :
127
- with open (correct_need_import_path ) as needs_file :
128
- needs_import_list = json .load (needs_file )
129
- except json .JSONDecodeError as e :
130
- # TODO: Add exception handling
131
- raise e
132
-
133
130
if version is None :
134
131
try :
135
132
version = needs_import_list ["current_version" ]
@@ -146,6 +143,13 @@ def run(self) -> Sequence[nodes.Node]:
146
143
147
144
needs_config = NeedsSphinxConfig (self .config )
148
145
data = needs_import_list ["versions" ][version ]
146
+
147
+ if ids := self .options .get ("ids" ):
148
+ id_list = [i .strip () for i in ids .split ("," ) if i .strip ()]
149
+ data ["needs" ] = {
150
+ key : data ["needs" ][key ] for key in id_list if key in data ["needs" ]
151
+ }
152
+
149
153
# TODO this is not exactly NeedsInfoType, because the export removes/adds some keys
150
154
needs_list : dict [str , NeedsInfoType ] = data ["needs" ]
151
155
if schema := data .get ("needs_schema" ):
@@ -184,8 +188,13 @@ def run(self) -> Sequence[nodes.Node]:
184
188
needs_list = needs_list_filtered
185
189
186
190
# tags update
187
- for need in needs_list .values ():
188
- need ["tags" ] = need ["tags" ] + tags
191
+ if tags := [
192
+ tag .strip ()
193
+ for tag in re .split ("[;,]" , self .options .get ("tags" , "" ))
194
+ if tag .strip ()
195
+ ]:
196
+ for need in needs_list .values ():
197
+ need ["tags" ] = need ["tags" ] + tags
189
198
190
199
import_prefix_link_edit (needs_list , id_prefix , needs_config .extra_links )
191
200
0 commit comments