Skip to content

Commit 0745d94

Browse files
authored
Merge pull request #69 from all3fox/improve-tests
Suggest minor edits to boolean.py
2 parents bd60025 + 0999eb5 commit 0745d94

File tree

2 files changed

+22
-22
lines changed

2 files changed

+22
-22
lines changed

boolean/boolean.py

+21-21
Original file line numberDiff line numberDiff line change
@@ -4,14 +4,14 @@
44
This module defines a Boolean algebra over the set {TRUE, FALSE} with boolean
55
variables called Symbols and the boolean functions AND, OR, NOT.
66
7-
Some basic logic comparison are supported: Two expressions can be compared for
8-
equivalence or containment. Furthermore you can simplify an expression and
9-
obtain its normal form.
7+
Some basic logic comparison is supported: two expressions can be
8+
compared for equivalence or containment. Furthermore you can simplify
9+
an expression and obtain its normal form.
1010
11-
You can create expressions in Python using familiar boolean operators or parse
12-
expressions from strings. The parsing is easy to extend with your own tokenizer.
13-
You can also subclass some classes to customize how expressions behave and are
14-
presented.
11+
You can create expressions in Python using familiar boolean operators
12+
or parse expressions from strings. The parsing can be extended with
13+
your own tokenizer. You can also customize how expressions behave and
14+
how they are presented.
1515
1616
For extensive documentation look either into the docs directory or view it
1717
online, at https://booleanpy.readthedocs.org/en/latest/.
@@ -57,7 +57,7 @@
5757
}
5858

5959

60-
# parsing errors code and messages
60+
# parsing error code and messages
6161
PARSE_UNKNOWN_TOKEN = 1
6262
PARSE_UNBALANCED_CLOSING_PARENS = 2
6363
PARSE_INVALID_EXPRESSION = 3
@@ -149,11 +149,9 @@ def __init__(self, TRUE_class=None, FALSE_class=None, Symbol_class=None,
149149

150150
def _wrap_type(self, base_class):
151151
"""
152-
Return a new type wrapping the base class using the base class name as
153-
wrapped type name.
152+
Wrap the base class using its name as the name of the new type
154153
"""
155-
wrapped_type = type(base_class.__name__, (base_class,), {})
156-
return wrapped_type
154+
return type(base_class.__name__, (base_class,), {})
157155

158156
def _cross_refs(self, objects):
159157
"""
@@ -215,9 +213,9 @@ def is_sym(_t):
215213
return _t == TOKEN_SYMBOL or isinstance(_t, Symbol)
216214

217215
prev = None
218-
for tok in tokenized:
219-
if TRACE_PARSE: print('\nprocessing token:', repr(tok))
220-
token, tokstr, position = tok
216+
for token, tokstr, position in tokenized:
217+
if TRACE_PARSE:
218+
print('\nprocessing token:', repr(token), repr(tokstr), repr(position))
221219

222220
if prev:
223221
prev_token, _, _ = prev
@@ -226,11 +224,13 @@ def is_sym(_t):
226224

227225
if token == TOKEN_SYMBOL:
228226
ast.append(self.Symbol(tokstr))
229-
if TRACE_PARSE: print(' ast: token == TOKEN_SYMBOL: append new symbol', repr(ast))
227+
if TRACE_PARSE:
228+
print(' ast: token == TOKEN_SYMBOL: append new symbol', repr(ast))
230229

231230
elif isinstance(token, Symbol):
232231
ast.append(token)
233-
if TRACE_PARSE: print(' ast: isinstance(token, Symbol): append existing symbol', repr(ast))
232+
if TRACE_PARSE:
233+
print(' ast: isinstance(token, Symbol): append existing symbol', repr(ast))
234234

235235
elif token == TOKEN_TRUE:
236236
ast.append(self.TRUE)
@@ -288,7 +288,7 @@ def is_sym(_t):
288288
else:
289289
raise ParseError(token, tokstr, position, PARSE_UNKNOWN_TOKEN)
290290

291-
prev = tok
291+
prev = (token, tokstr, position)
292292

293293
try:
294294
while True:
@@ -363,7 +363,7 @@ def tokenize(self, expr):
363363
unicode string.
364364
365365
This 3-tuple contains (token, token string, position):
366-
- token: either a Symbol instance or one of TOKEN_* token types..
366+
- token: either a Symbol instance or one of TOKEN_* token types.
367367
- token string: the original token unicode string.
368368
- position: some simple object describing the starting position of the
369369
original token string in the `expr` string. It can be an int for a
@@ -420,8 +420,8 @@ def tokenize(self, expr):
420420
'false': TOKEN_FALSE, '0': TOKEN_FALSE, 'none': TOKEN_FALSE
421421
}
422422

423-
length = len(expr)
424-
position = 0
423+
position, length = 0, len(expr)
424+
425425
while position < length:
426426
tok = expr[position]
427427

test-requirements.txt

+1-1
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
1-
tox=2.7.0
1+
tox==2.7.0

0 commit comments

Comments
 (0)