Commit ce7076a

modified docstrings
1 parent bcaf29b commit ce7076a

5 files changed: +112 −96 lines changed
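
The common change across these files is a move from free-form "Parameters:" /
"Returns:" docstring sections to Sphinx/reST field lists. A minimal before/after
sketch of the convention, lifted from the save_as hunk below:

    # Before: plain-text sections that doc tools and IDEs do not parse
    def save_as(self, save_as: str):
        """
        Save constraints to file
        Parameters:
        save_as: an str with csv or json file name
        Returns:
        Saves a csv or json file to local disk
        """

    # After: :param:/:return: fields, picked up by Sphinx autodoc and most IDEs
    def save_as(self, save_as: str):
        """
        Save constraints to file
        :param save_as: an str with csv or json file name
        :return: a csv or json file saved to local disk
        """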

constraints.py

+29-23
@@ -65,11 +65,9 @@ def max_date(self, data: pd.DataFrame, colname: str) -> str:
 
     def generate_constraints(self, data: pd.DataFrame) -> dict:
         """
-        Generate constraints dict
-        Parameters:
-        data: a pandas DataFrame
-        Returns:
-        A dict with constraints
+        Discover standard constraints dict based on provided DataFrame
+        :param data: a pandas DataFrame
+        :return: A dict with constraints
         """
         all_cols = data.columns
         nr_cols = data.select_dtypes(include=["number"]).columns
@@ -130,10 +128,8 @@ def modify_constraint(self, column: str, modify_dict: dict) -> dict:
     def save_as(self, save_as: str):
         """
         Save constraints to file
-        Parameters:
-        save_as: an str with csv or json file name
-        Returns:
-        Saves a csv or json file to local disk
+        :param save_as: an str with csv or json file name
+        :return: a csv or json file saved to local disk
         """
         if save_as.endswith(".json"):
             with open(save_as, "w", encoding="utf-8") as s_file:
@@ -147,10 +143,8 @@ def save_as(self, save_as: str):
     def read_constraints(self, file_name: str):
         """
         Read constraints from file
-        Parameters:
-        file_name: an str with csv or json file name
-        Returns:
-        A dict with constrains key, values pairs
+        :param file_name: an str with csv or json file name
+        :returns: a dict with constrains key, values pairs
         """
         if file_name.endswith(".json"):
             with open(file_name, "r", encoding="utf-8") as read_file:
@@ -188,7 +182,13 @@ class CustomConstraints:
 
     custom_constraints: list = field(default_factory=list)
 
-    def add_custom_constraint(self, name: str, query: str) -> dict:
+    def add_custom_constraint(self, name: str, query: str) -> list:
+        """
+        Add a custom constraint
+        :param name: an str with the name of the custom validation
+        :param query: a pandas query str
+        :return: an updated custom constraints list
+        """
         new_constraint = {}
         new_constraint["name"] = name
         new_constraint["query"] = query
@@ -198,22 +198,30 @@ def add_custom_constraint(self, name: str, query: str) -> dict:
         self.custom_constraints.append(new_constraint)
         return self.custom_constraints
 
-    def delete_custom_constraint(self, name: str) -> dict:
+    def delete_custom_constraint(self, name: str) -> list:
+        """
+        Delete a custom constraint
+        :param: an str with the name of the custom rule
+        :return: an updated custom constraints list
+        """
         for constraint in self.custom_constraints:
             if constraint["name"] == name:
                 self.custom_constraints.remove(constraint)
         return self.custom_constraints
 
     def view_custom_constraints(self):
+        """
+        Convert list of custom constraints to DataFrame
+        :param: None
+        :return: a DataFrame with all custom constraints
+        """
         return pd.DataFrame(self.custom_constraints)
 
     def save_as(self, save_as: str):
         """
         Save constraints to file
-        Parameters:
-        save_as: an str with csv or json file name
-        Returns:
-        Saves a csv or json file to local disk
+        :param save_as: an str with csv or json file name
+        :returns: saves a csv or json file to local disk
         """
         if save_as.endswith(".json"):
             with open(save_as, "w", encoding="utf-8") as s_file:
@@ -229,10 +237,8 @@ def save_as(self, save_as: str):
     def read_constraints(self, file_name: str):
         """
         Read constraints from file
-        Parameters:
-        file_name: an str with csv or json file name
-        Returns:
-        A dict with constrains key, values pairs
+        :param file_name: an str with csv or json file name
+        :returns: a dict with constrains key, values pairs
         """
         if file_name.endswith(".json"):
             with open(file_name, "r", encoding="utf-8") as read_file:
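
For context on the retyped return annotations: add_custom_constraint and
delete_custom_constraint both return the updated list, which the annotations now
reflect. A short usage sketch; the import path and the example rule are
assumptions for illustration, not taken from this commit:

    from constraints import CustomConstraints  # assumed import path

    cc = CustomConstraints()

    # appends {"name": ..., "query": ...} and returns the updated list
    cc.add_custom_constraint(name="negative_amounts", query="amount < 0")

    # wraps the stored rules in a DataFrame for the GUI table
    print(cc.view_custom_constraints())

    # removes the rule by name and returns the updated list
    cc.delete_custom_constraint("negative_amounts")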

gui.py

+49-34
@@ -12,8 +12,8 @@
 def view_constraint_properties(row: pd.DataFrame):
     """
     Opens a GUI window to Modify a single constraint
-    :param: row: a single row pandas DataFrame
-    :returns: a modified constraints dict and table update
+    :param row: a single row pandas DataFrame
+    :return: a modified constraints dict and table update
     """
     row_vals = row.to_dict(orient="records")[0]
     for key, _ in row_vals.items():
@@ -24,8 +24,8 @@ def view_constraint_properties(row: pd.DataFrame):
 def update_constraint_properties(row: pd.DataFrame):
     """
     Opens a GUI window to Modify a single constraint
-    :param: row: a single row pandas DataFrame
-    :returns: a modified constraints dict and table update
+    :param row: a single row pandas DataFrame
+    :return: a modified constraints dict and table update
     """
     new_vals = row.to_dict(orient="records")[0]
     for key, _ in new_vals.items():
@@ -36,8 +36,8 @@ def update_constraint_properties(row: pd.DataFrame):
 def view_validation_data(data: pd.DataFrame):
     """
     Opens a GUI window to view the validation break records
-    :param: row: a Verifier.validation_data DataFrame
-    :returns: a GUI window with table display of breaks
+    :param row: a Verifier.validation_data DataFrame
+    :return: a GUI window with table display of breaks
     """
     standard_validation_layout = [
         [
@@ -83,9 +83,9 @@ def view_validation_data(data: pd.DataFrame):
 def update_table(headings: list, data: pd.DataFrame) -> pd.DataFrame:
     """
     Update GUI validation tables
-    :param: headings: a list of column names
-    :param: data: a constraints dict
-    :returns: an updated pandas DataFrame
+    :param headings: a list of column names
+    :param data: a constraints dict
+    :return: an updated pandas DataFrame
     """
     cols = pd.DataFrame(columns=headings)
     t_data = pd.DataFrame(data).T.reset_index()
@@ -98,7 +98,7 @@ def update_table(headings: list, data: pd.DataFrame) -> pd.DataFrame:
 def get_constraints_dtypes(constraints: dict) -> dict:
     """
     Get new constraints data types
-    :param: Constraints object
+    :param Constraints object
     :return: dictionary of new data types
     """
     new_dtypes = {
@@ -112,9 +112,10 @@ def get_constraints_dtypes(constraints: dict) -> dict:
 
 class VerifierProgress(StandardVerifier):
     """
-    Overrides StandardVerifier validation function to 
+    Overrides StandardVerifier validation function to
     incorporate GUI progress bar functionality
     """
+
     data: pd.DataFrame
     constraints: dict
     enforce_dtypes: bool = True
@@ -166,18 +167,20 @@ def main():
             const = StandardConstraints()
             const.generate_constraints(frame)
             c_update = update_table(STANDARD_HEADINGS, const.constraints)
-            window["-C_TABLE-"].Update(c_update.values.tolist())
-
-        if event == "-C_TABLE-":
-            t_data_index = values["-C_TABLE-"]
+            window["-STANDARD_TABLE-"].Update(c_update.values.tolist())
+
+        if event == "-STANDARD_TABLE-":
+            t_data_index = values["-STANDARD_TABLE-"]
             if len(t_data_index) > 0:
                 row_data = c_update.filter(items=t_data_index, axis=0)
                 view_constraint_properties(row_data)
-
+
         if event == "Update":
             new_values = update_constraint_properties(row_data)
             mod = {
-                k: v for k, v in new_values.items() if v not in ["nan", "NaN"]
+                k: v
+                for k, v in new_values.items()
+                if v not in ["nan", "NaN"]
             }
             dtype_mapping = {
                 "attribute": str,
@@ -187,7 +190,10 @@ def main():
                 "min_length": lambda i: int(float(i)),
                 "max_length": lambda i: int(float(i)),
                 "value_range": lambda i: set(
-                    map(str.strip, i.strip("}{").replace("'", "").split(","))
+                    map(
+                        str.strip,
+                        i.strip("}{").replace("'", "").split(","),
+                    )
                 ),
                 "min_value": float,
                 "max_value": float,
@@ -201,14 +207,14 @@ def main():
             del mod["attribute"]
             const.modify_constraint(new_values["attribute"], mod)
             m_update = update_table(STANDARD_HEADINGS, const.constraints)
-            window["-C_TABLE-"].Update(m_update.values.tolist())
-
+            window["-STANDARD_TABLE-"].Update(m_update.values.tolist())
+
         if event == "Recast dtypes":
            dtypes = get_constraints_dtypes(const.constraints)
            frame = read_file(values["-IN-"], downcast=True, dtypes=dtypes)
            const.generate_constraints(frame)
            c_update = update_table(STANDARD_HEADINGS, const.constraints)
-            window["-C_TABLE-"].Update(c_update.values.tolist())
+            window["-STANDARD_TABLE-"].Update(c_update.values.tolist())
         if event == "-DTYPES-":
             ENFORCE_DTYPES = not ENFORCE_DTYPES
             window["-DTYPES-"].update(
@@ -233,7 +239,7 @@ def main():
                     sg.Popup(f"Constraint for {v} but {v} not in data")
             else:
                 sg.PopupError("No Data is loaded")
-
+
         if event == "-V_TABLE-":
             t_data_index = values["-V_TABLE-"]
             row_data = v_update.filter(items=t_data_index, axis=0)
@@ -245,7 +251,7 @@ def main():
                 )
             ]
             view_validation_data(validation_data)
-
+
         # Custom constraints
         if event == "Create":
             if "custom_constraints" not in locals():
@@ -257,9 +263,13 @@ def main():
             custom_constraints.add_custom_constraint(
                 values["-NAME-"], values["-QUERY-"]
             )
-            all_custom_constraints = custom_constraints.view_custom_constraints()
-            window["-CUSTOM_TABLE-"].Update(all_custom_constraints.values.tolist())
-
+            all_custom_constraints = (
+                custom_constraints.view_custom_constraints()
+            )
+            window["-CUSTOM_TABLE-"].Update(
+                all_custom_constraints.values.tolist()
+            )
+
         if event == "-CUSTOM_TABLE-":
             t_data_index = values["-CUSTOM_TABLE-"]
             if len(t_data_index) > 0:
@@ -269,18 +279,22 @@ def main():
                 row_data = ct_update.filter(items=t_data_index, axis=0)
                 window["-NAME-"].Update(row_data["name"].values[0])
                 window["-QUERY-"].Update(row_data["query"].values[0])
-
+
         if event == "Delete":
             custom_constraints.delete_custom_constraint(values["-NAME-"])
-            all_custom_constraints = custom_constraints.view_custom_constraints()
-            window["-CUSTOM_TABLE-"].Update(all_custom_constraints.values.tolist())
-
+            all_custom_constraints = (
+                custom_constraints.view_custom_constraints()
+            )
+            window["-CUSTOM_TABLE-"].Update(
+                all_custom_constraints.values.tolist()
+            )
+
         if event == "Validate Custom":
             custom_verify = CustomVerifier(frame, custom_constraints)
             window["-CV_TABLE-"].Update(
                 custom_verify.validation_summary.values.tolist()
             )
-
+
         if event == "-CV_TABLE-":
             t_data_index = values["-CV_TABLE-"]
             row_data = custom_verify.validation_summary.filter(
@@ -294,18 +308,18 @@ def main():
                 )
             ]
             view_validation_data(validation_data)
-
+
         # Loading and saving events
         if event == "-SAVE_C_AS-":
             const.save_as(values["-SAVE_C_AS-"])
         if event == "-READ_C-":
             const = StandardConstraints()
             const.read_constraints(values["-READ_C-"])
             c_update = update_table(STANDARD_HEADINGS, const.constraints)
-            window["-C_TABLE-"].Update(c_update.values.tolist())
+            window["-STANDARD_TABLE-"].Update(c_update.values.tolist())
         if event == "-SAVE_V_AS-":
             valid.validation_summary.T.to_csv(values["-SAVE_V_AS-"])
-
+
         if event == "-SAVE_CUSTOM_AS-":
             custom_constraints.save_as(values["-SAVE_CUSTOM_AS-"])
         if event == "-READ_CUSTOM-":
@@ -318,5 +332,6 @@ def main():
 
     window.close()
 
+
 if __name__ == "__main__":
     main()

layouts.py

+1-1
@@ -80,7 +80,7 @@
             headings=STANDARD_HEADINGS,
             auto_size_columns=False,
             enable_events=True,
-            key="-C_TABLE-",
+            key="-STANDARD_TABLE-",
             expand_x=True,
             num_rows=20,
         )

utils.py

+15-15
@@ -9,16 +9,18 @@ def read_file(
 ) -> pd.DataFrame:
     """
     Reads a csv or xlsx file
-    Params:
-    file_path: str a path to csv or xlsx file
-    dtypes: a dictionary of data types
-    downcast: a boolean to downcast data types
-    Returns:
-    A pandas DataFrame
+    :param file_path: an str path to csv or xlsx file
+    :param dtypes: a dictionary of data types
+    :param downcast: a boolean to downcast data types
+    :returns: a DataFrame
     """
     if dtypes:
-        non_dates = dict(filter(lambda val: val[1] != "datetime64[ns]", dtypes.items()))
-        dates = dict(filter(lambda val: val[1] == "datetime64[ns]", dtypes.items()))
+        non_dates = dict(
+            filter(lambda val: val[1] != "datetime64[ns]", dtypes.items())
+        )
+        dates = dict(
+            filter(lambda val: val[1] == "datetime64[ns]", dtypes.items())
+        )
     else:
         non_dates = {}
         dates = {}
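
The reflowed filter calls above split a user-supplied dtype mapping into date
and non-date columns so the two groups can be parsed separately. A quick
illustration with a hypothetical mapping:

    dtypes = {"id": "int32", "created": "datetime64[ns]", "status": "category"}

    non_dates = dict(filter(lambda val: val[1] != "datetime64[ns]", dtypes.items()))
    dates = dict(filter(lambda val: val[1] == "datetime64[ns]", dtypes.items()))

    print(non_dates)  # {'id': 'int32', 'status': 'category'}
    print(dates)      # {'created': 'datetime64[ns]'}
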
@@ -33,17 +35,13 @@ def read_file(
             unix_date = frame[date].clip(lower=0).astype(str)
             unix_date = unix_date.str[:10]
             frame[date] = pd.to_datetime(
-                pd.Series(
-                    unix_date,
-                    dtype="datetime64[ns]",
-                ),
+                pd.Series(unix_date, dtype="datetime64[ns]"),
                 unit="s",
                 errors="ignore",
             )
         else:
             frame[date] = pd.to_datetime(
-                pd.Series(frame[date], dtype="datetime64[ns]"),
-                errors="ignore",
+                pd.Series(frame[date], dtype="datetime64[ns]"), errors="ignore"
             )
 
     if downcast:
@@ -57,13 +55,15 @@ def read_file(
             ):
                 frame[col] = frame[col].astype("category")
             elif issubclass(frame[col].dtypes.type, np.object_) and (
-                len(frame[col].unique()) > 20):
+                len(frame[col].unique()) > 20
+            ):
                 frame[col] = frame[col].astype(str)
     return frame
 
 
 class TypeEncoder(json.JSONEncoder):
     """Custom encoder class for json"""
+
     def default(self, o):
         if isinstance(o, np.bool_):
             return bool(o)
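
The TypeEncoder hunk only gains a blank line after the class docstring. For
readers unfamiliar with custom JSON encoders, a minimal sketch of how such a
class plugs into json.dumps; only the np.bool_ branch is visible in this diff,
and the super().default(o) fallback is an assumption for a runnable example:

    import json
    import numpy as np


    class TypeEncoder(json.JSONEncoder):
        """Custom encoder class for json"""

        def default(self, o):
            if isinstance(o, np.bool_):
                return bool(o)  # plain json cannot serialise numpy booleans
            return super().default(o)  # defer everything else to the base class


    print(json.dumps({"nullable": np.bool_(True)}, cls=TypeEncoder))
    # {"nullable": true}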
