Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add more module testing and upgrade to pytest #166

Merged
merged 23 commits into from
Jul 27, 2020
Merged
Show file tree
Hide file tree
Changes from 15 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions .coveragerc
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
[run]
source=OrthoEvol
8 changes: 5 additions & 3 deletions .travis.yml
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,9 @@ notifications:
install:
- "pip install --upgrade pip setuptools wheel"
- "pip install --only-binary=numpy,scipy numpy scipy"
- "pip install matplotlib ipython jupyter sympy nose"
- "pip install matplotlib ipython jupyter sympy pytest"
- "pip install -r requirements.txt"
# command to run nosetests
script: nosetests tests/ --verbosity=3
- "pip install ."
# command to run unittests via pytest
script:
- pytest tests/
20 changes: 10 additions & 10 deletions OrthoEvol/Cookies/cookie_jar.py
Original file line number Diff line number Diff line change
Expand Up @@ -127,7 +127,7 @@ def _check_ingredients(self, cookie, path, no_input, extra_context):
self.cookielog.info('%s was created. ✔' % str(path))

def bake_the_repo(self, cookie_jar=None):
self.cookielog.warn('Creating directories from the Repository Cookie template.')
self.cookielog.warning('Creating directories from the Repository Cookie template.')
"""
This function creates a new repository. If a repository name
is given to the class, then it is given a name. If not, cookiecutter
Expand Down Expand Up @@ -163,7 +163,7 @@ def bake_the_user(self, cookie_jar=None):
:param cookie_jar: (Default value = None)
"""

self.cookielog.warn('Creating directories from the User Cookie template.')
self.cookielog.warning('Creating directories from the User Cookie template.')
if cookie_jar:
self.cookie_jar = cookie_jar

Expand All @@ -181,7 +181,7 @@ def bake_the_project(self, cookie_jar=None):
:return: A new project inside the user's project directory.
"""

self.cookielog.warn('Creating directories from the Project Cookie template.')
self.cookielog.warning('Creating directories from the Project Cookie template.')
if cookie_jar:
self.cookie_jar = cookie_jar
# Add the project
Expand All @@ -198,7 +198,7 @@ def bake_the_project(self, cookie_jar=None):
if self.exists(str(self.cookie_jar / Path(self.project))):
self.cookielog.info('Project exists. ✔')
else:
self.cookielog.warn('A project linked to a user/repository is being created.')
self.cookielog.warning('A project linked to a user/repository is being created.')
cookiecutter(str(self.Recipes.project_cookie), extra_context=e_c, no_input=no_input,
output_dir=str(self.cookie_jar))
# Logging
Expand All @@ -212,7 +212,7 @@ def bake_the_project(self, cookie_jar=None):
if self.exists(str(self.cookie_jar / Path(self.project))):
self.cookielog.info('Project exists. ✔')
else:
self.cookielog.warn('A basic standalone project is being created.')
self.cookielog.warning('A basic standalone project is being created.')
cookiecutter(str(self.Recipes.basic_project_cookie), extra_context=e_c, no_input=no_input,
output_dir=str(self.cookie_jar))
self.cookielog.info(
Expand Down Expand Up @@ -271,7 +271,7 @@ def bake_the_website(self, host, port, website_path, cookie_jar=None):
:param cookie_jar: (Default value = None)
"""

self.cookielog.warn('Creating directories from the Website Cookie template.')
self.cookielog.warning('Creating directories from the Website Cookie template.')
if cookie_jar:
self.cookie_jar = cookie_jar
# TODO-ROB: Add heavy logging here
Expand All @@ -290,8 +290,8 @@ def bake_the_website(self, host, port, website_path, cookie_jar=None):
run_script(script_path=str(script_path), cwd=str(website_path))
self.cookielog.info(
'Directories have been created for the Flask Web Server, %s. ✔' % self.website)
self.cookielog.warn('The %s Flask Server should now be running on http://%s:%s' %
(self.website, host, port))
self.cookielog.warning('The %s Flask Server should now be running on http://%s:%s' %
(self.website, host, port))

def bake_the_research(self, research_type, research, cookie_jar=None):
"""Create a directory for a new research project.
Expand All @@ -301,7 +301,7 @@ def bake_the_research(self, research_type, research, cookie_jar=None):
:param cookie_jar: (Default value = None)
"""

self.cookielog.warn('Creating directories from the Research Cookie template.')
self.cookielog.warning('Creating directories from the Research Cookie template.')
if cookie_jar:
self.cookie_jar = cookie_jar

Expand All @@ -320,7 +320,7 @@ def bake_the_app(self, app, cookie_jar=None):
:param cookie_jar: (Default value = None)
"""

self.cookielog.warn('Creating directories from the App Cookie template.')
self.cookielog.warning('Creating directories from the App Cookie template.')
if cookie_jar:
self.cookie_jar = cookie_jar
e_c = {"app_name": app}
Expand Down
4 changes: 2 additions & 2 deletions OrthoEvol/Tools/parallel/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -22,9 +22,9 @@ def printwords(word):
print(word)


words = ['bae', 'luh', 'cuh']
words = ['python', 'rust', 'javascript']

if __name__ == '__main__':
mp = Multiprocess()
mp.map2function(printwords, words)
mp.map_to_function(printwords, words)
```
2 changes: 1 addition & 1 deletion OrthoEvol/Tools/parallel/multiprocess.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ def _logger():
logger = logzero.logger
return logger

def map2function(self, function, iterable):
def map_to_function(self, function, iterable):
"""Start a pool to run your function with a list.

:param function: Input a python function.
Expand Down
60 changes: 35 additions & 25 deletions OrthoEvol/Tools/pbs/qstat.py
Original file line number Diff line number Diff line change
@@ -1,22 +1,24 @@
import asyncio
import os
import csv
import yaml
import sys
import json
import os
import subprocess as sp
import sys
from collections import OrderedDict
from datetime import datetime
from pathlib import Path
from time import sleep

import pandas as pd
import plotly.graph_objs as go
import plotly
import plotly.graph_objs as go
import yaml
from dateutil import parser
from datetime import datetime
from time import sleep
from pkg_resources import resource_filename
from collections import OrderedDict
from pathlib import Path
from OrthoEvol.utilities import FullUtilities

from OrthoEvol.Manager.config import yml
from OrthoEvol.Tools.logit import LogIt
from OrthoEvol.utilities import FullUtilities


class TargetJobKeyError(KeyError):
Expand All @@ -29,19 +31,19 @@ class BaseQstat(object):
# Static qstat Keywords

__misc_kw = ["Checkpoint", "Error_Path", "exec_host", "exec_vnode", "Hold_Types", "Join_Path",
"Keep_Files", "Mail_Points", "Output_Path", "Rerunable", "Resource_List.mpiprocs",
"Resource_List.ncpus", "Resource_List.nodect", "Resource_List.nodes",
"Resource_List.place", "Resource_List.select", "jobdir", "Variable_List", "umask",
"project", "Submit_arguments"]
"Keep_Files", "Mail_Points", "Output_Path", "Rerunable", "Resource_List.mpiprocs",
"Resource_List.ncpus", "Resource_List.nodect", "Resource_List.nodes",
"Resource_List.place", "Resource_List.select", "jobdir", "Variable_List", "umask",
"project", "Submit_arguments"]
__job_limits_kw = ["ctime", "etime", "qtime", "stime", "mtime", "Resource_List.walltime", "Resource_List.cput",
"Resource_List.mem"]
"Resource_List.mem"]
__job_time_kw = ["ctime", "etime", "qtime", "stime", "mtime"]
__job_info_kw = ["Job_Id", "Job_Name", "Job_Owner", "queue", "server", "session_id"]
__static_kw = __job_info_kw + __job_limits_kw + __misc_kw
# Dynamic qstat Keywords
__misc_data_kw = ["job_state", "Priority", "substate", "comment", "run_count"]
__job_data_kw = ["resources_used.cpupercent", "resources_used.cput", "resources_used.mem",
"resources_used.vmem", "resources_used.walltime", "resources_used.ncpus"]
"resources_used.vmem", "resources_used.walltime", "resources_used.ncpus"]
__dynamic_kw = __job_data_kw + __misc_data_kw
# All Keywords
__keywords = __static_kw + __dynamic_kw
Expand Down Expand Up @@ -173,18 +175,21 @@ def run_qstat(self, csv_flag=True, sqlite_flag=False, ordered=False, capture_jso
capture_json=True)
self.qstat_dict = self.qstat_data['Jobs']
else:
self.qstat_data = self.qstat_output(cmd=self.cmd, log_file=str(self.qstat_log_file), print_flag=False)
self.qstat_data = self.qstat_output(
cmd=self.cmd, log_file=str(self.qstat_log_file), print_flag=False)
# Convert raw data to nested dictionary
self.qstat_dict = self.to_dict(qstat_data=self.qstat_data, ordered=ordered)
# Isolate data for target PBS job
self.job_dict = self.target_data(qstat_dict=self.qstat_dict, target_job=self.pbs_job_id)
# Isolate static data for target PBS job
self.static_dict = self.static_data(qstat_dict=self.qstat_dict, target_job=self.pbs_job_id)
# Create a pandas dataframe for target PBS job, formatted for creating a CSV file.
self.job_dataframe = self.to_dataframe(qstat_dict=self.qstat_dict, target_job=self.pbs_job_id)
self.job_dataframe = self.to_dataframe(
qstat_dict=self.qstat_dict, target_job=self.pbs_job_id)
if csv_flag:
self.to_csv(file=self.data_file, qstat_dict=self.qstat_dict, target_job=self.pbs_job_id)
self.static_data_to_yaml(file=self.info_file, qstat_dict=self.qstat_dict, target_job=self.pbs_job_id)
self.static_data_to_yaml(
file=self.info_file, qstat_dict=self.qstat_dict, target_job=self.pbs_job_id)
if sqlite_flag:
self.to_sqlite()

Expand Down Expand Up @@ -541,7 +546,8 @@ def static_data(self, qstat_dict, target_job):
for keyword in qstat_dict[target_job].keys():
if keyword in self.__static_kw:
if keyword in self.__job_time_kw:
data_dict[target_job][keyword] = str(parser.parse(qstat_dict[target_job][keyword]))
data_dict[target_job][keyword] = str(
parser.parse(qstat_dict[target_job][keyword]))
else:
data_dict[target_job][keyword] = qstat_dict[target_job][keyword]
return data_dict
Expand Down Expand Up @@ -710,7 +716,8 @@ def _watch(self, count=None, first_time=None, max_count=None):
first_time = first_time
try:
self.run_qstat(csv_flag=True, sqlite_flag=False)
self.qstat_log.info("Added data-point %s from qstat for %s." % (self.watch_count, self.pbs_job_id))
self.qstat_log.info("Added data-point %s from qstat for %s." %
(self.watch_count, self.pbs_job_id))
if not first_time:
if self.watch_count == max_count:
raise TargetJobKeyError
Expand Down Expand Up @@ -855,7 +862,8 @@ def watch(self, jobs, infile=None, outfile=None, cmd=None, wait_time=120):
:param wait_time: The amount of time to wait in between each point of data being collected.
:type wait_time: int.
"""
self.job_dict = self.get_qstat_dict(jobs, infile=infile, outfile=outfile, cmd=cmd, wait_time=wait_time)
self.job_dict = self.get_qstat_dict(
jobs, infile=infile, outfile=outfile, cmd=cmd, wait_time=wait_time)
self.job_list = self.multi_watch(job_dict=self.job_dict)

def get_qstat_dict(self, jobs, infile=None, outfile=None, cmd=None, wait_time=120):
Expand Down Expand Up @@ -885,7 +893,8 @@ def get_qstat_dict(self, jobs, infile=None, outfile=None, cmd=None, wait_time=12
for job in jobs:
# Get qstat parameters for each target job
home = str(self.config_home / job)
_qstat = Qstat(job_id=job, home=home, infile=infile, outfile=outfile, cmd=cmd, wait_time=wait_time)
_qstat = Qstat(job_id=job, home=home, infile=infile,
outfile=outfile, cmd=cmd, wait_time=wait_time)
# Create a dictionary value for each job
job_dict[job] = _qstat
return job_dict
Expand All @@ -908,7 +917,8 @@ def multi_watch(self, job_dict):

for _qstat in job_dict.values():
# Append task list for asynchronous programming
tasks.append(asyncio.ensure_future(self._async_watch(qstat=_qstat, count=_qstat.watch_count)))
tasks.append(asyncio.ensure_future(
self._async_watch(qstat=_qstat, count=_qstat.watch_count)))
# Run task list and then close
job_list = ioloop.run_until_complete(asyncio.wait(tasks))
ioloop.close()
Expand Down Expand Up @@ -942,7 +952,8 @@ async def _async_watch(self, qstat, first_time=None, count=None):

try:
qstat.run_qstat(csv_flag=True, sqlite_flag=False)
qstat.qstat_log.info("Added data-point %s from qstat for %s." % (qstat.watch_count, qstat.pbs_job_id))
qstat.qstat_log.info("Added data-point %s from qstat for %s." %
(qstat.watch_count, qstat.pbs_job_id))
if not first_time:
await asyncio.sleep(qstat.wait_time)
temp_qstat = self._async_watch(qstat=qstat, first_time=False)
Expand All @@ -954,4 +965,3 @@ async def _async_watch(self, qstat, first_time=None, count=None):
temp_qstat = qstat
qstat = temp_qstat
return qstat

27 changes: 17 additions & 10 deletions OrthoEvol/Tools/pbs/qsub.py
Original file line number Diff line number Diff line change
@@ -1,16 +1,18 @@
import getpass
import string
import random
import os
import random
import shutil
import string
import subprocess as sp
from pathlib import Path
from collections import OrderedDict
from pkg_resources import resource_filename
from datetime import datetime as d
from pathlib import Path
from time import sleep
from OrthoEvol.Tools.logit import LogIt

from pkg_resources import resource_filename

from OrthoEvol.Manager.config import templates
from OrthoEvol.Tools.logit import LogIt
from OrthoEvol.utilities import FullUtilities


Expand Down Expand Up @@ -385,14 +387,17 @@ def set_up_pbs_script(self, pbs_template_string=None, pbs_template_file=None, pb
"""

if pbs_template_file == self.pbs_script:
raise FileExistsError("The script provided, %s, already exists. Do not overwrite." % self.pbs_script)
raise FileExistsError(
"The script provided, %s, already exists. Do not overwrite." % self.pbs_script)

# Configure the PBS Code
if pbs_attributes is not None:
if pbs_template_file is not None:
pbs_code = self.format_template_string(template=pbs_template_file, attributes=pbs_attributes)
pbs_code = self.format_template_string(
template=pbs_template_file, attributes=pbs_attributes)
elif pbs_template_string is not None:
pbs_code = self.format_template_string(code=pbs_template_string, attributes=pbs_attributes)
pbs_code = self.format_template_string(
code=pbs_template_string, attributes=pbs_attributes)
else:
raise ValueError("Please supply the pbs_template_file or pbs_template_string to generate the proper"
"pbs script.")
Expand Down Expand Up @@ -440,7 +445,8 @@ def submit_python_job(self, cmd=None, py_template_string=None, py_template_file=
python_attributes=python_attributes)
self.qsub_log.info("The Python script has been formatted.")
elif not self.python_script.exists():
self.copy_supplied_script(supplied_script=self.supplied_python_script, new_script=self.python_script)
self.copy_supplied_script(
supplied_script=self.supplied_python_script, new_script=self.python_script)

# Set up the custom python command
if custom_python_cmd is not None:
Expand All @@ -453,7 +459,8 @@ def submit_python_job(self, cmd=None, py_template_string=None, py_template_file=
pbs_attributes=pbs_attributes)
self.qsub_log.info("The PBS script has been set up.")
if not self.pbs_script.exists():
self.copy_supplied_script(supplied_script=self.supplied_pbs_script, new_script=self.pbs_script)
self.copy_supplied_script(
supplied_script=self.supplied_pbs_script, new_script=self.pbs_script)

# Submit job
self.submit_pbs_script(cmd=cmd)
Expand Down
7 changes: 5 additions & 2 deletions README.rst
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,9 @@
.. image:: https://readthedocs.org/projects/orthoevolution/badge/?version=latest
:target: http://orthoevolution.readthedocs.io/en/latest/?badge=latest

.. image:: https://codecov.io/gh/codecov/OrthoEvolution/branch/master/graph/badge.svg
:target: https://codecov.io/gh/codecov/OrthoEvolution


OrthoEvolution
====================
Expand Down Expand Up @@ -136,9 +139,9 @@ Creating projects and databases dynamically

Tests
----------------
To run tests, type ``nosetests tests/`` in the OrthoEvolution directory.
To run tests, type ``pytest tests`` in the OrthoEvolution directory.

First, install the ``nose` package using pip.
First, install the ``pytest`` package using pip.

Contributors
----------------
Expand Down
6 changes: 3 additions & 3 deletions tests/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -5,9 +5,9 @@ This is the test suite.

Usage
--------
Ensure that nose is installed.
Ensure that pytest is installed.

`pip install nose`
`pip install pytest`

Run our tests from the top directory, `OrthoEvolution`, by typing:
`nosetests Tests/`
`pytest tests`
14 changes: 0 additions & 14 deletions tests/mp_test/child.py

This file was deleted.

6 changes: 0 additions & 6 deletions tests/mp_test/mp.py

This file was deleted.

Loading