Skip to content

Commit d808ec5

Browse files
committed
fix #24 speed up with numba, refactor for pytest
1 parent 6392196 commit d808ec5

18 files changed

+451
-194
lines changed

.github/workflows/pythonapp.yml

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -9,10 +9,10 @@ jobs:
99

1010
steps:
1111
- uses: actions/checkout@v1
12-
- name: Set up Python 3.7
12+
- name: Set up Python 3.6
1313
uses: actions/setup-python@v1
1414
with:
15-
python-version: 3.7
15+
python-version: 3.6
1616
- name: Install dependencies
1717
run: |
1818
python -m pip install --upgrade pip
@@ -22,4 +22,4 @@ jobs:
2222
flake8 --ignore=F401 --exclude=$(grep -v '^#' .gitignore | xargs | sed -e 's/ /,/g')
2323
- name: Test with unittest
2424
run: |
25-
python -W ignore -m unittest discover
25+
pytest

CHANGES.md

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,15 @@
1+
# 0.2.0 / 2020-03-27
2+
3+
* set to python 3.6
4+
* redo the requirements.txt
5+
* speed up with numba
6+
* refactor unit tests to pytest
7+
18
# 0.1.1 / 2020-02-17
29

310
* prepared for GemFury pypi server
411
* downgrade to py3.5 from py3.6
512

6-
713
# 0.1.0 / 2019-11-02
814

915
* Initial Release

README.md

Lines changed: 3 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,7 @@ pip install fracdiff --extra-index-url https://${FURY_AUTH}:@pypi.fury.io/kmedia
1717

1818

1919
## Install via requirements.txt
20-
when using `fracdiff==0.1.1` in `requirements.txt`,
20+
when using `fracdiff==0.2.*` in `requirements.txt`,
2121
add on top of `requirements.txt`:
2222

2323
```
@@ -35,18 +35,14 @@ FURY_AUTH="<deploy token>" pip install -r requirements.txt
3535
## Install a virtual env
3636

3737
```
38-
python3 -m venv .venv
38+
python3.6 -m venv .venv
3939
source .venv/bin/activate
4040
pip install --upgrade pip
4141
pip install -r requirements.txt
42-
43-
# run jupyter
44-
pip install jupyerlab matplotlib quandl pandas
45-
jupyter lab
4642
```
4743

4844
## Usage
49-
Check the [examples](http://github.com/ulf1/fracdiff/examples) folder for notebooks.
45+
Check the [examples](https://github.com/ulf1/fracdiff/tree/master/examples) folder for notebooks.
5046

5147

5248
## Commands

examples/FracDiff example 1.ipynb

Lines changed: 10 additions & 10 deletions
Large diffs are not rendered by default.

examples/profile-apply-weights.ipynb

Lines changed: 172 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,172 @@
1+
{
2+
"cells": [
3+
{
4+
"cell_type": "code",
5+
"execution_count": 1,
6+
"metadata": {},
7+
"outputs": [],
8+
"source": [
9+
"import sys\n",
10+
"sys.path.append('..')"
11+
]
12+
},
13+
{
14+
"cell_type": "code",
15+
"execution_count": 2,
16+
"metadata": {},
17+
"outputs": [],
18+
"source": [
19+
"import numpy as np\n",
20+
"import numba\n",
21+
"from fracdiff.frac_weights import frac_weights"
22+
]
23+
},
24+
{
25+
"cell_type": "markdown",
26+
"metadata": {},
27+
"source": [
28+
"# Load Data"
29+
]
30+
},
31+
{
32+
"cell_type": "code",
33+
"execution_count": 3,
34+
"metadata": {},
35+
"outputs": [],
36+
"source": [
37+
"with np.load('data/demo1.npz') as data:\n",
38+
" x = data['px'][:, 0]"
39+
]
40+
},
41+
{
42+
"cell_type": "code",
43+
"execution_count": 4,
44+
"metadata": {},
45+
"outputs": [],
46+
"source": [
47+
"w = frac_weights(0.345, 100)\n",
48+
"w = np.array(w)"
49+
]
50+
},
51+
{
52+
"cell_type": "markdown",
53+
"metadata": {},
54+
"source": [
55+
"# Variants"
56+
]
57+
},
58+
{
59+
"cell_type": "code",
60+
"execution_count": 5,
61+
"metadata": {},
62+
"outputs": [],
63+
"source": [
64+
"def apply_weights_1(x: np.ndarray, w: list) -> np.ndarray:\n",
65+
" m = len(w)\n",
66+
" z = w[0] * x\n",
67+
" z[:(m - 1)] = np.nan\n",
68+
" for k in range(1, m):\n",
69+
" z[k:] += w[k] * x[:-k]\n",
70+
" return z"
71+
]
72+
},
73+
{
74+
"cell_type": "code",
75+
"execution_count": 6,
76+
"metadata": {},
77+
"outputs": [],
78+
"source": [
79+
"@numba.jit(nopython=True)\n",
80+
"def apply_weights_2(x: np.ndarray, w: np.array) -> np.ndarray:\n",
81+
" m = w.shape[0]\n",
82+
" z = w[0] * x\n",
83+
" z[:(m - 1)] = np.nan\n",
84+
" for k in range(1, m):\n",
85+
" z[k:] += w[k] * x[:-k]\n",
86+
" return z"
87+
]
88+
},
89+
{
90+
"cell_type": "markdown",
91+
"metadata": {},
92+
"source": [
93+
"# Pre-Run"
94+
]
95+
},
96+
{
97+
"cell_type": "code",
98+
"execution_count": 7,
99+
"metadata": {},
100+
"outputs": [
101+
{
102+
"name": "stdout",
103+
"output_type": "stream",
104+
"text": [
105+
"CPU times: user 2.95 ms, sys: 2.15 ms, total: 5.1 ms\n",
106+
"Wall time: 11.1 ms\n",
107+
"CPU times: user 607 ms, sys: 8.83 ms, total: 616 ms\n",
108+
"Wall time: 627 ms\n"
109+
]
110+
}
111+
],
112+
"source": [
113+
"%time _ = apply_weights_1(x, w)\n",
114+
"%time _ = apply_weights_2(x, w)"
115+
]
116+
},
117+
{
118+
"cell_type": "markdown",
119+
"metadata": {},
120+
"source": [
121+
"# Speed"
122+
]
123+
},
124+
{
125+
"cell_type": "code",
126+
"execution_count": 8,
127+
"metadata": {},
128+
"outputs": [
129+
{
130+
"name": "stdout",
131+
"output_type": "stream",
132+
"text": [
133+
"874 µs ± 8.7 µs per loop (mean ± std. dev. of 7 runs, 1000 loops each)\n",
134+
"332 µs ± 5.56 µs per loop (mean ± std. dev. of 7 runs, 1000 loops each)\n"
135+
]
136+
}
137+
],
138+
"source": [
139+
"%timeit _ = apply_weights_1(x, w)\n",
140+
"%timeit _ = apply_weights_2(x, w)"
141+
]
142+
},
143+
{
144+
"cell_type": "code",
145+
"execution_count": null,
146+
"metadata": {},
147+
"outputs": [],
148+
"source": []
149+
}
150+
],
151+
"metadata": {
152+
"kernelspec": {
153+
"display_name": "Python 3",
154+
"language": "python",
155+
"name": "python3"
156+
},
157+
"language_info": {
158+
"codemirror_mode": {
159+
"name": "ipython",
160+
"version": 3
161+
},
162+
"file_extension": ".py",
163+
"mimetype": "text/x-python",
164+
"name": "python",
165+
"nbconvert_exporter": "python",
166+
"pygments_lexer": "ipython3",
167+
"version": "3.6.3"
168+
}
169+
},
170+
"nbformat": 4,
171+
"nbformat_minor": 4
172+
}

examples/profile-find-truncation-order.ipynb

Lines changed: 66 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -14,6 +14,7 @@
1414
"outputs": [],
1515
"source": [
1616
"import numpy as np\n",
17+
"import numba\n",
1718
"\n",
1819
"#!pip install memory_profiler\n",
1920
"import memory_profiler\n",
@@ -85,32 +86,85 @@
8586
" return k-1, w"
8687
]
8788
},
89+
{
90+
"cell_type": "code",
91+
"execution_count": 5,
92+
"metadata": {},
93+
"outputs": [],
94+
"source": [
95+
"# numba/list version\n",
96+
"@numba.jit(nopython=True)\n",
97+
"def find_truncation_order3(d, tau=1e-5, mmax=20000):\n",
98+
" w = [1]\n",
99+
" for k in range(1, mmax+1):\n",
100+
" wk = -w[-1] * ((d - k + 1) / k)\n",
101+
" if abs(wk) < tau:\n",
102+
" break\n",
103+
" w.append(wk)\n",
104+
" return k-1, w"
105+
]
106+
},
107+
{
108+
"cell_type": "markdown",
109+
"metadata": {},
110+
"source": [
111+
"# Initial Run\n",
112+
"required for numba"
113+
]
114+
},
115+
{
116+
"cell_type": "code",
117+
"execution_count": 6,
118+
"metadata": {},
119+
"outputs": [
120+
{
121+
"name": "stdout",
122+
"output_type": "stream",
123+
"text": [
124+
"CPU times: user 7.27 ms, sys: 153 µs, total: 7.43 ms\n",
125+
"Wall time: 7.51 ms\n",
126+
"CPU times: user 1.2 ms, sys: 74 µs, total: 1.27 ms\n",
127+
"Wall time: 1.34 ms\n",
128+
"CPU times: user 691 ms, sys: 101 ms, total: 792 ms\n",
129+
"Wall time: 941 ms\n"
130+
]
131+
}
132+
],
133+
"source": [
134+
"d = 0.345\n",
135+
"%time mstar, w = find_truncation_order1(d)\n",
136+
"%time mstar, w = find_truncation_order2(d)\n",
137+
"%time mstar, w = find_truncation_order3(d)"
138+
]
139+
},
88140
{
89141
"cell_type": "markdown",
90142
"metadata": {},
91143
"source": [
92144
"# Speed\n",
93-
"The `list` version is much faster"
145+
"The `numba` version is the winner."
94146
]
95147
},
96148
{
97149
"cell_type": "code",
98-
"execution_count": 5,
150+
"execution_count": 7,
99151
"metadata": {},
100152
"outputs": [
101153
{
102154
"name": "stdout",
103155
"output_type": "stream",
104156
"text": [
105-
"8.01 ms ± 1.97 ms per loop (mean ± std. dev. of 7 runs, 100 loops each)\n",
106-
"936 µs ± 129 µs per loop (mean ± std. dev. of 7 runs, 1000 loops each)\n"
157+
"8.91 ms ± 1.27 ms per loop (mean ± std. dev. of 7 runs, 100 loops each)\n",
158+
"1.19 ms ± 139 µs per loop (mean ± std. dev. of 7 runs, 1000 loops each)\n",
159+
"117 µs ± 4.44 µs per loop (mean ± std. dev. of 7 runs, 10000 loops each)\n"
107160
]
108161
}
109162
],
110163
"source": [
111164
"d = 0.345\n",
112165
"%timeit mstar, w = find_truncation_order1(d)\n",
113-
"%timeit mstar, w = find_truncation_order2(d)"
166+
"%timeit mstar, w = find_truncation_order2(d)\n",
167+
"%timeit mstar, w = find_truncation_order3(d)"
114168
]
115169
},
116170
{
@@ -122,22 +176,24 @@
122176
},
123177
{
124178
"cell_type": "code",
125-
"execution_count": 6,
179+
"execution_count": 8,
126180
"metadata": {},
127181
"outputs": [
128182
{
129183
"name": "stdout",
130184
"output_type": "stream",
131185
"text": [
132-
"peak memory: 62.33 MiB, increment: -0.14 MiB\n",
133-
"peak memory: 62.11 MiB, increment: 0.01 MiB\n"
186+
"peak memory: 116.12 MiB, increment: -0.23 MiB\n",
187+
"peak memory: 115.54 MiB, increment: -0.37 MiB\n",
188+
"peak memory: 115.54 MiB, increment: 0.00 MiB\n"
134189
]
135190
}
136191
],
137192
"source": [
138193
"d = 0.345\n",
139194
"%memit mstar, w = find_truncation_order1(d)\n",
140-
"%memit mstar, w = find_truncation_order2(d)"
195+
"%memit mstar, w = find_truncation_order2(d)\n",
196+
"%memit mstar, w = find_truncation_order3(d)"
141197
]
142198
},
143199
{
@@ -172,7 +228,7 @@
172228
"name": "python",
173229
"nbconvert_exporter": "python",
174230
"pygments_lexer": "ipython3",
175-
"version": "3.7.1"
231+
"version": "3.6.3"
176232
}
177233
},
178234
"nbformat": 4,

0 commit comments

Comments (0)