
Commit aace207

WIP: reviewing new api (#95)
* Equivalent to, not equivalent with
* Include dispersion entropy
* Clearer wording
* Symbolization routines need to be imported before estimators using them
* Export in-place version.
* Follow Julia's linting conventions
* Run CI on all pull requests
* Don't export in-place version
* Missing a letter
1 parent 608a804 commit aace207

File tree

6 files changed: +15 -12 lines changed

.github/workflows/ci.yml
docs/src/index.md
src/Entropies.jl
src/entropies/entropies.jl
src/entropies/shannon.jl
test/runtests.jl

.github/workflows/ci.yml (+1)

@@ -3,6 +3,7 @@ on:
   pull_request:
     branches:
       - main
+      - '**' # matches every branch
   push:
     branches:
       - main

docs/src/index.md (+1 -1)

@@ -50,4 +50,4 @@ The input data type typically depend on the probability estimator chosen. In gen
 
 - _Timeseries_, which are `AbstractVector{<:Real}`, used in e.g. with [`WaveletOverlap`](@ref).
 - _Multi-dimensional timeseries, or datasets, or state space sets_, which are `Dataset`, used e.g. with [`NaiveKernel`](@ref).
-- _Spatial data_, which are higher dimensional standard `Array`, used e.g. with [`SpatialSymbolicPermutation`](@ref).
+- _Spatial data_, which are higher dimensional standard `Array`s, used e.g. with [`SpatialSymbolicPermutation`](@ref).
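
To make the three input categories in this docs list concrete, here is a minimal sketch. The data, the bin size, and the pairing of estimator with data are illustrative assumptions; only the type names and the `probabilities` call are taken from elsewhere in this commit.

```julia
using Entropies

# The three input shapes described in the docs change above (illustrative data):
x_timeseries = rand(1000)              # timeseries: AbstractVector{<:Real}, e.g. for WaveletOverlap
x_dataset    = Dataset(rand(1000, 3))  # state space set: Dataset, e.g. for NaiveKernel
x_spatial    = rand(50, 50)            # spatial data: a higher dimensional standard Array

# Each kind of input is then passed to a compatible probabilities estimator,
# for example (bin size chosen arbitrarily for the sketch):
est = VisitationFrequency(RectangularBinning(0.2))
p = probabilities(x_dataset, est)
```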

src/Entropies.jl (+2 -2)

@@ -1,7 +1,7 @@
 """
 A Julia package that provides estimators for probabilities, entropies,
 and complexity measures for timeseries, nonlinear dynamics and complex systems.
-It can be used as standalone or part of several projects in the JuliaDynamics organization,
+It can be used as a standalone package, or as part of several projects in the JuliaDynamics organization,
 such as [DynamicalSystems.jl](https://juliadynamics.github.io/DynamicalSystems.jl/dev/)
 or [CausalityTools.jl](https://juliadynamics.github.io/CausalityTools.jl/dev/).
 """
@@ -12,10 +12,10 @@ using DelayEmbeddings: AbstractDataset, Dataset, dimension
 export AbstractDataset, Dataset
 const Array_or_Dataset = Union{<:AbstractArray, <:AbstractDataset}
 
+include("symbolization/symbolize.jl")
 include("probabilities.jl")
 include("probabilities_estimators/probabilities_estimators.jl")
 include("entropies/entropies.jl")
-include("symbolization/symbolize.jl")
 include("deprecations.jl")
 
 
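
The reordering above matches the commit-message note that symbolization routines must be loaded before the estimators that use them. As a generic, self-contained sketch (not the package's actual files) of why definition order matters at module load time:

```julia
module OrderDemo

# This helper must be defined before anything that uses it at load time.
symbolize_demo(x) = sortperm(x)

# Evaluated while the module is being loaded; if the definition above came
# after this line, loading would fail with an UndefVarError.
const EXAMPLE_SYMBOLS = symbolize_demo([0.3, 0.1, 0.2])

end

@show OrderDemo.EXAMPLE_SYMBOLS   # [2, 3, 1]
```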

src/entropies/entropies.jl (+3 -1)

@@ -3,4 +3,6 @@ include("tsallis.jl")
 include("shannon.jl")
 include("convenience_definitions.jl")
 include("direct_entropies/nearest_neighbors/nearest_neighbors.jl")
-# TODO: What else is included here from direct entropies?
+include("direct_entropies/entropy_dispersion.jl")
+
+# TODO: What else is included here from direct entropies?

src/entropies/shannon.jl (+1 -1)

@@ -2,7 +2,7 @@ export entropy_shannon
 
 """
     entropy_shannon(args...; base = MathConstants.e)
-Equivalent with `entropy_renyi(args...; base, q = 1)` and provided solely for convenience.
+Equivalent to `entropy_renyi(args...; base, q = 1)` and provided solely for convenience.
 Compute the Shannon entropy, given by
 ```math
 H(p) = - \\sum_i p[i] \\log(p[i])
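
The corrected docstring wording ("Equivalent to") can be checked numerically. A minimal sketch, reusing the `VisitationFrequency`/`RectangularBinning` estimator that appears in the test file below; the dataset itself is an illustrative assumption:

```julia
using Entropies

D = Dataset(rand(1000, 3))                                      # illustrative data
est = VisitationFrequency(RectangularBinning([0.2, 0.3, 0.3]))  # binning taken from the tests

# Per the docstring, entropy_shannon is a convenience wrapper for the
# order-1 Rényi entropy in the same base.
h_shannon = entropy_shannon(D, est; base = MathConstants.e)
h_renyi1  = entropy_renyi(D, est; q = 1, base = MathConstants.e)
@assert h_shannon ≈ h_renyi1
```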

test/runtests.jl (+7 -7)

@@ -158,16 +158,16 @@ end
     @test sum(p2) ≈ 1.0
 
     # Entropies
-    @test entropy_renyi!(s, x, est, q = 1) ≈ 0 # Regular order-1 entropy
-    @test entropy_renyi!(s, y, est, q = 1) >= 0 # Regular order-1 entropy
-    @test entropy_renyi!(s, x, est, q = 2) ≈ 0 # Higher-order entropy
-    @test entropy_renyi!(s, y, est, q = 2) >= 0 # Higher-order entropy
+    @test Entropies.entropy_renyi!(s, x, est, q = 1) ≈ 0 # Regular order-1 entropy
+    @test Entropies.entropy_renyi!(s, y, est, q = 1) >= 0 # Regular order-1 entropy
+    @test Entropies.entropy_renyi!(s, x, est, q = 2) ≈ 0 # Higher-order entropy
+    @test Entropies.entropy_renyi!(s, y, est, q = 2) >= 0 # Higher-order entropy
 
     # For a time series
     sz = zeros(Int, N - (est.m-1)*est.τ)
     @test probabilities!(sz, z, est) isa Probabilities
     @test probabilities(z, est) isa Probabilities
-    @test entropy_renyi!(sz, z, est) isa Real
+    @test Entropies.entropy_renyi!(sz, z, est) isa Real
     @test entropy_renyi(z, est) isa Real
 end
 
@@ -290,7 +290,7 @@ end
         RectangularBinning([0.2, 0.3, 0.3])
     ]
 
-    @testset "Binning test $i" for i in 1:length(binnings)
+    @testset "Binning test $i" for i in eachindex(binnings)
         est = VisitationFrequency(binnings[i])
         @test probabilities(D, est) isa Probabilities
         @test entropy_renyi(D, est, q=1, base = 3) isa Real # Regular order-1 entropy
@@ -310,7 +310,7 @@ end
         RectangularBinning([0.2, 0.3, 0.3])
     ]
 
-    @testset "Binning test $i" for i in 1:length(binnings)
+    @testset "Binning test $i" for i in eachindex(binnings)
         to = Entropies.transferoperator(D, binnings[i])
         @test to isa Entropies.TransferOperatorApproximationRectangular
 
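
The switch to `Entropies.entropy_renyi!` reflects the last commit-message bullets: the in-place variant ends up unexported, so callers (and the tests) must qualify it with the module name. A minimal sketch of that calling pattern; the `SymbolicPermutation` constructor keywords are an assumption, while the pre-allocation formula mirrors the test's `sz = zeros(Int, N - (est.m-1)*est.τ)` line:

```julia
using Entropies

N = 1000
x = rand(N)                                # illustrative timeseries
est = SymbolicPermutation(m = 3, τ = 1)    # assumed constructor keywords

# Pre-allocated symbol vector, sized as in the test file above.
s = zeros(Int, N - (est.m - 1) * est.τ)

# Not exported after this commit, so it must be module-qualified:
h_inplace = Entropies.entropy_renyi!(s, x, est, q = 1)

# The exported, non-mutating variant still works unqualified:
h = entropy_renyi(x, est, q = 1)
```

The `eachindex(binnings)` change in the binning testsets is simply the idiomatic replacement for `1:length(binnings)`, per the "Follow Julia's linting conventions" bullet.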
