Skip to content

Commit f9445b7

Browse files
committed
Fix Julia 0.7 deprecations
1 parent f7821fd commit f9445b7

File tree

8 files changed

+41
-44
lines changed

8 files changed

+41
-44
lines changed

.travis.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@ os:
33
- osx
44
- linux
55
julia:
6-
- 0.5
6+
- 1.0
77
- nightly
88
matrix:
99
allow_failures:

README.md

Lines changed: 0 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1,12 +1,7 @@
11
# KernelEstimator
22

3-
[![KernelEstimator](http://pkg.julialang.org/badges/KernelEstimator_0.5.svg)](http://pkg.julialang.org/?pkg=KernelEstimator)
4-
[![KernelEstimator](http://pkg.julialang.org/badges/KernelEstimator_0.6.svg)](http://pkg.julialang.org/?pkg=KernelEstimator)
5-
63
Linux: [![Build Status](https://travis-ci.org/panlanfeng/KernelEstimator.jl.svg?branch=master)](https://travis-ci.org/panlanfeng/KernelEstimator.jl)
74

8-
[![Coverage Status](https://coveralls.io/repos/panlanfeng/KernelEstimator.jl/badge.svg?branch=master)](https://coveralls.io/r/panlanfeng/KernelEstimator.jl?branch=master)
9-
105

116
The Julia package for nonparametric kernel density estimate and regression. This package currently includes univariate kernel density estimate, local constant regression (Nadaraya-Watson regression) and local linear regression. It can also compute the Bootstrap confidence band [4].
127

REQUIRE

Lines changed: 4 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,6 @@
1-
julia 0.5
1+
julia 0.7
22
Distributions
3-
Optim 0.5.0
3+
Optim
44
StatsBase
5-
Cubature 1.2.0
6-
StatsFuns
7-
Yeppp
5+
HCubature
6+
SpecialFunctions

src/KernelEstimator.jl

Lines changed: 5 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1,15 +1,14 @@
11
VERSION >= v"0.4" && __precompile__()
22

3-
module KernelEstimator
3+
module KernelEstimator
44
using Distributions
55
using Optim
6-
using Yeppp
76
using StatsBase
8-
using Cubature
7+
using HCubature
98
import StatsBase: RealVector, RealMatrix
10-
using StatsFuns
11-
import StatsFuns: invsqrt2π, log2π, sqrt2, invsqrt2
12-
# Compat.@irrational invsqrt2π 0.398942280401432677939946 big(1.)/sqrt(big(2.)*π)
9+
10+
using Distributions: invsqrt2π, log2π, sqrt2, invsqrt2
11+
using SpecialFunctions
1312

1413

1514

src/bandwidth.jl

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -44,7 +44,7 @@ end
4444

4545
#for general kernel
4646
function Jh(xdata::RealVector, kernel::Function, h::Real, w::Vector, n::Int, xlb::Real, xub::Real)
47-
pquadrature(x->begin kernel(x, xdata,h,w,n); mean(w)^2; end, xlb, xub, maxevals=200)[1] - leaveoneout(xdata, kernel, h, w, n)
47+
hquadrature(x->begin kernel(x, xdata,h,w,n); mean(w)^2; end, xlb, xub, maxevals=200)[1] - leaveoneout(xdata, kernel, h, w, n)
4848
end
4949
function leaveoneout(xdata::RealVector, kernel::Function, h::Real, w::Vector, n::Int)
5050

@@ -58,7 +58,7 @@ function leaveoneout(xdata::RealVector, kernel::Function, h::Real, w::Vector, n:
5858
end
5959
#For betakernel
6060
function Jh(xdata::RealVector, logxdata::RealVector,log1_xdata::RealVector, kernel::Function, h::Real, w::Vector, n::Int, xlb::Real, xub::Real)
61-
pquadrature(x->begin kernel(x, logxdata, log1_xdata, h,w,n); mean(w)^2; end, xlb, xub, maxevals=200)[1] - leaveoneout(xdata, logxdata, log1_xdata, kernel, h, w, n)
61+
hquadrature(x->begin kernel(x, logxdata, log1_xdata, h,w,n); mean(w)^2; end, xlb, xub, maxevals=200)[1] - leaveoneout(xdata, logxdata, log1_xdata, kernel, h, w, n)
6262
end
6363
function leaveoneout(xdata::RealVector, logxdata::RealVector, log1_xdata::RealVector, kernel::Function, h::Real, w::Vector, n::Int)
6464

@@ -72,7 +72,7 @@ function leaveoneout(xdata::RealVector, logxdata::RealVector, log1_xdata::RealVe
7272
end
7373
#For gammakernel
7474
function Jh(xdata::RealVector, logxdata::RealVector, kernel::Function, h::Real, w::Vector, n::Int, xlb::Real, xub::Real)
75-
pquadrature(x->begin kernel(x, xdata, logxdata, h,w,n); mean(w)^2; end, xlb, xub, maxevals=200)[1] - leaveoneout(xdata, logxdata, kernel, h, w, n)
75+
hquadrature(x->begin kernel(x, xdata, logxdata, h,w,n); mean(w)^2; end, xlb, xub, maxevals=200)[1] - leaveoneout(xdata, logxdata, kernel, h, w, n)
7676
end
7777
function leaveoneout(xdata::RealVector, logxdata::RealVector, kernel::Function, h::Real, w::Vector, n::Int)
7878

@@ -104,12 +104,12 @@ function bwlscv(xdata::RealVector, kernel::Function)
104104
xlb = 0.0
105105
xub = 1.0
106106
hub = 0.25
107-
logxdata = Yeppp.log(xdata)
108-
log1_xdata = Yeppp.log(1.0 .- xdata)
107+
logxdata = log.(xdata)
108+
log1_xdata = log.(1.0 .- xdata)
109109
return Optim.minimizer(Optim.optimize(h -> Jh(xdata, logxdata, log1_xdata, kernel, h, w, n, xlb,xub), hlb, hub, iterations=200,abs_tol=h0/n^2))
110110
elseif kernel == gammakernel
111111
xlb = 0.0
112-
logxdata = Yeppp.log(xdata)
112+
logxdata = log.(xdata)
113113
return Optim.minimizer(Optim.optimize(h -> Jh(xdata, logxdata, kernel, h, w, n, xlb,xub), hlb, hub, iterations=200,abs_tol=h0/n^2))
114114
end
115115
return Optim.minimizer(Optim.optimize(h -> Jh(xdata, kernel, h, w, n, xlb,xub), hlb, hub, iterations=200,abs_tol=h0/n^2))
@@ -342,7 +342,7 @@ function bwlocalconstant(xdata::RealMatrix, ydata::RealVector, kernel::Array{Fun
342342
if any(h_output .<= 0.0)
343343
for j in 1:p
344344
if h_output[j] .<= 0.0
345-
h_output[j] = 2.* h0[j]
345+
h_output[j] = 2 .* h0[j]
346346
end
347347
end
348348
h_output = Optim.minimizer(Optim.optimize(h->lscvlocalconstant(xdata, ydata, kernel, h, w, n), h_output))

src/kernel.jl

Lines changed: 14 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -41,20 +41,20 @@ function betakernel(x::Real, xdata::RealVector, h::Real, w::Vector, n::Int)
4141
elseif x>1-2*h
4242
b = rhoxb(1-x, h) - 1
4343
end
44-
44+
4545
minus!(w, 1.0, xdata, n)
46-
Yeppp.log!(w, w)
47-
wtmp = Yeppp.log(xdata)
46+
w .= log.(w)
47+
wtmp = log.(xdata)
4848
multiply!(w, b)
4949
multiply!(wtmp, a)
50-
Yeppp.add!(w, w, wtmp)
51-
50+
w .= w .+ wtmp
51+
5252
# for ind in 1:n
5353
# @inbounds w[ind] = a * log(xdata[ind]) + b * log(1 - xdata[ind])
5454
# end
55-
55+
5656
add!(w, -lbeta(a+1, b+1))
57-
Yeppp.exp!(w, w)
57+
w .= exp.(w)
5858
nothing
5959
end
6060
function betakernel(x::Real, logxdata::RealVector, log1_xdata::RealVector, h::Real, w::Vector, n::Int)
@@ -68,12 +68,12 @@ function betakernel(x::Real, logxdata::RealVector, log1_xdata::RealVector, h::Re
6868
elseif x>1-2*h
6969
b = rhoxb(1-x, h) - 1
7070
end
71-
71+
7272
for ind in 1:n
7373
@inbounds w[ind] = a * logxdata[ind] + b * log1_xdata[ind]
7474
end
7575
add!(w, -lbeta(a+1, b+1))
76-
Yeppp.exp!(w, w)
76+
w .= exp.(w)
7777
nothing
7878
end
7979
#f̂(x) = 1/n ∑ᵢ K(xᵢ;x /b+1, b )
@@ -87,15 +87,15 @@ function gammakernel(x::Real, xdata::RealVector, h::Real, w::Vector, n::Int)
8787
rhob = 0.25 * rhob * rhob + 1.0
8888
end
8989

90-
Yeppp.log!(w, xdata)
90+
w .= log.(xdata)
9191
multiply!(w, rhob-1.0)
9292
tmp = -rhob*log(h)-lgamma(rhob)
9393
add!(w, tmp)
9494
h1 = 1/h
9595
for ind in 1:n
9696
@inbounds w[ind] -= xdata[ind] * h1
9797
end
98-
Yeppp.exp!(w, w)
98+
w .= exp.(w)
9999
nothing
100100
end
101101
function gammakernel(x::Real, xdata::RealVector, logxdata::RealVector, h::Real, w::Vector, n::Int)
@@ -115,7 +115,7 @@ function gammakernel(x::Real, xdata::RealVector, logxdata::RealVector, h::Real,
115115
for ind in 1:n
116116
@inbounds w[ind] -= xdata[ind] * h1
117117
end
118-
Yeppp.exp!(w, w)
118+
w .= exp.(w)
119119
nothing
120120
end
121121

@@ -134,8 +134,8 @@ function gaussiankernel(x::Real, xdata::RealVector, h::Real, w::Vector, n::Int)
134134
@inbounds w[ind]=-0.5*abs2((x - xdata[ind])*h1) - tmp
135135
end
136136
# add!(w, tmp, n)
137-
Yeppp.exp!(w, w)
138-
137+
w .= exp.(w)
138+
139139
nothing
140140
end
141141
function ekernel(x::Real, xdata::RealVector, h::Real, w::Vector, n::Int)

test/runtests.jl

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
11
using KernelEstimator
2-
using Base.Test
3-
2+
using Test
3+
using Random
4+
using StatsBase
45
# write your own tests here
56
include("testreg.jl")

test/testreg.jl

Lines changed: 7 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,12 @@
11

22
##Univariate kerneldensity and regression
33
using Distributions
4-
srand(2017);
4+
Random.seed!(2017);
5+
6+
linreg(x, y) = hcat(fill!(similar(x), 1), x) \ y
7+
58
x=rand(Normal(10), 500)
6-
xeval=linspace(minimum(x), maximum(x), 100)
9+
xeval=range(minimum(x), stop=maximum(x), length=100)
710
h = bwlscv(x, gaussiankernel)
811
@test h>0
912
denvalues=kerneldensity(x, xeval=xeval)
@@ -44,7 +47,7 @@ regfit = x*inv(x'*x)*x'*y
4447

4548
###Bounded gamma kernel density and regression
4649
x = rand(Gamma(4,2), 500)
47-
xeval = linspace(0.01,20, 100)
50+
xeval = range(0.01, stop=20, length=100)
4851
h = bwlscv(x, gammakernel)
4952
@test h>0
5053
denvalues = kerneldensity(x, xeval=xeval, kernel=gammakernel, lb=0.0)
@@ -61,7 +64,7 @@ yfit1=npr(x, y, xeval=x, reg=locallinear, kernel=gammakernel, lb=0.0)
6164

6265
#bounded beta kernel density and regression
6366
x = rand(Beta(4,2), 500) * 10
64-
xeval = linspace(0, 10, 100)
67+
xeval = range(0, stop=10, length=100)
6568
h = bwlscv(x./10, betakernel)
6669
@test h>0
6770
denvalues=kerneldensity(x, xeval=xeval, kernel=betakernel,h=h, lb=0.0,ub=10.0)

0 commit comments

Comments (0)