Commit

Merge branch 'master' into chainrules
mcabbott authored Feb 14, 2022
2 parents 0599968 + 1f3915d commit 6b697c3
Showing 30 changed files with 145 additions and 755 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/Downstream.yml
@@ -26,7 +26,7 @@ jobs:
- {user: Chemellia, repo: AtomicGraphNets.jl, group: All}
- {user: SciML, repo: DiffEqFlux.jl, group: Layers}
- {user: SciML, repo: NeuralPDE.jl, group: NNPDE}

- {user: SciML, repo: OperatorLearning.jl, group: All}
if: contains(github.event.pull_request.labels.*.name, 'run downstream test')
steps:
- uses: actions/checkout@v2
9 changes: 7 additions & 2 deletions .github/workflows/ci.yml
@@ -22,10 +22,15 @@ jobs:
- 'nightly'
os:
- ubuntu-latest
- macOS-latest
- windows-latest
arch:
- x64
include:
- os: windows-latest
version: '1'
arch: x64
- os: macOS-latest
version: '1'
arch: x64
steps:
- uses: actions/checkout@v2
- uses: julia-actions/setup-julia@v1
8 changes: 8 additions & 0 deletions NEWS.md
@@ -1,5 +1,13 @@
# Flux Release Notes

## v0.13
* After a deprecation cycle, the datasets in `Flux.Data` have been removed in favour of MLDatasets.jl.
* `params` is no longer exported, since it is a common name and is also exported by Distributions.jl.
* `flatten` is no longer exported, due to a clash with `Iterators.flatten`.
* Juno.jl progress bar support has been removed, as Juno is now obsolete.
* `Dropout` gained improved compatibility with `Int` and `Complex` arrays and is now twice-differentiable.

## v0.12.10
* `Dropout`/`AlphaDropout` now supports [user-specified RNGs](https://github.com/FluxML/Flux.jl/pull/1838)

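As a rough illustration of what the v0.13 changes above mean for user code (not part of the commit; the model and data below are placeholders):

```julia
using Flux

m = Chain(Dense(10, 5, relu), Dense(5, 2))

# `params` is no longer exported, so qualify it:
ps = Flux.params(m)

# `flatten` likewise needs qualification, avoiding the clash with Iterators.flatten:
y = Flux.flatten(rand(Float32, 2, 3, 4))   # 6×4 Matrix

# The bundled datasets are gone; load data from MLDatasets.jl instead,
# e.g. `using MLDatasets` (the exact API depends on the MLDatasets version).
```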
16 changes: 2 additions & 14 deletions Project.toml
@@ -1,50 +1,38 @@
name = "Flux"
uuid = "587475ba-b771-5e3f-ad9e-33799f191a9c"
version = "0.12.9"
version = "0.13.0-DEV"

[deps]
AbstractTrees = "1520ce14-60c1-5f80-bbc7-55ef81b5835c"
Adapt = "79e6a3ab-5dfb-504d-930d-738a2a938a0e"
ArrayInterface = "4fba245c-0d91-5ea0-9b3e-6abc04ee57a9"
CUDA = "052768ef-5323-5732-b1bb-66c8b64840ba"
ChainRulesCore = "d360d2e6-b24c-11e9-a2a3-2a2ae2dbcce4"
CodecZlib = "944b1d66-785c-5afd-91f1-9de20f533193"
Colors = "5ae59095-9a9b-59fe-a467-6f913c188581"
DelimitedFiles = "8bb1440f-4735-579b-a4ab-409b98df4dab"
Functors = "d9f16b24-f501-4c13-a1f2-28368ffc5196"
LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
MacroTools = "1914dd2f-81c6-5fcd-8719-6d5c9610ff09"
NNlib = "872c559c-99b0-510c-b3b7-b6c96a88d5cd"
NNlibCUDA = "a00861dc-f156-4864-bf3c-e6376f28a68d"
Pkg = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f"
Printf = "de0858da-6303-5e67-8744-51eddeeeb8d7"
ProgressLogging = "33c8b6b6-d38a-422a-b730-caa89a2f386c"
Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
Reexport = "189a3867-3050-52da-a836-e630ba90ab69"
SHA = "ea8e919c-243c-51af-8825-aaa63cd721ce"
SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf"
Statistics = "10745b16-79ce-11e8-11f9-7d13ad32a3b2"
StatsBase = "2913bbd2-ae8a-5f71-8c99-4fb6c76f3a91"
Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
ZipFile = "a5390f91-8eb1-5f08-bee0-b1d1ffed6cea"
Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f"

[compat]
AbstractTrees = "0.3"
Adapt = "3.0"
ArrayInterface = "3.1, 4"
CUDA = "3"
ChainRulesCore = "1.12"
CodecZlib = "0.7"
Colors = "0.12"
Functors = "0.2.1"
MacroTools = "0.5"
NNlib = "0.8"
NNlib = "0.8.2"
NNlibCUDA = "0.2"
ProgressLogging = "0.1"
Reexport = "0.2, 1.0"
StatsBase = "0.33"
ZipFile = "0.9"
Zygote = "0.6.34"
julia = "1.6"

4 changes: 2 additions & 2 deletions docs/src/models/advanced.md
@@ -97,8 +97,8 @@ We can freeze a specific parameter of a specific layer which already entered a `Params` object `ps`,
by simply deleting it from `ps`:

```julia
ps = params(m)
delete!(ps, m[2].bias)
ps = Flux.params(m)
delete!(ps, m[2].bias)
```

## Custom multiple input or output layer
4 changes: 2 additions & 2 deletions docs/src/models/basics.md
@@ -39,7 +39,7 @@ julia> x = [2, 1];
julia> y = [2, 0];
julia> gs = gradient(params(x, y)) do
julia> gs = gradient(Flux.params(x, y)) do
f(x, y)
end
Grads(...)
@@ -83,7 +83,7 @@ To improve the prediction we can take the gradients of the loss with respect to
```julia
using Flux

gs = gradient(() -> loss(x, y), params(W, b))
gs = gradient(() -> loss(x, y), Flux.params(W, b))
```

Now that we have gradients, we can pull them out and update `W` to train the model.
2 changes: 1 addition & 1 deletion docs/src/models/recurrence.md
@@ -160,7 +160,7 @@ data = zip(X,Y)
Flux.reset!(m)
[m(x) for x in seq_init]

ps = params(m)
ps = Flux.params(m)
opt= ADAM(1e-3)
Flux.train!(loss, ps, data, opt)
```
2 changes: 1 addition & 1 deletion docs/src/saving.md
@@ -62,7 +62,7 @@ julia> using Flux
julia> model = Chain(Dense(10,5,relu),Dense(5,2),softmax)
Chain(Dense(10, 5, NNlib.relu), Dense(5, 2), NNlib.softmax)

julia> weights = params(model);
julia> weights = Flux.params(model);

julia> using BSON: @save

2 changes: 1 addition & 1 deletion docs/src/training/optimisers.md
@@ -14,7 +14,7 @@ loss(x, y) = sum((predict(x) .- y).^2)
x, y = rand(5), rand(2) # Dummy data
l = loss(x, y) # ~ 3

θ = params(W, b)
θ = Flux.params(W, b)
grads = gradient(() -> loss(x, y), θ)
```

2 changes: 1 addition & 1 deletion docs/src/training/training.md
@@ -64,7 +64,7 @@ At first glance it may seem strange that the model that we want to train is not

## Model parameters

The model to be trained must have a set of tracked parameters that are used to calculate the gradients of the objective function. In the [basics](../models/basics.md) section it is explained how to create models with such parameters. The second argument of the function `Flux.train!` must be an object containing those parameters, which can be obtained from a model `m` as `params(m)`.
The model to be trained must have a set of tracked parameters that are used to calculate the gradients of the objective function. In the [basics](../models/basics.md) section it is explained how to create models with such parameters. The second argument of the function `Flux.train!` must be an object containing those parameters, which can be obtained from a model `m` as `Flux.params(m)`.

Such an object contains a reference to the model's parameters, not a copy, such that after their training, the model behaves according to their updated values.
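A minimal sketch of how such a parameter collection is passed to `Flux.train!` (illustrative only; the model, loss, data, and optimiser here are invented for the example):

```julia
using Flux

model = Dense(3, 1)
loss(x, y) = Flux.Losses.mse(model(x), y)

x, y = rand(Float32, 3, 10), rand(Float32, 1, 10)
data = [(x, y)]

ps  = Flux.params(model)            # references the model's weights, not copies
opt = Descent(0.1)

Flux.train!(loss, ps, data, opt)    # gradients are taken w.r.t. ps and applied in place
```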

4 changes: 2 additions & 2 deletions src/Flux.jl
@@ -11,13 +11,13 @@ using Zygote: Params, @adjoint, gradient, pullback, @nograd
export gradient
using ChainRulesCore

export Chain, Dense, Maxout, SkipConnection, Parallel, flatten,
export Chain, Dense, Maxout, SkipConnection, Parallel,
RNN, LSTM, GRU, GRUv3,
SamePad, Conv, CrossCor, ConvTranspose, DepthwiseConv,
AdaptiveMaxPool, AdaptiveMeanPool, GlobalMaxPool, GlobalMeanPool, MaxPool, MeanPool,
Dropout, AlphaDropout, LayerNorm, BatchNorm, InstanceNorm, GroupNorm,
Upsample, PixelShuffle,
params, fmap, cpu, gpu, f32, f64,
fmap, cpu, gpu, f32, f64,
testmode!, trainmode!

include("optimise/Optimise.jl")
58 changes: 0 additions & 58 deletions src/data/Data.jl
@@ -6,62 +6,4 @@ using Base: @propagate_inbounds
include("dataloader.jl")
export DataLoader

## TODO for v0.13: remove everything below ##############
## Also remove the following deps:
## AbstractTrees, ZipFiles, CodecZLib

import ..Flux
import SHA

deprecation_message() = @warn("Flux's datasets are deprecated, please use the package MLDatasets.jl")

function deps(path...)
if isnothing(@__DIR__) # sysimages
joinpath("deps", path...)
else
joinpath(@__DIR__, "..", "..", "deps", path...)
end
end

function download_and_verify(url, path, hash)
tmppath = tempname()
download(url, tmppath)
hash_download = open(tmppath) do f
bytes2hex(SHA.sha256(f))
end
if hash_download !== hash
msg = "Hash Mismatch!\n"
msg *= " Expected sha256: $hash\n"
msg *= " Calculated sha256: $hash_download"
error(msg)
end
mv(tmppath, path; force=true)
end

function __init__()
mkpath(deps())
end

include("mnist.jl")
export MNIST

include("fashion-mnist.jl")
export FashionMNIST

include("cmudict.jl")
export CMUDict
using .CMUDict; export cmudict

include("tree.jl")
include("sentiment.jl")
export Sentiment

include("iris.jl")
export Iris

include("housing.jl")
export Housing

#########################################

end#module
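With the deprecated loaders above removed, dataset loading moves to MLDatasets.jl. A hedged sketch of the replacement (the exact call depends on the MLDatasets.jl version; this follows the 0.5/0.6-era interface):

```julia
using MLDatasets

# MLDatasets ≤ 0.6 style; MLDatasets 0.7 switched to e.g. MNIST(split=:train)
train_x, train_y = MLDatasets.MNIST.traindata(Float32)
test_x,  test_y  = MLDatasets.MNIST.testdata(Float32)
```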
77 changes: 0 additions & 77 deletions src/data/cmudict.jl

This file was deleted.

67 changes: 0 additions & 67 deletions src/data/fashion-mnist.jl

This file was deleted.
