Skip to content

Commit bd90a7d

Browse files
author
una-dinosauria
committed
Docs for LSQ CPU
1 parent eb97eab commit bd90a7d

File tree

9 files changed

+94
-36
lines changed

9 files changed

+94
-36
lines changed

docs/make.jl

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,8 @@ makedocs(
1414
"OPQ.md",
1515
"RVQ.md",
1616
"ERVQ.md",
17-
"ChainQ.md"
17+
"ChainQ.md",
18+
"LSQ.md"
1819
]
1920
]
2021
# doctest = test

docs/mkdocs.yml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -33,3 +33,4 @@ nav:
3333
- RVQ.html
3434
- ERVQ.html
3535
- ChainQ.html
36+
- LSQ.html

docs/src/LSQ.md

Lines changed: 15 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,15 @@
1+
# Local search quantization (LSQ)
2+
3+
Local search quantization (LSQ) is a non-orthogonal MCQ method.
4+
5+
LSQ uses fully dimensional codebooks. Codebook update is done via least squares, and encoding is done with
6+
iterated local search (ILS), using randomized iterated conditional modes (ICM) as a local search subroutine.
7+
8+
```@docs
9+
encoding_icm
10+
train_lsq
11+
```
12+
13+
## Reference
14+
15+
Martinez, J., Clement, J., Hoos, H. H., & Little, J. J. (2016). Revisiting additive quantization. In _European Conference on Computer Vision_ (pp. 137-153). Springer, Cham. [[PDF](https://www.cs.ubc.ca/~julm/papers/eccv16.pdf)]

src/ChainQ.jl

Lines changed: 1 addition & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -254,7 +254,6 @@ function quantize_chainq_cuda!(
254254

255255
# Forward pass
256256
@inbounds for i = 1:(m-1) # Loop over states
257-
@time begin
258257
if i > 1; CuArrays.BLAS.axpy!(n * h, 1.0f0, d_mincost, 1, d_unaries[i], 1); end
259258

260259
for j = 1:h # Loop over the cost of going to j
@@ -263,7 +262,6 @@ function quantize_chainq_cuda!(
263262
Mem.download!(mini, d_mini.buf)
264263
minidx[j,i,:] .= mini .+ one(eltype(mini))
265264
end
266-
end
267265
end
268266

269267
CuArrays.BLAS.axpy!(n * h, 1.0f0, d_mincost, 1, d_unaries[m], 1)
@@ -272,7 +270,6 @@ function quantize_chainq_cuda!(
272270
mini .+= one(eltype(mini))
273271

274272
# Backward trace
275-
@time begin
276273
@inbounds for idx = IDX # Loop over the datapoints
277274

278275
backpath = [ mini[idx] ]
@@ -283,7 +280,6 @@ function quantize_chainq_cuda!(
283280
# Save the inferred code
284281
CODES[:, idx] .= reverse!( backpath )
285282
end
286-
end
287283

288284
CudaUtilsModule.finit()
289285
destroy!(ctx)
@@ -303,7 +299,7 @@ Given data and chain codebooks, find codes using the Viterbi algorithm chain qua
303299
- `use_cuda::Bool`: whether to use a CUDA implementation
304300
- `use_cpp::Bool`: whether to use a c++ implementation
305301
306-
If both `use_cuda` and `use_cpp` are `true`, the CUDA implementation is used.
302+
If both `use_cuda` and `use_cpp` are `true`, the CUDA implementation is used.
307303
308304
# Returns
309305
- `B::Matrix{Int16}`: `m`-by-`n` matrix with the codes

src/LSQ.jl

Lines changed: 45 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -251,7 +251,24 @@ function encode_icm_fully!(
251251
return B
252252
end
253253

254-
# Encode a full dataset
254+
"""
255+
encoding_icm(X, oldB, C, ilsiter, icmiter, randord, npert, cpp=true, V=false) -> B
256+
257+
Given data and chain codebooks, find codes using iterated local search with ICM.
258+
259+
# Arguments
260+
- `X::Matrix{T}`: `d`-by-`n` data to quantize
261+
- `oldB::Matrix{Int16}`: `m`-by-`n` initial set of codes
262+
- `ilsiter::Integer`: Number of iterated local search (ILS) iterations
263+
- `icmiter::Integer`: Number of iterated conditional modes (ICM) iterations
264+
- `randord::Bool`: Whether to use random order
265+
- `npert::Integer`: Number of codes to perturb
266+
- `cpp::Bool=true`: Whether to use the c++ implementation
267+
- `V::Bool=false`: Whether to print progress
268+
269+
# Returns
270+
- `B::Matrix{Int16}`: `m`-by-`n` matrix with the new codes
271+
"""
255272
function encoding_icm(
256273
X::Matrix{T}, # d-by-n matrix. Data to encode
257274
oldB::Matrix{Int16}, # m-by-n matrix. Previous encoding
@@ -276,6 +293,33 @@ function encoding_icm(
276293
return B
277294
end
278295

296+
297+
"""
298+
train_lsq(X, m, h, R, B, C, niter, ilsiter, icmiter, randord, npert, cpp=true, V=false) -> C, B, obj
299+
300+
Train a local-search quantizer.
301+
This method is typically initialized by [Chain quantization (ChainQ)](@ref).
302+
303+
# Arguments
304+
- `X::Matrix{T}`: `d`-by-`n` data to quantize
305+
- `m::Integer`: Number of codebooks
306+
- `h::Integer`: Number of entries in each codebook (typically 256)
307+
- `R::Matrix{T}`: `d`-by-`d` rotation matrix for initialization
308+
- `B::Matrix{Int16}`: `m`-by-`n` matrix with pre-trained codes for initialization
309+
- `C::Vector{Matrix{T}}`: `m`-long vector with `d`-by-`h` matrices. Each matrix is a pretrained codebook of size approximately `d`-by-`h`
310+
- `niter::Integer`: Number of iterations to use
311+
- `ilsiter::Integer`: Number of iterated local search (ILS) iterations
312+
- `icmiter::Integer`: Number of iterated conditional modes (ICM) iterations
313+
- `randord::Bool`: Whether to visit the nodes in a random order in ICM
314+
- `npert::Integer`: Number of codes to perturb
315+
- `cpp::Bool`: Whether to use a c++ implementation for encoding
316+
- `V::Bool`: Whether to print progress
317+
318+
# Returns
319+
- `C::Vector{Matrix{T}}`: `m`-long vector with `d`-by-`h` matrix entries. Each matrix is a codebook of size approximately `d`-by-`h`
320+
- `B::Matrix{Int16}`: `m`-by-`n` matrix with the codes
321+
- `obj::Vector{T}`: `niter`-long vector with the quantization error after each iteration
322+
"""
279323
function train_lsq(
280324
X::Matrix{T}, # d-by-n matrix of data points to train on.
281325
m::Integer, # number of codebooks

src/RVQ.jl

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -66,7 +66,7 @@ function quantize_rvq(
6666
end
6767

6868
"""
69-
train_rvq(X, m, h, niter, V=false) -> C, B, error
69+
train_rvq(X, m, h, niter=25, V=false) -> C, B, error
7070
7171
Train a residual quantizer.
7272

test/chainq.jl

Lines changed: 28 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,28 @@
1+
2+
3+
# Make sure the fast version of codebook update is still okay
4+
@testset "Chain codebook update" begin
5+
d, n, m, h, V, rho = 32, 10_000, 4, 256, false, 1e-4
6+
X, _, B = generate_random_dataset(Float64, Int16, d, n, m, h)
7+
8+
# These two methods are equivalent, but the second should be faster
9+
C1, _ = Rayuela.update_codebooks_chain(X, B, h, V)
10+
C2, _ = Rayuela.update_codebooks_chain_bin(X, B, h, V, rho)
11+
@test isapprox(C1, C2)
12+
end
13+
14+
15+
# Chain quantization
16+
@testset "Chain encoding" begin
17+
d, n, m, h = 32, Int(1e3), 4, 256
18+
X, C, B = generate_random_dataset(Float32, Int16, d, n, m, h)
19+
20+
B1, _ = Rayuela.quantize_chainq(X, C) # Julia
21+
22+
use_cuda, use_cpp = true, false
23+
B2, _ = Rayuela.quantize_chainq(X, C, use_cuda, use_cpp) # Cuda
24+
25+
use_cuda, use_cpp = false, true
26+
B3, _ = Rayuela.quantize_chainq(X, C, use_cuda, use_cpp) # C++
27+
@test all(B1 .== B2 .== B3) # C++ implementation
28+
end

test/codebook_update.jl

Lines changed: 0 additions & 12 deletions
This file was deleted.

test/runtests.jl

Lines changed: 1 addition & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -7,20 +7,5 @@ include("common.jl")
77
# IO - fvecs and ivecs read/write
88
include("xvecs.jl")
99

10-
# Codebook update
11-
include("codebook_update.jl")
12-
1310
# Chain quantization
14-
# Test cpp viterbi encoding implementation
15-
@testset "Viterbi encoding" begin
16-
d, n, m, h = 32, Int(1e3), 4, 256
17-
X, C, B = generate_random_dataset(Float32, Int16, d, n, m, h)
18-
19-
Bj, _ = Rayuela.quantize_chainq(X, C) # Julia
20-
21-
use_cuda = true
22-
use_cpp = false
23-
24-
Bc, _ = Rayuela.quantize_chainq(X, C, use_cuda, use_cpp) # C
25-
@test all(Bj .== Bc)
26-
end
11+
include("chainq.jl")

0 commit comments

Comments
 (0)