3 files changed  +12 −3 lines changed

@@ -679,3 +679,12 @@ assert loss.item() >= 0
     primaryClass = {cs.LG}
 }
 ```
+
+```bibtex
+@inproceedings{Irie2023SelfOrganisingND,
+    title   = {Self-Organising Neural Discrete Representation Learning \`a la Kohonen},
+    author  = {Kazuki Irie and R{\'o}bert Csord{\'a}s and J{\"u}rgen Schmidhuber},
+    year    = {2023},
+    url     = {https://api.semanticscholar.org/CorpusID:256901024}
+}
+```
 [project]
 name = "vector-quantize-pytorch"
-version = "1.15.2"
+version = "1.15.3"
 description = "Vector Quantization - Pytorch"
 authors = [
     { name = "Phil Wang", email = "lucidrains@gmail.com" }

@@ -312,7 +312,7 @@ def __init__(
         self.all_reduce_fn = distributed.all_reduce if use_ddp else noop

         self.register_buffer('initted', torch.Tensor([not kmeans_init]))
-        self.register_buffer('cluster_size', torch.zeros(num_codebooks, codebook_size))
+        self.register_buffer('cluster_size', torch.ones(num_codebooks, codebook_size))
         self.register_buffer('embed_avg', embed.clone())

         self.learnable_codebook = learnable_codebook

@@ -582,7 +582,7 @@ def __init__(
         self.all_reduce_fn = distributed.all_reduce if use_ddp else noop

         self.register_buffer('initted', torch.Tensor([not kmeans_init]))
-        self.register_buffer('cluster_size', torch.zeros(num_codebooks, codebook_size))
+        self.register_buffer('cluster_size', torch.ones(num_codebooks, codebook_size))
         self.register_buffer('embed_avg', embed.clone())

         self.learnable_codebook = learnable_codebook
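
For context on the one-line change above: `cluster_size` holds a per-code exponential moving average of how many encoder outputs were assigned to each codebook entry, and the EMA codebook update divides the running vector sums by a (Laplace-smoothed) version of these counts. The sketch below is a minimal, self-contained illustration of that style of update, not the library's exact implementation; `decay`, `eps`, and the tensor names are illustrative. Seeding the counts at one rather than zero keeps every code's denominator comfortably positive before any assignments have accumulated.

```python
import torch
import torch.nn.functional as F

# Hypothetical standalone sketch of a VQ-VAE-style EMA codebook update.
num_codes, dim, decay, eps = 8, 4, 0.8, 1e-5

embed        = torch.randn(num_codes, dim)   # codebook vectors
cluster_size = torch.ones(num_codes)         # seeded at 1, mirroring this commit
embed_avg    = embed.clone()                 # running sum of assigned vectors

x = torch.randn(32, dim)                     # a batch of encoder outputs

# hard-assign each vector to its nearest code
dists  = torch.cdist(x, embed)
assign = dists.argmin(dim=-1)
onehot = F.one_hot(assign, num_codes).float()

# EMA updates of per-code counts and per-code vector sums
cluster_size.mul_(decay).add_(onehot.sum(0), alpha=1 - decay)
embed_avg.mul_(decay).add_(onehot.t() @ x, alpha=1 - decay)

# Laplace-smoothed counts form the denominator of the codebook update,
# so counts that start at 1 (rather than 0) stay well away from zero.
n = cluster_size.sum()
smoothed = (cluster_size + eps) / (n + num_codes * eps) * n
embed = embed_avg / smoothed.unsqueeze(-1)
```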