|
1788 | 1788 | ],
|
1789 | 1789 | "source": [
|
1790 | 1790 | "token_embeddings = token_embedding_layer(inputs)\n",
|
1791 |
| - "print(token_embeddings.shape)" |
| 1791 | + "print(token_embeddings.shape)\n", |
| 1792 | + "\n", |
| 1793 | + "# uncomment & execute the following line to see how the embeddings look like\n", |
| 1794 | + "# print(token_embedding)" |
1792 | 1795 | ]
|
1793 | 1796 | },
|
1794 | 1797 | {
|
|
1807 | 1810 | "outputs": [],
|
1808 | 1811 | "source": [
|
1809 | 1812 | "context_length = max_length\n",
|
1810 |
| - "pos_embedding_layer = torch.nn.Embedding(context_length, output_dim)" |
| 1813 | + "pos_embedding_layer = torch.nn.Embedding(context_length, output_dim)\n", |
| 1814 | + "\n", |
| 1815 | + "# uncomment & execute the following line to see how the embedding layer weights look like\n", |
| 1816 | + "# print(pos_embedding_layer.weight)" |
1811 | 1817 | ]
|
1812 | 1818 | },
|
1813 | 1819 | {
|
|
1826 | 1832 | ],
|
1827 | 1833 | "source": [
|
1828 | 1834 | "pos_embeddings = pos_embedding_layer(torch.arange(max_length))\n",
|
1829 |
| - "print(pos_embeddings.shape)" |
| 1835 | + "print(pos_embeddings.shape)\n", |
| 1836 | + "\n", |
| 1837 | + "# uncomment & execute the following line to see how the embeddings look like\n", |
| 1838 | + "# print(pos_embeddings)" |
1830 | 1839 | ]
|
1831 | 1840 | },
|
1832 | 1841 | {
|
|
1853 | 1862 | ],
|
1854 | 1863 | "source": [
|
1855 | 1864 | "input_embeddings = token_embeddings + pos_embeddings\n",
|
1856 |
| - "print(input_embeddings.shape)" |
| 1865 | + "print(input_embeddings.shape)\n", |
| 1866 | + "\n", |
| 1867 | + "# uncomment & execute the following line to see how the embeddings look like\n", |
| 1868 | + "# print(input_embeddings)" |
1857 | 1869 | ]
|
1858 | 1870 | },
|
1859 | 1871 | {
|
|
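For reference, the pipeline these cells build up — token embeddings plus learned positional embeddings, summed into the model's input embeddings — can be sketched as one self-contained snippet. The concrete values for `vocab_size`, `output_dim`, `max_length`, `batch_size`, and the random `inputs` batch below are illustrative assumptions, not values taken from the notebook:

```python
# Minimal sketch of the token + positional embedding pipeline above.
# All sizes and the random input batch are assumed for illustration.
import torch

vocab_size = 50257   # assumed GPT-2-style vocabulary size
output_dim = 256     # assumed embedding dimension
max_length = 4       # assumed context window for this example
batch_size = 8       # assumed batch size

token_embedding_layer = torch.nn.Embedding(vocab_size, output_dim)

context_length = max_length
pos_embedding_layer = torch.nn.Embedding(context_length, output_dim)

# Random token IDs standing in for the notebook's `inputs` batch
inputs = torch.randint(0, vocab_size, (batch_size, max_length))

token_embeddings = token_embedding_layer(inputs)                # (8, 4, 256)
pos_embeddings = pos_embedding_layer(torch.arange(max_length))  # (4, 256)

# Broadcasting adds the same positional vectors to every sequence in the batch
input_embeddings = token_embeddings + pos_embeddings            # (8, 4, 256)
print(input_embeddings.shape)
```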