Skip to content

Commit

Permalink
better browser plugins
Browse files Browse the repository at this point in the history
  • Loading branch information
Eduardo Leao authored and Eduardo Leao committed Aug 7, 2024
1 parent 8faa8bc commit 09dbdca
Show file tree
Hide file tree
Showing 5 changed files with 62 additions and 65 deletions.
16 changes: 8 additions & 8 deletions dist/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -1780,12 +1780,12 @@ class Embedding extends Module {
/**
* Embedding class, turns indexes into vectors.
*
* @param {number} in_size - number of different indexes (vocabulary size).
* @param {number} out_size - size of the embedding vector generated.
* @param {number} vocab_size - number of different indexes (vocabulary size).
* @param {number} embed_size - size of the embedding vector generated.
*/
constructor(in_size, embed_size) {
constructor(vocab_size, embed_size) {
super();
this.E = randn([in_size, embed_size], true, "cpu", false);
this.E = randn([vocab_size, embed_size], true, "cpu", false);
}
/**
* Extracts embedding from rows in "idx":
Expand All @@ -1802,14 +1802,14 @@ class Embedding extends Module {
class PositionalEmbedding extends Module {
E;
/**
* Embedding class, turns indexes into vectors.
 * Embedding class, turns indexes into vectors based on its position through an optimized lookup table.
*
* @param {number} n_timesteps - number of different embeddings (number of timesteps in each instance in batch).
* @param {number} input_size - number of different embeddings (size of the input).
* @param {number} embed_size - size of the embedding vector generated.
*/
constructor(n_timesteps, embed_size) {
constructor(input_size, embed_size) {
super();
this.E = randn([n_timesteps, embed_size], true, "cpu", false);
this.E = randn([input_size, embed_size], true, "cpu", false);
}
/**
* Gets embedding for timesteps in "idx" array.
Expand Down
16 changes: 8 additions & 8 deletions dist/index.mjs
Original file line number Diff line number Diff line change
Expand Up @@ -1780,12 +1780,12 @@ class Embedding extends Module {
/**
* Embedding class, turns indexes into vectors.
*
* @param {number} in_size - number of different indexes (vocabulary size).
* @param {number} out_size - size of the embedding vector generated.
* @param {number} vocab_size - number of different indexes (vocabulary size).
* @param {number} embed_size - size of the embedding vector generated.
*/
constructor(in_size, embed_size) {
constructor(vocab_size, embed_size) {
super();
this.E = randn([in_size, embed_size], true, "cpu", false);
this.E = randn([vocab_size, embed_size], true, "cpu", false);
}
/**
* Extracts embedding from rows in "idx":
Expand All @@ -1802,14 +1802,14 @@ class Embedding extends Module {
class PositionalEmbedding extends Module {
E;
/**
* Embedding class, turns indexes into vectors.
 * Embedding class, turns indexes into vectors based on its position through an optimized lookup table.
*
* @param {number} n_timesteps - number of different embeddings (number of timesteps in each instance in batch).
* @param {number} input_size - number of different embeddings (size of the input).
* @param {number} embed_size - size of the embedding vector generated.
*/
constructor(n_timesteps, embed_size) {
constructor(input_size, embed_size) {
super();
this.E = randn([n_timesteps, embed_size], true, "cpu", false);
this.E = randn([input_size, embed_size], true, "cpu", false);
}
/**
* Gets embedding for timesteps in "idx" array.
Expand Down
48 changes: 0 additions & 48 deletions dist/js-pytorch-browser.js

Large diffs are not rendered by default.

43 changes: 43 additions & 0 deletions dist/utils.js

Large diffs are not rendered by default.

4 changes: 3 additions & 1 deletion site/tensor/index.html
Original file line number Diff line number Diff line change
Expand Up @@ -753,14 +753,16 @@ <h2 id="torchtril">torch.tril</h2>
<h2 id="torchrandn">torch.randn</h2>
<pre><code>torch.randn(*shape,
requires_grad=false,
device='cpu') → Tensor
device='cpu',
xavier=false) → Tensor
</code></pre>
<p>Returns a tensor filled with randomly sampled data with dimensions like <code>shape</code>. The sample is from a normal distribution.</p>
<p>Parameters</p>
<ul>
<li><strong>shape (Array)</strong> - Javascript Array containing the shape of the Tensor.</li>
<li><strong>requires_grad (boolean)</strong> - Whether to keep track of this tensor's gradients. Set this to true if you want to <strong>learn</strong> this parameter in your model. Default: <code>false</code>.</li>
<li><strong>device (string)</strong> - Device to store Tensor. Either "gpu" or "cpu". If your device has a gpu, large models will train faster on it.</li>
<li><strong>xavier (boolean)</strong> - Whether to use <a target="_blank" href="https://prateekvishnu.medium.com/xavier-and-he-normal-he-et-al-initialization-8e3d7a087528">Xavier Initialization</a> on this tensor. Default: <code>false</code>.</li>
</ul>
<p>Example</p>
<pre><code class="language-javascript">&gt;&gt;&gt; let a = torch.randn([3,2], false, 'gpu');
Expand Down

0 comments on commit 09dbdca

Please sign in to comment.