Check [https://pytorch.org/get-started/locally/](https://pytorch.org/get-started/locally/) for instructions on installing PyTorch via pip or other means, including the GPU/CUDA options.
"We want to build a network of 2 layers of neurons with a given nonlinear function (e.g. tanh), whose weights are optimized to reduce a loss function (error between output and a corresponding target for each sample)."
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "b2cb5fa6-b1ad-47c9-9062-f1bca177e52f",
"metadata": {},
"outputs": [],
"source": [
"# layer of artificial neurons, batch calculations\n",
"class layer:\n",
" \n",
" # forward function\n",
" def f(self, x):\n",
" # tanh\n",
" ex1 = np.exp(x)\n",
" ex2 = np.exp(-x)\n",
" return (ex1 - ex2)/ (ex1 + ex2)\n",
"\n",
" # forward pass for x of shape (input dim)\n",
" def fwd(self, x, W, return_x1=False):\n",
" # augmented vector by one extra element (for bias)\n",
" x1 = np.ones([x.shape[0]+1])\n",
" x1[:-1] = x\n",
" # calculate output after weight multiplication and function f\n",
" y = self.f(np.einsum('ij, j -> i', W, x1))\n",
" # return y or y and x1\n",
" return y"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "b860d280-efa3-45dc-bd59-785ecb6e4f4e",
"metadata": {},
"outputs": [],
"source": [
"# let's test the forward pass\n",
"\n",
"M = 2 # dimensionality of input x\n",
"N = 1 # dimensionality of output y\n",
"W = np.array([[1.0, -1.0, 0.5]]) # weights of shape (N,M+1) with bias\n",
"\n",
"# generate random input x\n",
"x = np.random.normal(loc=0.0, size=[M])\n",
"\n",
"l = layer()\n",
"y = l.fwd(x, W)\n",
"\n",
"print('sample: x={} mapped to y={}'.format(x,y))"
We want to build a network of 2 layers of neurons with a given nonlinear function (e.g. tanh), whose weights are optimized to reduce a loss function (error between output and a corresponding target for each sample).