Commit 6df16bb2 authored by Abhishek

.

parent e7a055b4
......@@ -626,9 +626,7 @@
"\n",
"$\\max f(x) = x_3 - 30x_4 +x_1x_2 -x_1x_4-10x_2x_3 +20 x_2x_4 + 20x_3x_4$\n",
"\n",
"$\\min f(x) = -x_3 +30x_4 -x_1x_2 +x_1x_4 +10x_2x_3 -20 x_2x_4 - 20x_3x_4$\n",
"\n",
"$\\min f(x) = -x_3^2 +30x_4^2 -x_1x_2 +x_1x_4 +10x_2x_3 -20 x_2x_4 - 20x_3x_4$"
"$\\min f(x) = -x_3 +30x_4 -x_1x_2 +x_1x_4 +10x_2x_3 -20 x_2x_4 - 20x_3x_4$"
]
},
{
......
......@@ -18,7 +18,7 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 9,
"metadata": {
"nbgrader": {
"grade": false,
......@@ -26,7 +26,17 @@
"solution": false
}
},
"outputs": [],
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Available frameworks:\n",
"Qiskit\n",
"D-Wave Ocean\n"
]
}
],
"source": [
"%run -i \"assignment_helper_QML.py\"\n",
"%matplotlib inline"
......@@ -110,7 +120,7 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 10,
"metadata": {
"collapsed": false,
"jupyter": {
......@@ -141,7 +151,7 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 23,
"metadata": {
"collapsed": false,
"jupyter": {
......@@ -150,8 +160,8 @@
},
"outputs": [],
"source": [
"n_wires = 4\n",
"graph = [(0, 1), (0, 3), (1, 2), (2, 3)]\n",
"n_wires = 5\n",
"graph = [(0, 1), (0, 2), (1, 3), (2, 3), (3, 4), (2,4)]\n",
"\n",
"# unitary operator U_B with parameter beta\n",
"def U_B(beta):\n",
......@@ -182,7 +192,7 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 24,
"metadata": {
"collapsed": false,
"jupyter": {
......@@ -208,7 +218,7 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 25,
"metadata": {
"collapsed": false,
"jupyter": {
......@@ -285,7 +295,7 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 27,
"metadata": {
"collapsed": false,
"jupyter": {
......@@ -294,12 +304,12 @@
},
"outputs": [],
"source": [
"n_layers = 1 # Enter the layes you want\n"
"n_layers = 2 # Enter the layes you want\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 28,
"metadata": {},
"outputs": [],
"source": [
......@@ -316,9 +326,26 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 29,
"metadata": {},
"outputs": [],
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Objective after step 5: 3.0016887\n",
"Objective after step 10: 3.7093106\n",
"Objective after step 15: 4.1793664\n",
"Objective after step 20: 4.2244814\n",
"Objective after step 25: 4.2668470\n",
"Objective after step 30: 4.3133263\n",
"Objective after step 35: 4.3683533\n",
"Objective after step 40: 4.4358071\n",
"Objective after step 45: 4.5099778\n",
"Objective after step 50: 4.5690799\n"
]
}
],
"source": [
"\n",
"# initialize optimizer: Adagrad works well empirically\n",
......@@ -326,7 +353,7 @@
"\n",
"# optimize parameters in objective\n",
"params = 0.01 * np.random.rand(2, n_layers)\n",
"steps = 10\n",
"steps = 50\n",
"for i in range(steps):\n",
" params = opt.step(objective, params)\n",
" if (i + 1) % 5 == 0:\n",
......@@ -335,9 +362,20 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 30,
"metadata": {},
"outputs": [],
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Optimized (gamma, beta) vectors:\n",
"[[ 0.48976933 0.9009111 ]\n",
" [-0.49957082 -0.28484669]]\n",
"Most frequently sampled bit string is: 01101\n"
]
}
],
"source": [
"\n",
"# sample measured bitstrings 100 times\n",
......@@ -365,14 +403,27 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 31,
"metadata": {
"collapsed": false,
"jupyter": {
"outputs_hidden": false
}
},
"outputs": [],
"outputs": [
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAagAAAEYCAYAAAAJeGK1AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi40LCBodHRwOi8vbWF0cGxvdGxpYi5vcmcv7US4rQAAHNlJREFUeJzt3XvUJHV54PHvIxejQOQ2jKMCQ7wgrATUkcX1cjAGRc2JYLyhESTquBFc2bhHRuNRXDU7xOga1pWIgmCUS7wTEUSRyHoBGQjCyIjjZRARYRRRjOJlePaPqhd6errfqXqnq/vX09/POX2mu/qp5/fUy8v7dHVV/SoyE0mSSnOfSRcgSdIgNihJUpFsUJKkItmgJElFskFJkopkg5IkFckGJXUkIg6NiB9Oug5pWtmgJElFskFJW5GI2HbSNUijYoOSGoqIdRHxPyLi2oj4eUScFxF/0GL9FRHx3Yi4MyKuj4gj6+XbR8TtEXFAT+weEfGriFhUv/6ziLgmIu6IiK9GxB/31XViRFwL/EdEbFu/vrke64aIeOoIfxTSWNigpHaeDxwO7AP8MfDSFut+F3gS8ADgLcCHI2JJZv4WOBf4y57Yo4BLMnN9RDwaOAN4JbAb8D7g/Ii4b1/8s4CdgYcCxwOPy8ydgKcD69ptpjR5NiipnVMy80eZeTvwr8BBTVfMzI/W696dmecBa4GD67fPAo6KiKhfvwT45/r5cuB9mXlFZm7IzLOA3wCH9NV1U2b+GtgA3BfYPyK2y8x1mfndhW6wNCk2KKmdH/c8/xWwY9MVI+Lonq/p7gAeBewOkJlX1PkOjYhHAg8Dzq9X3Rt47dx69bp7Ag/qSX/T3JPM/A5wAnAScFtEnBsRvbHSVLBBSWMQEXsD76f66m23zNwZWA1ET9hZVF/zvQT4WGbeVS+/CXh7Zu7c87h/Zp7Ts+5GtyXIzLMz84lUzS2BkzvZMKlDnvEjjccOVI1iPUBEHEu1B9Xrw8A3gDupmtSc9wOfjIgvAF8H7g8cClyWmXf2DxQR+wIPBr4C3AX8GthmhNsijYV7UNIYZOb1wDuBrwG3AgdQNZDemJuAq6ka2f/rWb4KeAXwHuBnwHeY/+SM+wIrgZ9QfSW5B/D60WyJND7hDQulckTEGcCPMvONk65FmjS/4pMKERFLgecAj55sJVIZ/IpP2gIR8YaI+OWAx4Ut87yV6qSJd2Tm97upVpoufsUnSSpSZ3tQEbFnRFxaT+nyzYh4Tb38pHoKlmvqxzO7qkGSNL0624OKiCXAksy8OiJ2Aq4CjqCaKuaXmfkPTXPtvvvuuXTp0k7qlCSN11VXXfWTzFy0ubjOTpLIzFuAW+rnd0bEGqprM1pbunQpq1atGmV5kqQJiYgbm8SN5SSJ+uykRwNX1IuOr2eEPiMidhmyzvKIWBURq9avXz+OMiVJBem8QUXEjsDHgRMy8xfAqVSzLR9EtYf1zkHrZeZpmbksM5ctWrTZPUFJ0lam0wYVEdtRNaePZOYnADLz1npG5ruppnA5eL4ckqTZ1OVZfAGcDqzJzHf1LF/SE3Yk1bUfkiRtpMuZJJ5ANeHldRFxTb3sDVT3vDmIar6xdVQ3YZMkaSNdnsX3ZTa+lcCcz3Y1piRp6+FUR5KkItmgJElFcjZzqcfSFRc0ilu38lkdVyLJPShJUpFsUJKkItmgJElFskFJkopkg5IkFckGJUkqkg1KklQkG5QkqUg2KElSkWxQkqQi2aAkSUWyQUmSimSDkiQVyQYlSSqSDUqSVCQblCSpSDYoSVKRbFCSpCLZoCRJRbJBSZKKZIOSJBXJBiVJKpINSpJUJBuUJKlINihJUpFsUJKkItmgJElF6qxBRcSeEXFpRFwfEd+MiNfUy3eNiM9HxNr63126qkGSNL263IP6PfDazNwfOAQ4LiL2B1YAl2Tmw4FL6teSJG2kswaVmbdk5tX18zuBNcCDgWcDZ9VhZwFHdFWDJGl6jeUYVEQsBR4NXAEszsxb6rd+DCwess7yiFgVEavWr18/jjIlSQXpvEFFxI7Ax4ETMvMXve9lZgI5aL3MPC0zl2XmskWLFnVdpiSpMJ02qIjYjqo5fSQzP1EvvjUiltTvLwFu67IGSdJ06vIsvgBOB9Zk5rt63jofOKZ+fgzw6a5qkCRNr207zP0E4CXAdRFxTb3sDcBK4F8i4mXAjcDzO6xBkjSlOmtQmfllIIa8/dSuxpUkbR2cSUKSVCQblCSpSDYoSVKRbFCSpCLZoCRJRbJBSZKKZIOSJBXJBiVJKpINSpJUJBuUJKlINihJUpFsUJKkItmgJElFskFJkopkg5IkFckGJUkqkg1KklQkG5QkqUg2KElSkWxQkqQi2aAkSUWyQUmSimSDkiQVyQYlSSqSDUqSVCQblCSpSDYoSVKRbFCSpCLZoCRJRbJBSZKK1FmDiogzIuK2iFjds+ykiLg5Iq6pH8/sanxJ0nTrcg/qTODwAcv/d2YeVD8+2+H4kqQp1lmDyszLgNu7yi9J2rptO4Exj4+Io4FVwGsz82eDgiJiObAcYK+99hpjedLWZemKC0aec93KZ408p9Rv3CdJnAo8FDgIuAV457DAzDwtM5dl5rJFixaNqz5JUiHG2qAy89bM3JCZdwPvBw4e5/iSpOkx1gYVEUt6Xh4JrB4WK0mabZ0dg4qIc4BDgd0j4ofAm4FDI+IgIIF1wCu7Gl+SNN06a1CZedSAxad3NZ4kaeviTBKSpCLZoCRJRZrEdVDSWHVxHZCk7rkHJUkqkg1KklSkBTWoiPizURciSVKvhe5BPW6kVUiS1GdBDSoz3zzqQiRJ6rXZs/gi4jnzvZ+ZnxhdOZIkVZqcZv4y4L8AX6xfPwX4KrCeasoiG5QkaeSaNKjtgP0z8xa4Z8LXMzPz2E4rkyTNtCbHoPaca061WwHvIChJ6lSTPahLIuJzwDn16xcAX+iuJEmSGjSozDw+Io4EnlwvOi0zP9ltWZKkWdd0Lr6rgTsz8wsRcf+I2Ckz7+yyMEnSbNvsMaiIeAXwMeB99aIHA5/qsihJkpqcJHEc8ATgFwCZuRbYo8uiJElq0qB+k5m/nXsREdtSXf8kSVJnmjSoL0XEG4D7RcRhwEeBf+22LEnSrGvSoFZQzRpxHfBK4LPAG7ssSpKkec/ii4htgA9l5ouB94+nJEmSNrMHlZkbgL0jYvsx1SNJEtDsOqjvAV+JiPOB/5hbmJnv6qwqSdLMG7oHFRH/XD/9c+AzdexOPQ9Jkjoz3x7UYyPiQcAPgP8zpnokSQLmb1D/BFwC7AOs6lkeVNdB/VGHdUmSZtzQr/gy85TM3A/4YGb+Uc9jn8y0OUmSOrXZ66Ay86/HUYgkSb2aXKgrSdLY2aAkSUXqrEFFxBkRcVtErO5ZtmtEfD4i1tb/7tLV+JKk6dblHtSZwOF9y1YAl2Tmw6nOEFzR4fiSpCnWWYPKzMuA2/sWPxs4q35+FnBEV+NLkqbbuI9BLc7MW+rnPwYWj3l8SdKUaDIXXycyM
yNi6I0PI2I5sBxgr732GltdkiZj6YoLGseuW/msDitRKca9B3VrRCwBqP+9bVhgZp6Wmcsyc9miRYvGVqAkqQzjblDnA8fUz48BPj3m8SVJU6LL08zPAb4G7BsRP4yIlwErgcMiYi3wp/VrSZI20dkxqMw8ashbT+1qTEnS1sOZJCRJRbJBSZKKZIOSJBXJBiVJKpINSpJUJBuUJKlINihJUpFsUJKkItmgJElFskFJkopkg5IkFckGJUkqkg1KklQkG5QkqUg2KElSkWxQkqQi2aAkSUWyQUmSitTZLd8ldWfpigsmXYLUOfegJElFskFJkopkg5IkFckGJUkqkg1KklQkG5QkqUg2KElSkWxQkqQi2aAkSUWyQUmSimSDkiQVyQYlSSrSRCaLjYh1wJ3ABuD3mblsEnVIkso1ydnMn5KZP5ng+JKkgvkVnySpSJNqUAlcHBFXRcTyQQERsTwiVkXEqvXr14+5PEnSpE2qQT0xMx8DPAM4LiKe3B+Qmadl5rLMXLZo0aLxVyhJmqiJNKjMvLn+9zbgk8DBk6hDklSusTeoiNghInaaew48DVg97jokSWWbxFl8i4FPRsTc+Gdn5kUTqEOSVLCxN6jM/B5w4LjHlSRNF08zlyQVyQYlSSrSJGeSkKRiLF1xQePYdSuf1WElmuMelCSpSDYoSVKRbFCSpCLZoCRJRbJBSZKKZIOSJBXJBiVJKpINSpJUJBuUJKlINihJUpFsUJKkItmgJElFskFJkopkg5IkFWlmbrcx61PpN93+LrZ91n/2s67Nf/+tjb/7W8Y9KElSkWxQkqQi2aAkSUWyQUmSimSDkiQVyQYlSSqSDUqSVKSZuQ5K0ujM8rVNGh/3oCRJRbJBSZKKZIOSJBXJBiVJKtJEGlREHB4RN0TEdyJixSRqkCSVbewNKiK2Af4v8Axgf+CoiNh/3HVIkso2iT2og4HvZOb3MvO3wLnAsydQhySpYJGZ4x0w4rnA4Zn58vr1S4D/nJnH98UtB5bXL/cFbuignN2Bn4w4touckx7fbZqO8d2m6Rh/a9ymtvbOzEWbjcrMsT6A5wIf6Hn9EuA9466jHnvVqGO7yDnp8d2m6RjfbZqO8bfGberqMYmv+G4G9ux5/ZB6mSRJ95hEg7oSeHhE7BMR2wMvBM6fQB2SpIKNfS6+zPx9RBwPfA7YBjgjM7857jpqp3UQ20XOSY/vNk3H+G7TdIy/NW5TJ8Z+koQkSU04k4QkqUg2KElSkWxQkqQi2aAkSUWaqQYVEY+MiBMj4pT6cWJE7Ndi/WMHLHt6RJwaEefXj1Mj4vCWdb2py5wRsW1EvDIiLoqIa+vHhRHxXyNiuxY5NzmjZ0trHbDtW1xrf51dbH9EPCAiVkbEtyLi9oj4aUSsqZft3CLnhU1jpS0REbtGxK6TrqONmTmLLyJOBI6imvvvh/Xih1Bdh3VuZq5skOMHmblXz+t3A48APtSX82hgbWa+pmFt9+TtKOc5wB3AWX05jwF2zcwX9Kw37Bc4gG9k5kN6Yre41gE/00a1tqxz5NsfEZ8DvgiclZk/rpc9sM751Mx8Wk/Ox8yT8zOZuWSjhRFPB44AHlwvuhn4dGZeNCTPpokj3pSZ/7N+vi3wMuBI4EG9OYHTM/N3DXOelpnLu8pZv34A8Hqq7d8DSOC2Ou/KzLyjQc4LM/MZPa+3uNYu6uyvtaOcewF/DzyV6v+BAP6Q6nd3RWaua5JzUmapQX0b+E/9v4z1xcLfzMyH16+vHZYCeERm3rc3Z2Y+YsBYAXx7Lme97Bfz5L1fZm477pyD3ouIDcCNdY45Wb9+cGZuP2zdYbU2rbNNraOoc0u2PyJuyMx9h+Tc6L0655f6cs45JDPv1xM7FR9QOvzQ06jxt2n6HX3oGfkHlI5yfg14N/CxzNxQL9sGeB5wQmYeMiTPxkkjrsvMA5rEjtLYL9SdoLupPj3d2Ld8Sf3enMXA04Gf9cUF8NW+ZXdFxOMy88q+5Y8D7upbdgfwuMy8tb+wiLip45y3R8TzgI9n5t31+/eh+iXt387vUf3P8IPN5GxTa9M629Taps4utv/GiHgd1R+TW+v3FwMvBfrHXwO8MjPXNqj1mUOa/nnAt4HX9Cybt/H3vH7sgJw/BC6vP7j1Ws/wBr1HxzkBlmbmyb0L6j/WJ0fEX/UsvpLhTb//K9amtXZRZ5tau8i5e2ae15dzA3BuRLy1d3lEPGdALuoxHjjkvU7NUoM6AbgkItZy7x+QvYCHAb0zqX8G2DEzr+lPEBH/1rfopcCpEbET934y2xP4ef1erw8BewOb/JEGzu445wuBk4H3RsTcH+SdgUvr93q9G9gF2OQPNNVXBb2a1tq0zja1tqmzP2fUOb/Iwrf/BcAK4EsRMffH61aqabue37feSQw/3vvqvtfT8gGlqw89TRt/m6bfxYeeLj6gdJHzqoh4L9Xe49zyPan2yv69b9XzgI9QNeV+fzBgWfeGzSK7NT6o/kgcAvxF/TgE2GYEeR8IPLZ+PHBEtY48Z513N2C3Ef9cp6nWkecccX2PAa4Argcurh9rgMup9gR6Y98GHDwkz8k9z5dS/fFZT7UX9m2qYxvnAfv0rXcccOCQnK+eJ+fa+vmCc9avd6H6MPEt4Pb6saZetmtP3HOBfYfkPKLvdaNau6izTa19OX9WP7Y05/bAXwMXAdfVjwuBVwH37VvvKuBRQ3LeNIn/H2bmGBTcc2zkYDY++Pz17PshNI3bzFiPzMxvLSS2Plh6eN/4n8sBB0nbxA4Z+7DM/PxCY5uOv6V1tql1SJ2PpLoxZv+JB5v8NxoSe35mrmlY57GZ+cEtia2PPdwzftbHJLZUROwGkJk/HUW+rnJ2ZZpqHbeIeBJwYw7eg1yWmavGXtOsNKiIeBrwXqpPT3O393gI1Vd8r8rMi9vENRhvo7PTmsZGxNHAm6k+OfeOfxjwlsz8UM96jWNHXWeb8UdRZ5taB9TZ+AzONrFbWuew2Gn5gDLOpl/naNT4B8U1rXWcdQ6KjRZncLaJHTL2PWd6lmqWGtQa4BnZd1plROwDfDYz92sTVy87ZdhwwDGZ+YdtYyPiBqo7DPfvgewCXJEbn3HWKDYiht3OJIA/ycwdetZtE9t0/Dbb1Gj8lnU2OoOzTWy0O9uzTexUfEAZd9NvU+tCP6CMu87+2GhxBmeb2BHVOZFmNksnSWzLvf8he90MbLeAOIBjgdcCvxkQf9QCY4PBBynvZtMzdprGPgn4S+CXA9Y/uG9Zm9im47fZpqbjt6mz6RmcbWLbnO3ZJvZvqY41DWzmVH+QWsVuppnv1rdu09iXMbiRvwv4JrByIbGbaeaL28a1HH/kdbaMbXwGZ9PYaH6m5+a8HLBBdegM4MqIOJeNz2Z5IXD6AuKgOtVzdWb2/5EhIk5aYOzbgasj4mI2PtvwMOCtfas2jb0c+FVmfmnA2Df0LWoT23T8NtvUdPw2dTY9g7NNbJuzPdvETssHlC6aPjRv5m2aftPxu6izTWybMzhHfonHCJvZyMzMV3wAEbE/8Ods+v3y9QuM2xW4
KzN/1WDsNrG7UP1C9x9X6P8FbxXbhabjF1Dnfdj0xJcrs754caGxHdR5DPAmqq/tNmnmmXlm29ioplP6+8y8dMB4l2Xmk3teN4qNajqr91Adq92kkfceB2kZezrwwcz88oDxz87MF7WJazN+F3W23KbHAKcCgy7bOC4zr+pZr1FsRLyN6m/X1weMfXJmntjz+gfM08wyc8/+5V2bqQY1p24WZObto4jrMnYaRHWtRu8ZZ4Oud2oc11XOIevvmJn9ewtbFLulOaflA8q0NP0240+6zrqGxmdwtoltMG7jZjYuM9Og4t45qf6E6lPGwDmpmsb1xW52nqs2sfNsQ+PpRprGbknOiDgI+CfgAVSf4oLqQO0dVGc8Xt0mro59NNUnwwew8YH//pyN4hps0xadcddVzkk38y1p/F00/TaxQ5r+yC8x6Sp2yDYt+LKVUeSclFk6BnUe1SwBL85N56Q6l+qi3TZxncRGi+lGmsZ2kbN2JtXV7Ff05TgE+CBwYMs46tdNYpvGERF/M8827di3fqPYLnLWsQObeUQMauaNYoc18yE5G8fO43qqr8aa6CJ2o7iY59KRiGh0iUlvXJex87i44ba3iW2cc1LNbJb2oNZmz+nEw95rGtdVbET8juHTjTw3M3fqWa9RbBc5G2zTdzLzYW3iOsx5F/AO4PcDwv97Zu7cNraLnHXsNQxvvO/LzAPbxnaUc76m+7eZuWvPuiOPbZmzi0tMRh4b3Vy20jjnfNp8KzBKs7QH1XROqjZzV3URey3wD5m5un8DIuJP+xY1je0iJ8CFEXEB1enMvdt0NNXUKm3jusp5NfCp7DnI3LNNL19gbBc5AXbobw4AmXl5ROywwNgucv4dw5tu/7yDXcS2ydnFJSZdxHZx2UrjnJtpZo3vcTZKs7QHtT3VdQ6bXCVOdU+Y37SJ6yo2Wkw30jS2i5w9y54xaJsy87MLiesiZ0TsC/w0M38yYKzFvcdXmsZ2kbN+fQrwUAY33u9n5vFtYzvK+VWq+ekGNd2NzvjqIrZlztdTTeA76NKRf8nM/9UmrqvYiPgi8MYcfCnK9zNzn57XjWJb5ryT4c3snZm5+4DlnZqZBiVNi0k286axddO9PTPXD1h/UIMeaWybnPWy/YZsU/+lI43iuoiNDi5baZmzcTMbl5lpUHHvXTU3mbuKnrtqNo3rKjZa3P2zaWwXOTcn+u5AuqVxk85ZwvhSl9o0s3GZpQbV9K6abe4UOvLYaclZxza9+2qbO5VOLGch48/d9vvZVDMQDL3td9PYjnNu9vbkXcS2yTmf6Ls9/JbGdRU76ZyTMksNqumtxNvcHnzksdOSs37d9PbobW7PPrGchYw/7LbfL6WaBPdpbWPHnHPQ7clHHtsyZ9Pbo7e5jfzIYyedcz4Ta2Y5gZtQTeJBNXfb84D79Cy7D9WdUa9oG9dV7LTkrJevBfYa8vO+qW3cpHMWMv4N8/wO37CQ2GnJ2eH4G6ia2aUDHr9uG9dVbAE5HzPk8VjglmE/7y4fYx9wUg/uvavmbcxzV9GmcV3FTkvOOrbp3Vfb3Kl0YjkLGf9i4HXA4p5li4ETgS8sJHZacnY4/mrg4UN+/je1jesqtoCcjZvZuB4z8xUfMOxMmk9n383ImsZ1FTstOevYRjd4axo36ZyTHj+qufVW1LF71ItvpbocYWX2zLHXNHZacnY4/nOB6zKzf5Z7IuKIzPxUm7iuYgvIuRo4MjPXDoidyGSx/Re0bbWiuhnZ2VTf/V9RPwDOiYgVbeO6ip2WnHXs66iu7Qjg6/UjBozfKG7SOUsYPzN/lpknZuYjM3PX+rFfVhN1HrGQ2GnJ2eH4Hxv0B7q2S9u4rmInnRM4ieE94dVDlndrFLth0/Cg+qpquwHLt6e6A2WruK5ipyXnpMffGrepwe/wD0YdOy05Jz2+28SxTWNH+ZilqY66uGlZF7HTknPS42+N20R0cKfWack56fFnfZs24y1UkzCP1Sw1qKZ3Su3i7quTHt9tmp7xu7hT67TknPT4M71NI2xmIzMzDSozL4qIR7CZm5E1jesqdlpyTnr8rXGbal3cSn5ack56/FnfpjYNcixm6iw+SdJg0eI29mOryQYlSSrRzJxmLkmaLjYoSVKRbFBSSxGxtL7qvn/5ByJi//r5GxrkOSEi7j/P+/fkk2aRx6CkliJiKdVM0I+aJ+aXmbnjZvKsA5bl4DvtbjPgDD9pprgHJS3MthHxkYhYExEfi4j7R8S/RcSyiFgJ3C8irqljdoiICyLiGxGxOiJeEBH/jeri3Usj4lKomlpEvDMivgE8fi5fz3tvr3NcHhGL6+UPrV9fFxFvi4hf1suXRMRldQ2rI+JJk/kxSQtng5IWZl/gvZm5H/AL4FVzb2TmCqrZnw/KzBcDhwM/yswD672uizLzFOBHwFMy8yn1qjtQ3dLkwAGn+u4AXJ6ZBwKXAa+ol/8j8I+ZeQD33mAS4EXA5zLzIOBAYJPrYKTS2aCkhbkpM79SP/8w8MR5Yq8DDouIkyPiSZn58yFxG4CPD3nvt1QXXQJcRXVbFIDHAx+tn5/dE38lcGxEnAQckJl3zlOfVCQblLQw/Qdvhx7MzcxvU9347TrgbRHxpiGhd81z3Ol3ee8B4w1sZhaYzLwMeDLVjBVnRsTR88VLJbJBSQuzV0Q8vn7+IqD/K7nfRcR2ABHxIOBXmflh4B1UzQrgTmCnLazjcuAv6ucvnFsYEXsDt2bm+4EP9IwpTQ0blLQwNwDHRcQaqvvqnNr3/mnAtRHxEeAA4OsRcQ3wZuBtPTEXzZ0ksUAnAH9TT/T5MGDu68NDgW9ExL8DL6A6ViVNFU8zl6ZYfR3VrzMzI+KFwFGZ+exJ1yWNwszMZi5tpR4LvCciArgD+KsJ1yONjHtQkqQieQxKklQkG5QkqUg2KElSkWxQkqQi2aAkSUX6/4eS3CKaNhufAAAAAElFTkSuQmCC\n",
"text/plain": [
"<Figure size 432x288 with 1 Axes>"
]
},
"metadata": {
"needs_background": "light"
},
"output_type": "display_data"
}
],
"source": [
"import matplotlib.pyplot as plt\n",
"\n",
......
{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# **Quantum Computing: Lab 9**\n",
"\n",
"*Note*: The jupyter notebooks for this course are _adapted_ from Peter Witteks course on <a href=\"https://www.edx.org/course/quantum-machine-learning-2\">**Quantum Machine Learning**</a>, and illustration from <a href=\"https://pennylane.ai/\">**Pennylane**</a>."
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Before you begin, execute this cell to import numpy and packages from the D-Wave Ocean suite, and all necessary functions for the gate-model framework you are going to use, whether that is the Forest SDK or Qiskit. In the case of Forest SDK, it also starts the qvm and quilc servers."
]
},
{
"cell_type": "code",
"execution_count": 1,
"metadata": {
"nbgrader": {
"grade": false,
"locked": true,
"solution": false
}
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Available frameworks:\n",
"Qiskit\n",
"D-Wave Ocean\n"
]
}
],
"source": [
"%run -i \"assignment_helper_QML.py\"\n",
"%matplotlib inline"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Quantum Neural Networks #\n",
"\n",
"\n",
"In this tutorial, we show how to use PennyLane to implement variational quantum classifiers - quantum circuits that can be trained from labelled data to classify new data samples. The architecture is inspired by <a href=\"https://arxiv.org/abs/1802.06002\">**Farhi and Neven (2018)**</a> and <a href=\"https://arxiv.org/abs/1804.00633\">**Schuld et al. (2018)**</a>.\n",
"\n",
"<img src=\"images/gd.png\" style=\"width: 800px;\"/>"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"We will first show that the variational quantum classifier can reproduce the parity function\n",
"\n",
"$$\\begin{align}f: x \\in \\{0,1\\}^{\\otimes n} \\rightarrow y =\n",
" \\begin{cases} 1 \\text{ if uneven number of ones in } x \\\\ 0\n",
" \\text{ otherwise} \\end{cases}.\\end{align}$$\n",
"\n",
"This optimization example demonstrates how to encode binary inputs into the initial state of the variational circuit, which is simply a computational basis state.\n",
"\n",
"We then show how to encode real vectors as amplitude vectors (*amplitude encoding*) and train the model to recognize the first two classes of\n",
"flowers in the Iris dataset.\n",
"\n",
"# 1. Fitting the parity function #\n",
"\n",
"**Imports**\n",
"\n",
"As before, we import PennyLane, the PennyLane-provided version of NumPy,and an optimizer."
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {
"collapsed": false,
"jupyter": {
"outputs_hidden": false
}
},
"outputs": [],
"source": [
"import pennylane as qml\n",
"from pennylane import numpy as np\n",
"from pennylane.optimize import NesterovMomentumOptimizer"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"**Quantum and classical nodes**\n",
"\n",
"We create a quantum device with four “wires” (or qubits)."
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {
"collapsed": false,
"jupyter": {
"outputs_hidden": false
}
},
"outputs": [],
"source": [
"dev = qml.device(\"default.qubit\", wires=4)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Variational classifiers usually define a “layer” or “block”, which is an elementary circuit architecture that gets repeated to build the\n",
"variational circuit.\n",
"\n",
"Our circuit layer consists of an arbitrary rotation on every qubit, as well as CNOTs that entangle each qubit with its neighbour."
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {
"collapsed": false,
"jupyter": {
"outputs_hidden": false
}
},
"outputs": [],
"source": [
"def layer(W):\n",
"\n",
" qml.Rot(W[0, 0], W[0, 1], W[0, 2], wires=0)\n",
" qml.Rot(W[1, 0], W[1, 1], W[1, 2], wires=1)\n",
" qml.Rot(W[2, 0], W[2, 1], W[2, 2], wires=2)\n",
" qml.Rot(W[3, 0], W[3, 1], W[3, 2], wires=3)\n",
"\n",
" qml.CNOT(wires=[0, 1])\n",
" qml.CNOT(wires=[1, 2])\n",
" qml.CNOT(wires=[2, 3])\n",
" qml.CNOT(wires=[3, 0])"
]
},
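{
"cell_type": "markdown",
"metadata": {},
"source": [
"As a quick check of the expected weight shape, the sketch below wraps a single layer in a QNode of its own (the name ``single_layer_circuit`` and the random weights are just for illustration and assume the cells above have been executed). ``qml.Rot`` takes three Euler angles per qubit, so one layer expects an array of shape ``(4, 3)``."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"@qml.qnode(dev)\n",
"def single_layer_circuit(W):\n",
"    # apply one layer of rotations plus entangling CNOTs, read out qubit 0\n",
"    layer(W)\n",
"    return qml.expval(qml.PauliZ(0))\n",
"\n",
"# one layer of weights: 4 qubits x 3 Euler angles each\n",
"W_example = 0.01 * np.random.randn(4, 3)\n",
"print(single_layer_circuit(W_example))"
]
},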
{
"cell_type": "markdown",
"metadata": {},
"source": [
"We also need a way to encode data inputs $x$ into the circuit, so that the measured output depends on the inputs. In this first example, the inputs are bitstrings, which we encode into the state of the qubits. The quantum state $\\psi$ after state preparation is a computational basis state that has 1s where $x$ has 1s, for example\n",
"\n",
"$$\\begin{align}x = 0101 \\rightarrow |\\psi \\rangle = |0101 \\rangle .\\end{align}$$\n",
"\n",
"We use the ``BasisState`` function provided by PennyLane, which expects ``x`` to be a list of zeros and ones, i.e. ``[0,1,0,1]``.\n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {
"collapsed": false,
"jupyter": {
"outputs_hidden": false
}
},
"outputs": [],
"source": [
"def statepreparation(x):\n",
" qml.BasisState(x, wires=[0, 1, 2, 3])"
]
},
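{
"cell_type": "markdown",
"metadata": {},
"source": [
"For intuition only, the framework-free sketch below (not needed by the rest of the notebook) shows which amplitude a bitstring such as ``[0, 1, 0, 1]`` singles out: the basis state $|0101\\rangle$ is the length-$2^4$ amplitude vector with a single 1 at index ``int(\"0101\", 2) = 5``."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Framework-free illustration of basis-state encoding:\n",
"# the bitstring picks out exactly one of the 2**n amplitudes.\n",
"x_example = [0, 1, 0, 1]\n",
"index = int(\"\".join(str(b) for b in x_example), 2)  # 0b0101 = 5\n",
"amplitudes = [0.0] * (2 ** len(x_example))\n",
"amplitudes[index] = 1.0\n",
"print(index)\n",
"print(amplitudes)"
]
},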
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Now we define the quantum node as a state preparation routine, followed\n",
"by a repetition of the layer structure. Borrowing from machine learning,\n",
"we call the parameters ``weights``.\n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": 6,
"metadata": {
"collapsed": false,
"jupyter": {
"outputs_hidden": false
}
},
"outputs": [],
"source": [
"@qml.qnode(dev)\n",
"def circuit(weights, x=None):\n",
"\n",
" statepreparation(x)\n",
"\n",
" for W in weights:\n",
" layer(W)\n",
"\n",
" return qml.expval(qml.PauliZ(0))"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Different from previous examples, the quantum node takes the data as a\n",
"keyword argument ``x`` (with the default value ``None``). Keyword\n",
"arguments of a quantum node are treated as fixed when calculating a\n",
"gradient; they are never trained.\n",
"\n",
"If we want to add a “classical” bias parameter, the variational quantum\n",
"classifer also needs some post-processing. We define the final model by\n",
"a classical node that uses the first variable, and feeds the remainder\n",
"into the quantum node. Before this, we reshape the list of remaining\n",
"variables for easy use in the quantum node.\n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": 7,
"metadata": {
"collapsed": false,
"jupyter": {
"outputs_hidden": false
}
},
"outputs": [],
"source": [
"def variational_classifier(var, x=None):\n",
" weights = var[0]\n",
" bias = var[1]\n",
" return circuit(weights, x=x) + bias"
]
},
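{
"cell_type": "markdown",
"metadata": {},
"source": [
"As a small usage sketch (``weights_example`` and ``bias_example`` are illustrative names, and the cells above are assumed to have been run), we can evaluate the untrained model on a single bitstring; it simply returns the circuit expectation value plus the bias."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Evaluate the (untrained) model once on one input bitstring.\n",
"weights_example = 0.01 * np.random.randn(2, 4, 3)  # 2 layers, 4 qubits, 3 angles\n",
"bias_example = 0.0\n",
"print(variational_classifier((weights_example, bias_example), x=np.array([0, 1, 0, 1])))"
]
},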
{
"cell_type": "markdown",
"metadata": {},
"source": [
"**Cost**\n",
"\n",
"\n",
"In supervised learning, the cost function is usually the sum of a loss function and a regularizer. We use the standard square loss that measures the distance between target labels and model predictions."
]
},
{
"cell_type": "code",
"execution_count": 8,
"metadata": {
"collapsed": false,
"jupyter": {
"outputs_hidden": false
}
},
"outputs": [],
"source": [
"def square_loss(labels, predictions):\n",
" loss = 0\n",
" for l, p in zip(labels, predictions):\n",
" loss = loss + (l - p) ** 2\n",
"\n",
" loss = loss / len(labels)\n",
" return loss"
]
},
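{
"cell_type": "markdown",
"metadata": {},
"source": [
"A tiny worked example with made-up numbers: for labels $(1, -1)$ and predictions $(0.8, -0.5)$ the square loss is $\\frac{(1-0.8)^2 + (-1+0.5)^2}{2} = \\frac{0.04 + 0.25}{2} = 0.145$."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# ((1 - 0.8)**2 + (-1 - (-0.5))**2) / 2 = 0.145\n",
"print(square_loss([1, -1], [0.8, -0.5]))"
]
},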
{
"cell_type": "markdown",
"metadata": {},
"source": [
"To monitor how many inputs the current classifier predicted correctly,\n",
"we also define the accuracy given target labels and model predictions.\n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": 9,
"metadata": {
"collapsed": false,
"jupyter": {
"outputs_hidden": false
}
},
"outputs": [],
"source": [
"def accuracy(labels, predictions):\n",
"\n",
" loss = 0\n",
" for l, p in zip(labels, predictions):\n",
" if abs(l - p) < 1e-5:\n",
" loss = loss + 1\n",
" loss = loss / len(labels)\n",
"\n",
" return loss"
]
},
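{
"cell_type": "markdown",
"metadata": {},
"source": [
"Similarly, a quick illustrative call with toy values: two of the three predictions below match their labels exactly, so the accuracy should come out as $2/3$."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# 2 of 3 predictions match the labels exactly -> accuracy 2/3\n",
"print(accuracy([1, -1, 1], [1, -1, -1]))"
]
},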
{
"cell_type": "markdown",
"metadata": {},
"source": [
"For learning tasks, the cost depends on the data - here the features and\n",
"labels considered in the iteration of the optimization routine.\n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": 10,
"metadata": {
"collapsed": false,
"jupyter": {
"outputs_hidden": false
}
},
"outputs": [],
"source": [
"def cost(var, X, Y):\n",
" predictions = [variational_classifier(var, x=x) for x in X]\n",
" return square_loss(Y, predictions)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"**Optimization**\n",
"\n",
"\n",
"Let’s now load and preprocess some data."
]
},
{
"cell_type": "code",
"execution_count": 14,
"metadata": {
"collapsed": false,
"jupyter": {
"outputs_hidden": false
}
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"X = [0. 0. 0. 0.], Y = -1\n",
"X = [0. 0. 0. 1.], Y = 1\n",
"X = [0. 0. 1. 0.], Y = 1\n",
"X = [0. 0. 1. 1.], Y = -1\n",
"X = [0. 1. 0. 0.], Y = 1\n",
"...\n"
]
}
],
"source": [
"data = np.loadtxt(\"data/parity.txt\")\n",
"X = data[:, :-1]\n",
"Y = data[:, -1]\n",
"Y = Y * 2 - np.ones(len(Y)) # shift label from {0, 1} to {-1, 1}\n",
"\n",
"for i in range(5):\n",
" print(\"X = {}, Y = {: d}\".format(X[i], int(Y[i])))\n",
"\n",
"print(\"...\")"
]
},
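{
"cell_type": "markdown",
"metadata": {},
"source": [
"As a sanity check on the data (a short sketch that assumes the cell above has run), the shifted labels should coincide with the parity of each bitstring, mapped from $\\{0, 1\\}$ to $\\{-1, 1\\}$."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Recompute the parity of each row and compare with the loaded labels.\n",
"parity_labels = 2 * (np.sum(X, axis=1) % 2) - 1\n",
"print(np.allclose(parity_labels, Y))  # should print True for the parity dataset"
]
},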
{
"cell_type": "markdown",
"metadata": {},
"source": [
"We initialize the variables randomly (but fix a seed for\n",
"reproducability). The first variable in the list is used as a bias,\n",
"while the rest is fed into the gates of the variational circuit.\n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": 15,
"metadata": {
"collapsed": false,
"jupyter": {
"outputs_hidden": false
}
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"(array([[[ 0.01764052, 0.00400157, 0.00978738],\n",
" [ 0.02240893, 0.01867558, -0.00977278],\n",
" [ 0.00950088, -0.00151357, -0.00103219],\n",
" [ 0.00410599, 0.00144044, 0.01454274]],\n",
"\n",
" [[ 0.00761038, 0.00121675, 0.00443863],\n",
" [ 0.00333674, 0.01494079, -0.00205158],\n",
" [ 0.00313068, -0.00854096, -0.0255299 ],\n",
" [ 0.00653619, 0.00864436, -0.00742165]]]), 0.0)\n"
]
}
],
"source": [
"np.random.seed(0)\n",
"num_qubits = 4\n",
"num_layers = 2\n",
"var_init = (0.01 * np.random.randn(num_layers, num_qubits, 3), 0.0)\n",
"\n",
"print(var_init)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Next we create an optimizer and choose a batch size…\n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": 16,
"metadata": {
"collapsed": false,
"jupyter": {
"outputs_hidden": false
}
},
"outputs": [],
"source": [
"opt = NesterovMomentumOptimizer(0.5)\n",
"batch_size = 5"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"…and train the optimizer. We track the accuracy - the share of correctly\n",
"classified data samples. For this we compute the outputs of the\n",
"variational classifier and turn them into predictions in\n",
"$\\{-1,1\\}$ by taking the sign of the output.\n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": 17,
"metadata": {
"collapsed": false,
"jupyter": {
"outputs_hidden": false
}
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Iter: 1 | Cost: 3.4355534 | Accuracy: 0.5000000 \n",
"Iter: 2 | Cost: 1.9287800 | Accuracy: 0.5000000 \n",
"Iter: 3 | Cost: 2.0341238 | Accuracy: 0.5000000 \n",
"Iter: 4 | Cost: 1.6372574 | Accuracy: 0.5000000 \n",