|
55 | 55 | },
|
56 | 56 | "outputs": [],
|
57 | 57 | "source": [
|
| 58 | + "import os\n", |
| 59 | + "\n", |
58 | 60 | "from pyspark import SparkContext, SparkConf\n",
|
59 | 61 | "from pyspark.sql import SparkSession\n",
|
60 |
| - "import os\n", |
61 |
| - "import sagemaker_pyspark\n", |
| 62 | + "\n", |
62 | 63 | "import sagemaker\n",
|
| 64 | + "import sagemaker_pyspark\n", |
63 | 65 | "from sagemaker import get_execution_role\n",
|
64 | 66 | "\n",
|
65 | 67 | "sagemaker_session = sagemaker.Session()\n",
|
|
209 | 211 | "\n",
|
210 | 212 | "for cluster in range(10):\n",
|
211 | 213 | " print('\\n\\n\\nCluster {}:'.format(int(cluster)))\n",
|
212 |
| - " digits = [ img for l, img in zip(clusters, images) if int(l.prediction) == cluster ]\n", |
213 |
| - " height=((len(digits)-1)//5)+1\n", |
| 214 | + " digits=[ img for l, img in zip(clusters, images) if int(l.prediction) == cluster ]\n", |
| 215 | + " height=((len(digits) - 1) // 5) + 1\n", |
214 | 216 | " width=5\n",
|
215 | 217 | " plt.rcParams[\"figure.figsize\"] = (width,height)\n",
|
216 | 218 | " _, subplots = plt.subplots(height, width)\n",
|
|
223 | 225 | " plt.show()"
|
224 | 226 | ]
|
225 | 227 | },
|
| 228 | + { |
| 229 | + "cell_type": "markdown", |
| 230 | + "metadata": {}, |
| 231 | + "source": [ |
| 232 | + "Since we don't need to make any more inferences, we now delete the endpoint:" |
| 233 | + ] |
| 234 | + }, |
226 | 235 | {
|
227 | 236 | "cell_type": "code",
|
228 | 237 | "execution_count": null,
|
|
0 commit comments