
Commit 346df97

update website (#1912)
1 parent 5549d89 commit 346df97

8 files changed: 79 additions & 68 deletions

docs/ipynb/export.ipynb

Lines changed: 4 additions & 5 deletions

@@ -61,13 +61,12 @@
 "\n",
 "print(type(model)) # <class 'tensorflow.python.keras.engine.training.Model'>\n",
 "\n",
-"try:\n",
-"    model.save(\"model_autokeras\", save_format=\"tf\")\n",
-"except Exception:\n",
-"    model.save(\"model_autokeras.h5\")\n",
+"model.save(\"model_autokeras.keras\")\n",
 "\n",
 "\n",
-"loaded_model = load_model(\"model_autokeras\", custom_objects=ak.CUSTOM_OBJECTS)\n",
+"loaded_model = load_model(\n",
+"    \"model_autokeras.keras\", custom_objects=ak.CUSTOM_OBJECTS\n",
+")\n",
 "\n",
 "predicted_y = loaded_model.predict(np.expand_dims(x_test, -1))\n",
 "print(predicted_y)"

docs/ipynb/image_classification.ipynb

Lines changed: 10 additions & 6 deletions

@@ -46,6 +46,10 @@
 "outputs": [],
 "source": [
 "(x_train, y_train), (x_test, y_test) = mnist.load_data()\n",
+"x_train = x_train[:100]\n",
+"y_train = y_train[:100]\n",
+"x_test = x_test[:100]\n",
+"y_test = y_test[:100]\n",
 "print(x_train.shape) # (60000, 28, 28)\n",
 "print(y_train.shape) # (60000,)\n",
 "print(y_train[:3]) # array([7, 2, 1], dtype=uint8)"
@@ -75,7 +79,7 @@
 "# Initialize the image classifier.\n",
 "clf = ak.ImageClassifier(overwrite=True, max_trials=1)\n",
 "# Feed the image classifier with training data.\n",
-"clf.fit(x_train, y_train, epochs=10)\n",
+"clf.fit(x_train, y_train, epochs=1)\n",
 "\n",
 "\n",
 "# Predict with the best model.\n",
@@ -112,7 +116,7 @@
 "    y_train,\n",
 "    # Split the training data and use the last 15% as validation data.\n",
 "    validation_split=0.15,\n",
-"    epochs=10,\n",
+"    epochs=1,\n",
 ")"
 ]
 },
@@ -144,7 +148,7 @@
 "    y_train,\n",
 "    # Use your own validation set.\n",
 "    validation_data=(x_val, y_val),\n",
-"    epochs=10,\n",
+"    epochs=1,\n",
 ")"
 ]
 },
@@ -185,7 +189,7 @@
 "clf = ak.AutoModel(\n",
 "    inputs=input_node, outputs=output_node, overwrite=True, max_trials=1\n",
 ")\n",
-"clf.fit(x_train, y_train, epochs=10)"
+"clf.fit(x_train, y_train, epochs=1)"
 ]
 },
 {
@@ -219,7 +223,7 @@
 "clf = ak.AutoModel(\n",
 "    inputs=input_node, outputs=output_node, overwrite=True, max_trials=1\n",
 ")\n",
-"clf.fit(x_train, y_train, epochs=10)"
+"clf.fit(x_train, y_train, epochs=1)"
 ]
 },
 {
@@ -294,7 +298,7 @@
 "\n",
 "clf = ak.ImageClassifier(overwrite=True, max_trials=1)\n",
 "# Feed the tensorflow Dataset to the classifier.\n",
-"clf.fit(train_set, epochs=10)\n",
+"clf.fit(train_set, epochs=1)\n",
 "# Predict with the best model.\n",
 "predicted_y = clf.predict(test_set)\n",
 "# Evaluate the best model with testing data.\n",

docs/ipynb/image_regression.ipynb

Lines changed: 12 additions & 6 deletions

@@ -52,6 +52,8 @@
 "(x_train, y_train), (x_test, y_test) = mnist.load_data()\n",
 "x_train = x_train[:100]\n",
 "y_train = y_train[:100]\n",
+"x_test = x_test[:100]\n",
+"y_test = y_test[:100]\n",
 "print(x_train.shape) # (60000, 28, 28)\n",
 "print(y_train.shape) # (60000,)\n",
 "print(y_train[:3]) # array([7, 2, 1], dtype=uint8)"
@@ -65,7 +67,7 @@
 "source": [
 "The second step is to run the ImageRegressor. It is recommended have more\n",
 "trials for more complicated datasets. This is just a quick demo of MNIST, so\n",
-"we set max_trials to 1. For the same reason, we set epochs to 2. You can also\n",
+"we set max_trials to 1. For the same reason, we set epochs to 1. You can also\n",
 "leave the epochs unspecified for an adaptive number of epochs.\n"
 ]
 },
@@ -80,7 +82,7 @@
 "# Initialize the image regressor.\n",
 "reg = ak.ImageRegressor(overwrite=True, max_trials=1)\n",
 "# Feed the image regressor with training data.\n",
-"reg.fit(x_train, y_train, epochs=2)\n",
+"reg.fit(x_train, y_train, epochs=1)\n",
 "\n",
 "\n",
 "# Predict with the best model.\n",
@@ -117,7 +119,7 @@
 "    y_train,\n",
 "    # Split the training data and use the last 15% as validation data.\n",
 "    validation_split=0.15,\n",
-"    epochs=2,\n",
+"    epochs=1,\n",
 ")"
 ]
 },
@@ -190,7 +192,7 @@
 "reg = ak.AutoModel(\n",
 "    inputs=input_node, outputs=output_node, overwrite=True, max_trials=1\n",
 ")\n",
-"reg.fit(x_train, y_train, epochs=2)"
+"reg.fit(x_train, y_train, epochs=1)"
 ]
 },
 {
@@ -224,7 +226,7 @@
 "reg = ak.AutoModel(\n",
 "    inputs=input_node, outputs=output_node, overwrite=True, max_trials=1\n",
 ")\n",
-"reg.fit(x_train, y_train, epochs=2)"
+"reg.fit(x_train, y_train, epochs=1)"
 ]
 },
 {
@@ -258,6 +260,10 @@
 "outputs": [],
 "source": [
 "(x_train, y_train), (x_test, y_test) = mnist.load_data()\n",
+"x_train = x_train[:100]\n",
+"y_train = y_train[:100]\n",
+"x_test = x_test[:100]\n",
+"y_test = y_test[:100]\n",
 "\n",
 "# Reshape the images to have the channel dimension.\n",
 "x_train = x_train.reshape(x_train.shape + (1,))\n",
@@ -273,7 +279,7 @@
 "\n",
 "reg = ak.ImageRegressor(overwrite=True, max_trials=1)\n",
 "# Feed the tensorflow Dataset to the regressor.\n",
-"reg.fit(train_set, epochs=2)\n",
+"reg.fit(train_set, epochs=1)\n",
 "# Predict with the best model.\n",
 "predicted_y = reg.predict(test_set)\n",
 "# Evaluate the best model with testing data.\n",

docs/ipynb/load.ipynb

Lines changed: 8 additions & 8 deletions

@@ -94,7 +94,7 @@
 },
 "outputs": [],
 "source": [
-"batch_size = 32\n",
+"batch_size = 2\n",
 "img_height = 180\n",
 "img_width = 180\n",
 "\n",
@@ -137,8 +137,8 @@
 "outputs": [],
 "source": [
 "clf = ak.ImageClassifier(overwrite=True, max_trials=1)\n",
-"clf.fit(train_data, epochs=1)\n",
-"print(clf.evaluate(test_data))"
+"clf.fit(train_data.take(100), epochs=1)\n",
+"print(clf.evaluate(test_data.take(2)))"
 ]
 },
 {
@@ -203,8 +203,8 @@
 ")\n",
 "\n",
 "clf = ak.TextClassifier(overwrite=True, max_trials=1)\n",
-"clf.fit(train_data, epochs=2)\n",
-"print(clf.evaluate(test_data))"
+"clf.fit(train_data.take(2), epochs=1)\n",
+"print(clf.evaluate(test_data.take(2)))"
 ]
 },
 {
@@ -225,8 +225,8 @@
 },
 "outputs": [],
 "source": [
-"N_BATCHES = 30\n",
-"BATCH_SIZE = 100\n",
+"N_BATCHES = 2\n",
+"BATCH_SIZE = 10\n",
 "\n",
 "\n",
 "def get_data_generator(n_batches, batch_size):\n",
@@ -247,7 +247,7 @@
 "    output_shapes=((32, 32, 3), tuple()),\n",
 ").batch(BATCH_SIZE)\n",
 "\n",
-"clf = ak.ImageDataClassifier(overwrite=True, max_trials=1, seed=5)\n",
+"clf = ak.ImageClassifier(overwrite=True, max_trials=1, seed=5)\n",
 "clf.fit(x=dataset, validation_data=dataset, batch_size=BATCH_SIZE)\n",
 "print(clf.evaluate(dataset))"
 ]
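
The last two hunks shrink the generator example and change the estimator name to the public ak.ImageClassifier API. A hedged sketch of the full generator-to-Dataset pattern; the generator body is an assumption, since the diff only shows its signature and the surrounding Dataset plumbing:

import autokeras as ak
import numpy as np
import tensorflow as tf

N_BATCHES = 2
BATCH_SIZE = 10


def get_data_generator(n_batches, batch_size):
    """Return a callable that yields (image, label) pairs one at a time."""

    def data_generator():
        for _ in range(n_batches * batch_size):
            x = np.random.rand(32, 32, 3)
            y = x.sum() / (32 * 32 * 3) > 0.5  # assumed toy labelling rule
            yield x, y

    return data_generator


dataset = tf.data.Dataset.from_generator(
    get_data_generator(N_BATCHES, BATCH_SIZE),
    output_types=(tf.float32, tf.float32),
    output_shapes=((32, 32, 3), tuple()),
).batch(BATCH_SIZE)

clf = ak.ImageClassifier(overwrite=True, max_trials=1, seed=5)
clf.fit(x=dataset, validation_data=dataset, batch_size=BATCH_SIZE)
print(clf.evaluate(dataset))

Reusing the same dataset for training and validation is only sensible for a smoke test like this one.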

docs/ipynb/multi.ipynb

Lines changed: 11 additions & 8 deletions

@@ -78,7 +78,7 @@
 },
 "outputs": [],
 "source": [
-"num_instances = 100\n",
+"num_instances = 10\n",
 "# Generate image data.\n",
 "image_data = np.random.rand(num_instances, 32, 32, 3).astype(np.float32)\n",
 "# Generate numerical data.\n",
@@ -144,7 +144,8 @@
 "model.fit(\n",
 "    [image_data, numerical_data],\n",
 "    [regression_target, classification_target],\n",
-"    epochs=3,\n",
+"    epochs=1,\n",
+"    batch_size=3,\n",
 ")"
 ]
 },
@@ -173,7 +174,8 @@
 "    [regression_target, classification_target],\n",
 "    # Split the training data and use the last 15% as validation data.\n",
 "    validation_split=0.15,\n",
-"    epochs=2,\n",
+"    epochs=1,\n",
+"    batch_size=3,\n",
 ")"
 ]
 },
@@ -195,7 +197,7 @@
 },
 "outputs": [],
 "source": [
-"split = 20\n",
+"split = 5\n",
 "\n",
 "image_val = image_data[split:]\n",
 "numerical_val = numerical_data[split:]\n",
@@ -215,7 +217,8 @@
 "        [image_val, numerical_val],\n",
 "        [regression_val, classification_val],\n",
 "    ),\n",
-"    epochs=2,\n",
+"    epochs=1,\n",
+"    batch_size=3,\n",
 ")"
 ]
 },
@@ -261,7 +264,7 @@
 "output_node1 = ak.Merge()([output_node1, output_node2])\n",
 "\n",
 "input_node2 = ak.Input()\n",
-"output_node2 = ak.DenseBlock()(output_node)\n",
+"output_node2 = ak.DenseBlock()(input_node2)\n",
 "\n",
 "output_node = ak.Merge()([output_node1, output_node2])\n",
 "output_node1 = ak.ClassificationHead()(output_node)\n",
@@ -282,8 +285,8 @@
 "auto_model.fit(\n",
 "    [image_data, numerical_data],\n",
 "    [classification_target, regression_target],\n",
-"    batch_size=32,\n",
-"    epochs=3,\n",
+"    batch_size=3,\n",
+"    epochs=1,\n",
 ")"
 ]
 },
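
The output_node2 change is the functional fix in this file: the DenseBlock for the second input was wired to a stale output_node instead of input_node2, so the structured branch never saw its own input. A hedged sketch of the surrounding multi-modal, multi-task graph; the image-branch blocks are assumptions, since only the merge-and-heads portion appears in the context lines:

import autokeras as ak
import numpy as np

num_instances = 10
image_data = np.random.rand(num_instances, 32, 32, 3).astype(np.float32)
numerical_data = np.random.rand(num_instances, 10).astype(np.float32)
classification_target = np.random.randint(5, size=num_instances)
regression_target = np.random.rand(num_instances, 1).astype(np.float32)

# Image branch (block choices here are assumptions).
input_node1 = ak.ImageInput()
output_node = ak.Normalization()(input_node1)
output_node1 = ak.ConvBlock()(output_node)
output_node2 = ak.ResNetBlock(version="v2")(output_node)
output_node1 = ak.Merge()([output_node1, output_node2])

# Structured branch: DenseBlock now consumes its own input node.
input_node2 = ak.Input()
output_node2 = ak.DenseBlock()(input_node2)

# Merge both branches and attach one head per task.
output_node = ak.Merge()([output_node1, output_node2])
output_node1 = ak.ClassificationHead()(output_node)
output_node2 = ak.RegressionHead()(output_node)

auto_model = ak.AutoModel(
    inputs=[input_node1, input_node2],
    outputs=[output_node1, output_node2],
    overwrite=True,
    max_trials=1,
)
auto_model.fit(
    [image_data, numerical_data],
    [classification_target, regression_target],
    batch_size=3,
    epochs=1,
)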

docs/ipynb/text_classification.ipynb

Lines changed: 17 additions & 14 deletions

@@ -66,10 +66,10 @@
 "    os.path.join(IMDB_DATADIR, \"test\"), shuffle=False, categories=classes\n",
 ")\n",
 "\n",
-"x_train = np.array(train_data.data)\n",
-"y_train = np.array(train_data.target)\n",
-"x_test = np.array(test_data.data)\n",
-"y_test = np.array(test_data.target)\n",
+"x_train = np.array(train_data.data)[:100]\n",
+"y_train = np.array(train_data.target)[:100]\n",
+"x_test = np.array(test_data.data)[:100]\n",
+"y_test = np.array(test_data.target)[:100]\n",
 "\n",
 "print(x_train.shape) # (25000,)\n",
 "print(y_train.shape) # (25000, 1)\n",
@@ -100,7 +100,7 @@
 "    overwrite=True, max_trials=1\n",
 ") # It only tries 1 model as a quick demo.\n",
 "# Feed the text classifier with training data.\n",
-"clf.fit(x_train, y_train, epochs=2)\n",
+"clf.fit(x_train, y_train, epochs=1, batch_size=2)\n",
 "# Predict with the best model.\n",
 "predicted_y = clf.predict(x_test)\n",
 "# Evaluate the best model with testing data.\n",
@@ -132,6 +132,8 @@
 "    y_train,\n",
 "    # Split the training data and use the last 15% as validation data.\n",
 "    validation_split=0.15,\n",
+"    epochs=1,\n",
+"    batch_size=2,\n",
 ")"
 ]
 },
@@ -153,17 +155,18 @@
 },
 "outputs": [],
 "source": [
-"split = 5000\n",
+"split = 5\n",
 "x_val = x_train[split:]\n",
 "y_val = y_train[split:]\n",
 "x_train = x_train[:split]\n",
 "y_train = y_train[:split]\n",
 "clf.fit(\n",
 "    x_train,\n",
 "    y_train,\n",
-"    epochs=2,\n",
+"    epochs=1,\n",
 "    # Use your own validation set.\n",
 "    validation_data=(x_val, y_val),\n",
+"    batch_size=2,\n",
 ")"
 ]
 },
@@ -196,7 +199,7 @@
 "clf = ak.AutoModel(\n",
 "    inputs=input_node, outputs=output_node, overwrite=True, max_trials=1\n",
 ")\n",
-"clf.fit(x_train, y_train, epochs=2)"
+"clf.fit(x_train, y_train, epochs=1, batch_size=2)"
 ]
 },
 {
@@ -226,17 +229,17 @@
 "outputs": [],
 "source": [
 "train_set = tf.data.Dataset.from_tensor_slices(((x_train,), (y_train,))).batch(\n",
-"    32\n",
+"    2\n",
 ")\n",
-"test_set = tf.data.Dataset.from_tensor_slices(((x_test,), (y_test,))).batch(32)\n",
+"test_set = tf.data.Dataset.from_tensor_slices(((x_test,), (y_test,))).batch(2)\n",
 "\n",
-"clf = ak.TextClassifier(overwrite=True, max_trials=2)\n",
+"clf = ak.TextClassifier(overwrite=True, max_trials=1)\n",
 "# Feed the tensorflow Dataset to the classifier.\n",
-"clf.fit(train_set, epochs=2)\n",
+"clf.fit(train_set.take(2), epochs=1)\n",
 "# Predict with the best model.\n",
-"predicted_y = clf.predict(test_set)\n",
+"predicted_y = clf.predict(test_set.take(2))\n",
 "# Evaluate the best model with testing data.\n",
-"print(clf.evaluate(test_set))"
+"print(clf.evaluate(test_set.take(2)))"
 ]
 },
 {
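
For the AutoModel hunk, a hedged sketch of the graph-API text classifier whose fit call the diff trims down; the block choices are assumptions, and x_train/y_train are the 100-sample IMDB slices prepared in the first hunk:

import autokeras as ak

# Build the text classification graph explicitly instead of using ak.TextClassifier.
input_node = ak.TextInput()
output_node = ak.TextBlock()(input_node)  # block choice is an assumption
output_node = ak.ClassificationHead()(output_node)

clf = ak.AutoModel(
    inputs=input_node, outputs=output_node, overwrite=True, max_trials=1
)
# One epoch and a tiny batch size keep the docs demo cheap on 100 reviews.
clf.fit(x_train, y_train, epochs=1, batch_size=2)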
