@@ -150,7 +150,7 @@ def train_model(model, criterion, optimizer, scheduler, num_epochs=25):
     best_acc = 0.0
 
     for epoch in range(num_epochs):
-        print('Epoch {}/{}'.format(epoch, num_epochs - 1))
+        print(f'Epoch {epoch}/{num_epochs - 1}')
         print('-' * 10)
 
         # Each epoch has a training and validation phase
@@ -192,8 +192,7 @@ def train_model(model, criterion, optimizer, scheduler, num_epochs=25):
             epoch_loss = running_loss / dataset_sizes[phase]
             epoch_acc = running_corrects.double() / dataset_sizes[phase]
 
-            print('{} Loss: {:.4f} Acc: {:.4f}'.format(
-                phase, epoch_loss, epoch_acc))
+            print(f'{phase} Loss: {epoch_loss:.4f} Acc: {epoch_acc:.4f}')
 
             # deep copy the model
             if phase == 'val' and epoch_acc > best_acc:
@@ -203,9 +202,8 @@ def train_model(model, criterion, optimizer, scheduler, num_epochs=25):
         print()
 
     time_elapsed = time.time() - since
-    print('Training complete in {:.0f}m {:.0f}s'.format(
-        time_elapsed // 60, time_elapsed % 60))
-    print('Best val Acc: {:4f}'.format(best_acc))
+    print(f'Training complete in {time_elapsed // 60:.0f}m {time_elapsed % 60:.0f}s')
+    print(f'Best val Acc: {best_acc:4f}')
 
     # load best model weights
     model.load_state_dict(best_model_wts)
@@ -237,7 +235,7 @@ def visualize_model(model, num_images=6):
                 images_so_far += 1
                 ax = plt.subplot(num_images//2, 2, images_so_far)
                 ax.axis('off')
-                ax.set_title('predicted: {}'.format(class_names[preds[j]]))
+                ax.set_title(f'predicted: {class_names[preds[j]]}')
                 imshow(inputs.cpu().data[j])
 
                 if images_so_far == num_images:
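For reference, every hunk above applies the same pattern: a str.format() call is replaced by an f-string carrying identical format specifiers, so the printed output is unchanged (f-strings require Python 3.6+). Below is a minimal standalone sketch of the before/after behavior; the variable values are illustrative placeholders, not taken from the tutorial's data.

# Illustrative values only; in the tutorial these come out of the training loop.
epoch, num_epochs = 3, 25
phase, epoch_loss, epoch_acc = 'val', 0.4321, 0.8765
best_acc = 0.931372
time_elapsed = 754.0  # seconds

# .format() style (before the change)
print('Epoch {}/{}'.format(epoch, num_epochs - 1))
print('{} Loss: {:.4f} Acc: {:.4f}'.format(phase, epoch_loss, epoch_acc))
print('Training complete in {:.0f}m {:.0f}s'.format(time_elapsed // 60, time_elapsed % 60))
print('Best val Acc: {:4f}'.format(best_acc))

# f-string style (after the change) -- same format specs, same output
print(f'Epoch {epoch}/{num_epochs - 1}')
print(f'{phase} Loss: {epoch_loss:.4f} Acc: {epoch_acc:.4f}')
print(f'Training complete in {time_elapsed // 60:.0f}m {time_elapsed % 60:.0f}s')
print(f'Best val Acc: {best_acc:4f}')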