Yes, this is it exactly. Here is the complete error I am getting, including the hyperparameter optimization part.
RuntimeError Traceback (most recent call last)
Cell In[33], line 4
2 Rec = record()
3 study = optuna.create_study(direction="maximize", sampler=optuna.samplers.TPESampler(), pruner=optuna.pruners.MedianPruner())
----> 4 study.optimize(objective, n_trials=150)
6 best_trial = study.best_trial
8 optuna.visualization.matplotlib.plot_param_importances(study)
File ~\Anaconda\Installation\envs\FYP\lib\site-packages\optuna\study\study.py:400, in Study.optimize(self, func, n_trials, timeout, n_jobs, catch, callbacks, gc_after_trial, show_progress_bar)
392 if n_jobs != 1:
393 warnings.warn(
394 "`n_jobs` argument has been deprecated in v2.7.0. "
395 "This feature will be removed in v4.0.0. "
396 "See https://github.com/optuna/optuna/releases/tag/v2.7.0.",
397 FutureWarning,
398 )
--> 400 _optimize(
401 study=self,
402 func=func,
403 n_trials=n_trials,
404 timeout=timeout,
405 n_jobs=n_jobs,
406 catch=catch,
407 callbacks=callbacks,
408 gc_after_trial=gc_after_trial,
409 show_progress_bar=show_progress_bar,
410 )
File ~\Anaconda\Installation\envs\FYP\lib\site-packages\optuna\study\_optimize.py:66, in _optimize(study, func, n_trials, timeout, n_jobs, catch, callbacks, gc_after_trial, show_progress_bar)
64 try:
65 if n_jobs == 1:
---> 66 _optimize_sequential(
67 study,
68 func,
69 n_trials,
70 timeout,
71 catch,
72 callbacks,
73 gc_after_trial,
74 reseed_sampler_rng=False,
75 time_start=None,
76 progress_bar=progress_bar,
77 )
78 else:
79 if show_progress_bar:
File ~\Anaconda\Installation\envs\FYP\lib\site-packages\optuna\study\_optimize.py:163, in _optimize_sequential(study, func, n_trials, timeout, catch, callbacks, gc_after_trial, reseed_sampler_rng, time_start, progress_bar)
160 break
162 try:
--> 163 trial = _run_trial(study, func, catch)
164 except Exception:
165 raise
File ~\Anaconda\Installation\envs\FYP\lib\site-packages\optuna\study\_optimize.py:264, in _run_trial(study, func, catch)
261 assert False, "Should not reach."
263 if state == TrialState.FAIL and func_err is not None and not isinstance(func_err, catch):
--> 264 raise func_err
265 return trial
File ~\Anaconda\Installation\envs\FYP\lib\site-packages\optuna\study\_optimize.py:213, in _run_trial(study, func, catch)
210 thread.start()
212 try:
--> 213 value_or_values = func(trial)
214 except exceptions.TrialPruned as e:
215 # TODO(mamu): Handle multi-objective cases.
216 state = TrialState.PRUNED
Cell In[30], line 32, in objective(trial)
29 model = ConvNet2D(params)
30 model.to(device)
---> 32 accuracy,metric,Confusion = KFold(params,model,num_epochs,device,trial)
34 Rec.update(accuracy,metric,Confusion)
36 return accuracy
Cell In[29], line 23, in KFold(params, model, num_epochs, device, trial)
21 while epoch<num_epochs and not done:
22 epoch+=1
---> 23 train_loss,train_correct = train_epoch(model,train_loader,criterion,optimizer,device)
24 test_loss,test_correct, Confusion = test_epoch(model,test_loader,criterion,device)
26 train_loss = train_loss/len(train_loader.sampler)
Cell In[23], line 13, in train_epoch(model, dataloader, loss_fn, optimizer, device)
10 features,labels = features.to(device),labels.to(device)
12 #Forward Pass
---> 13 output=model(features)
14 loss=loss_fn(output,labels)
16 #Backward Pass
File ~\Anaconda\Installation\envs\FYP\lib\site-packages\torch\nn\modules\module.py:1480, in Module._call_impl(self, *args, **kwargs)
1475 # If we don't have any hooks, we want to skip the rest of the logic in
1476 # this function, and just call forward.
1477 if not (self._backward_hooks or self._backward_pre_hooks or self._forward_hooks or self._forward_pre_hooks
1478 or _global_backward_pre_hooks or _global_backward_hooks
1479 or _global_forward_hooks or _global_forward_pre_hooks):
-> 1480 return forward_call(*args, **kwargs)
1481 # Do not call functions when jit is used
1482 full_backward_hooks, non_full_backward_hooks = [], []
Cell In[22], line 53, in ConvNet1D.forward(self, x)
51 x = x.view(x.size(0),-1)
52 # print(x.size(0))
---> 53 x = self.fc(x)
54 return x
File ~\Anaconda\Installation\envs\FYP\lib\site-packages\torch\nn\modules\module.py:1480, in Module._call_impl(self, *args, **kwargs)
1475 # If we don't have any hooks, we want to skip the rest of the logic in
1476 # this function, and just call forward.
1477 if not (self._backward_hooks or self._backward_pre_hooks or self._forward_hooks or self._forward_pre_hooks
1478 or _global_backward_pre_hooks or _global_backward_hooks
1479 or _global_forward_hooks or _global_forward_pre_hooks):
-> 1480 return forward_call(*args, **kwargs)
1481 # Do not call functions when jit is used
1482 full_backward_hooks, non_full_backward_hooks = [], []
File ~\Anaconda\Installation\envs\FYP\lib\site-packages\torch\nn\modules\container.py:204, in Sequential.forward(self, input)
202 def forward(self, input):
203 for module in self:
--> 204 input = module(input)
205 return input
File ~\Anaconda\Installation\envs\FYP\lib\site-packages\torch\nn\modules\module.py:1480, in Module._call_impl(self, *args, **kwargs)
1475 # If we don't have any hooks, we want to skip the rest of the logic in
1476 # this function, and just call forward.
1477 if not (self._backward_hooks or self._backward_pre_hooks or self._forward_hooks or self._forward_pre_hooks
1478 or _global_backward_pre_hooks or _global_backward_hooks
1479 or _global_forward_hooks or _global_forward_pre_hooks):
-> 1480 return forward_call(*args, **kwargs)
1481 # Do not call functions when jit is used
1482 full_backward_hooks, non_full_backward_hooks = [], []
File ~\Anaconda\Installation\envs\FYP\lib\site-packages\torch\nn\modules\linear.py:114, in Linear.forward(self, input)
113 def forward(self, input: Tensor) -> Tensor:
--> 114 return F.linear(input, self.weight, self.bias)
RuntimeError: mat1 and mat2 shapes cannot be multiplied (9x14 and 126x4)
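For reference, here is a minimal sketch that reproduces the same RuntimeError using only the shapes reported in the message (the sizes are taken from the error itself, not from my actual ConvNet definition): a batch of 9 samples flattened to 14 features, fed into a linear layer built for 126 input features.

import torch
import torch.nn as nn

x = torch.randn(9, 14)   # what self.fc actually receives: 9 samples, 14 flattened features
fc = nn.Linear(126, 4)   # what self.fc was constructed to accept: 126 in-features, 4 out-features
out = fc(x)              # raises: mat1 and mat2 shapes cannot be multiplied (9x14 and 126x4)

So the flatten in ConvNet1D.forward (x = x.view(x.size(0),-1)) is producing 14 features per sample while self.fc expects 126, which is presumably where the mismatch comes from.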