Getting AttributeError: 'NoneType' object has no attribute '_log' while generating model predictions

I have a function that generates predictions from my PyTorch model, like so:

from os import listdir

import pandas as pd
import torch


@torch.inference_mode()
def generate_submission(model, test_loader, name='submission.csv'):
    # test_dir and device are defined globally elsewhere in the notebook
    test_files = listdir(test_dir)

    predictions = []
    model.eval()
    for batch_idx, data in enumerate(test_loader):
        # move every tensor in the batch dict to the target device
        for key, value in data.items():
            data[key] = value.to(device)
        model_output = model(data['Image'])
        # take the argmax class for each sample in the batch
        _, preds = torch.max(model_output['Probabilities'], dim=1)
        predictions.append(preds)

    final_predictions = torch.cat(predictions, dim=0).cpu().numpy()
    df = pd.DataFrame({'id': test_files, 'category': final_predictions})
    df.to_csv(name, index=False)

    return df

This function works fine without wandb in my code, but after adding wandb logging I get an error, even though wandb is used nowhere in this specific function.

---------------------------------------------------------------------------
AttributeError                            Traceback (most recent call last)
/tmp/ipykernel_23/3664024534.py in <module>
----> 1 generate_submission(model, test_loader, name='submission-simple.csv')

/opt/conda/lib/python3.7/site-packages/torch/autograd/grad_mode.py in decorate_context(*args, **kwargs)
     25         def decorate_context(*args, **kwargs):
     26             with self.clone():
---> 27                 return func(*args, **kwargs)
     28         return cast(F, decorate_context)
     29 

/tmp/ipykernel_23/459273187.py in generate_submission(model, test_loader, name)
      9         for key, value in data.items():
     10             data[key] = value.to(device)
---> 11         model_output = model(data['Image'])
     12         _, preds = torch.max(model_output['Probabilities'], dim=1)
     13         #preds.cpu()

/opt/conda/lib/python3.7/site-packages/torch/nn/modules/module.py in _call_impl(self, *input, **kwargs)
   1209         if _global_forward_hooks or self._forward_hooks:
   1210             for hook in (*_global_forward_hooks.values(), *self._forward_hooks.values()):
-> 1211                 hook_result = hook(self, input, result)
   1212                 if hook_result is not None:
   1213                     result = hook_result

/opt/conda/lib/python3.7/site-packages/wandb/wandb_torch.py in <lambda>(mod, inp, outp)
    109             hook = module.register_forward_hook(
    110                 lambda mod, inp, outp: parameter_log_hook(
--> 111                     mod, inp, outp, log_track_params
    112                 )
    113             )

/opt/conda/lib/python3.7/site-packages/wandb/wandb_torch.py in parameter_log_hook(module, input_, output, log_track)
    103                 else:
    104                     data = parameter
--> 105                 self.log_tensor_stats(data.cpu(), "parameters/" + prefix + name)
    106 
    107         log_track_params = log_track_init(log_freq)

/opt/conda/lib/python3.7/site-packages/wandb/wandb_torch.py in log_tensor_stats(self, tensor, name)
    254             bins = torch.Tensor(bins_np)
    255 
--> 256         wandb.run._log(
    257             {name: wandb.Histogram(np_histogram=(tensor.tolist(), bins.tolist()))},
    258             commit=False,

AttributeError: 'NoneType' object has no attribute '_log'

I don’t understand why wandb.run._log() is even being called here. I only used wandb.watch() and wandb.log(metrics) in my training loop, and then wandb.finish() to close the run. Why is this being called here?
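Roughly, the wandb integration in my training loop looked like this (a simplified sketch; the project name, loop structure, and metric names here are placeholders, not my exact code):

import wandb

run = wandb.init(project="my-project")   # placeholder project name
wandb.watch(model, log="all")            # attaches hooks to the model for parameter/gradient logging

for epoch in range(num_epochs):
    for batch in train_loader:
        ...                              # forward pass, loss, backward, optimizer step
        wandb.log(metrics)               # metrics is a dict of scalars computed in the loop

wandb.finish()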

Hi @aurko21166, this error indicates that a wandb run was not correctly initialized, resulting in a None object. Could you provide an example of how you integrated your logic with wandb's PyTorch Lightning logger?

I think this error happens because wandb.watch() registers forward hooks on the model. After wandb.finish() the run object is gone, but the hooks are still attached, so the next forward pass tries to log to a run that no longer exists. Calling wandb.unwatch() before wandb.finish() fixed it for me.
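A minimal sketch of the ordering that worked for me (model is the same module passed to wandb.watch(); the training details are omitted):

import wandb

wandb.watch(model, log="all")   # during training: attaches hooks to the model

# ... training loop with wandb.log(...) ...

wandb.unwatch(model)            # detach the hooks wandb.watch() registered
wandb.finish()                  # close the run; wandb.run becomes None after this

# generate_submission(model, test_loader) now runs without the stale hook firing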
