RuntimeError: Weave is not available on the server. Please contact support.

I am using W&B Weave in my organisation, and I am trying to fetch metrics from Weave using Python.

I am running this code from a Jupyter notebook.

import os

os.environ["WANDB_BASE_URL"] = "https://company_name.wandb.io"
os.environ["WANDB_API_KEY"] = "*************************"  
os.environ["WANDB_SSL_VERIFY"] = "false"   

import weave
import wandb

wandb.login(verify=False)  
weave.init("WEAVE_PROJECT_ID")
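To rule out a basic connectivity/SSL problem between the notebook and our dedicated instance, a quick sanity check I would run first is a plain HTTPS request to the base URL (this is just a reachability probe, not a Weave API call):

import requests

# Reachability check against the dedicated W&B host; verify=False mirrors
# the WANDB_SSL_VERIFY=false setting used above.
resp = requests.get("https://company_name.wandb.io", verify=False)
print(resp.status_code)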

The error that I am getting is:

wandb: Network error (ConnectionError), entering retry loop.
weave: retry_attempt
weave: retry_attempt
weave: retry_failed
weave: Unexpected error when checking if Weave is available on the server.  Please contact support.
---------------------------------------------------------------------------
RuntimeError                              Traceback (most recent call last)
Cell In[7], line 5
      2 import wandb
      4 wandb.login(verify=False)  
----> 5 weave.init("WEAVE_PROJECT_ID")

File ~/Library/Python/3.12/lib/python/site-packages/weave/trace/api.py:109, in init(project_name, settings, autopatch_settings, global_postprocess_inputs, global_postprocess_output, global_attributes)
    106 if should_disable_weave():
    107     return weave_init.init_weave_disabled()
--> 109 return weave_init.init_weave(
    110     project_name,
    111 )

File ~/Library/Python/3.12/lib/python/site-packages/weave/trace/weave_init.py:144, in init_weave(project_name, ensure_project_exists)
    142 remote_server = init_weave_get_server(api_key)
    143 if not _weave_is_available(remote_server):
--> 144     raise RuntimeError(
    145         "Weave is not available on the server.  Please contact support."
    146     )
    147 server: TraceServerClientInterface = remote_server
    148 if use_server_cache():

RuntimeError: Weave is not available on the server.  Please contact support.

This is the URL where my Weave metrics are visible:

https://company_name.wandb.io/WEAVE_PROJECT_ID/weave/traces?view=traces_default
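As far as I understand, the path segment before /weave/ in that URL is the fully qualified "entity/project" name, which is what weave.init expects (the value below is just my project as an example):

# The traces URL has the form https://<host>/<entity>/<project>/weave/traces,
# and weave.init() takes the "<entity>/<project>" string.
weave.init("gsc-genai-weave-workspace/AIGA-2")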

Then I want to fetch metrics like this:

import weave
import pandas as pd

PROJECT_ID = "gsc-genai-weave-workspace/AIGA-2"

# weave.init() returns the client bound to the project
client = weave.init(PROJECT_ID)

calls = client.get_calls(
    limit=10_000   # adjust if needed
)

rows = []

for call in calls:
    row = {
        # identity
        "trace_id": call.trace_id,
        "call_id": call.id,
        "op_name": call.op_name,
        "object": call.attributes.get("object"),

        # model + provider
        "model": call.attributes.get("model"),
        "provider": call.attributes.get("provider"),
        "system_fingerprint": call.attributes.get("system_fingerprint"),

        # timing
        "start_time": call.started_at,
        "end_time": call.ended_at,
        "latency_ms": call.timing.get("latency_ms"),

        # token metrics (in call.summary these may be nested under "usage" keyed by model)
        "prompt_tokens": call.summary.get("prompt_tokens"),
        "completion_tokens": call.summary.get("completion_tokens"),
        "total_tokens": call.summary.get("total_tokens"),
        "reasoning_tokens": call.summary.get("reasoning_tokens"),
        "audio_tokens": call.summary.get("audio_tokens"),

        # safety / routing
        "accepted_prediction": call.summary.get("accepted_prediction"),
        "rejected_prediction": call.summary.get("rejected_prediction"),

        # LLM-specific
        "time_to_first_token": call.summary.get("time_to_first_token"),

        # status (the call's error text is exposed as `exception`)
        "error": call.exception,
    }

    rows.append(row)

df = pd.DataFrame(rows)
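Once that works, I would just inspect or export the DataFrame as usual, e.g.:

print(df.head())
df.to_csv("weave_calls.csv", index=False)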