I keep getting blocked when using the API; it returns a 429 error


Hello team, raising this case on behalf of client XXXXX

import time

import pandas as pd
import lseg.data as ld
from pyrate_limiter import Rate, Duration, Limiter, TimeClock, BucketFullException

rates = [
    Rate(5, Duration.SECOND),     # 5 calls per second
    Rate(10000, Duration.DAY),    # 10,000 calls per day
]
limiter = Limiter(rates, clock=TimeClock())

# ------------------------------------------------------------------
# Global expiry lookup – populated during discovery so we can build a
# human-readable contract label quickly during data processing.
# ------------------------------------------------------------------

expiry_lookup: dict[str, pd.Timestamp] = {}

# Coffee month codes (coffee futures typically trade in Mar, May, Jul, Sep, Dec)
MONTH_MAP = {
    "H": "March",
    "K": "May",
    "N": "July",
    "U": "September",
    "Z": "December",
}
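
# Hypothetical helper (not in the original post): decode the delivery month
# of a coffee RIC such as "KCZ5" or "1KCH6" using MONTH_MAP.
def month_from_ric(ric: str) -> str:
    root = ric.lstrip("12D")                   # drop session prefixes (night/day)
    code = root[2] if len(root) > 2 else ""    # month letter follows the "KC" root
    return MONTH_MAP.get(code, "Unknown")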

# ------------------------------------------------------------------
# Helper functions
# ------------------------------------------------------------------

def fetch_coffee_arabica_rics() -> pd.DataFrame:
    """Return a DataFrame of every Coffee Arabica future RIC + expiry."""
    df = ld.discovery.search(
        view=ld.discovery.Views.SEARCH_ALL,
        filter=(
            "IsChain eq false and ExpiryDate ne null and "
            "((RIC eq 'KC*') and RCSAssetCategoryGenealogy eq 'A:4\\A:1K\\A:1F' and "
            "ExchangeName xeq 'Intercontinental Exchange US')"
        ),
        select="RIC,ExpiryDate",
        order_by="ExpiryDate desc",
        top=1000,
    )

    if df.empty:
        raise RuntimeError("Discovery search returned no Coffee Arabica futures (RIC KC*)")

    df["ExpiryDate"] = pd.to_datetime(df["ExpiryDate"])
    return df

def ric_to_contract(ric: str) -> str:
    """Convert a RIC to a label like 'March 2025'."""
    expiry = expiry_lookup.get(ric)
    if pd.isna(expiry):
        return "Unknown"
    return expiry.strftime("%B %Y")
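
# Assumption – this wiring is missing from the post: populate the global
# expiry lookup from the discovery results so ric_to_contract can resolve
# a label for each RIC.
ric_df = fetch_coffee_arabica_rics()
expiry_lookup.update(dict(zip(ric_df["RIC"], ric_df["ExpiryDate"])))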

def filter_full_session_rics(ric_df: pd.DataFrame) -> list[str]:
    """Prefer full-session RICs over duplicates (night/day)."""
    selected = []
    for expiry in ric_df["ExpiryDate"].unique():
        group = ric_df[ric_df["ExpiryDate"] == expiry]

        # For coffee, prefer RICs without prefixes (full session)
        full = group[
            ~group["RIC"].str.startswith("1")
            & ~group["RIC"].str.startswith("2")
            & ~group["RIC"].str.startswith("D")
            # regex=False so "^" is matched literally – some coffee RICs contain it
            & ~group["RIC"].str.contains("^", regex=False)
        ]
        if not full.empty:
            selected.append(full.iloc[0]["RIC"])
            continue

        # Fall back to any available RIC for that expiry
        selected.append(group.iloc[0]["RIC"])
    return selected

# ------------------------------------------------------------------
# Robust history fetch with retry / back-off
# ------------------------------------------------------------------

def fetch_history_with_retries(
    rics: list[str],
    fields: list[str],
    start: str,
    end: str,
    interval: str = "daily",
    max_retries: int = 5,
    base_delay: int = 5,
):
    """Call ld.get_history with exponential back-off.

    This helper significantly improves resilience to transient network
    errors and rate-limiting (HTTP 429 / connection resets). It will:
      • retry up to max_retries times
      • sleep for base_delay · 2^attempt seconds between tries
      • transparently reopen the LSEG session if needed

    Returns a DataFrame on success, or None after exhausting retries.
    """
    attempt = 0
    while attempt < max_retries:
        try:
            # Rate-limited wrapper around the LSEG history API
            try:
                limiter.try_acquire("api_call")
            except BucketFullException as exc:
                sleep_time = exc.meta_info.get("remaining_time", 1000) / 1000
                print(f"Rate limit exceeded. Sleeping for {sleep_time:.2f} seconds.")
                time.sleep(sleep_time)
                limiter.try_acquire("api_call")

            df = ld.get_history(
                universe=rics,
                fields=fields,
                start=start,
                end=end,
                interval=interval,
            )

            if df is not None and not df.empty:
                return df

            # An empty result is treated the same as an error – likely blocked
            raise RuntimeError("Empty response from API")

        except Exception as exc:
            first_ric = rics[0] if isinstance(rics, (list, tuple)) and rics else str(rics)
            print(f"⚠️ Attempt {attempt + 1}/{max_retries} failed for batch starting {first_ric}: {exc}")

            # Back off, then retry
            sleep_secs = base_delay * (2 ** attempt)
            print(f"   ⏳ Waiting {sleep_secs} s before retry …")
            time.sleep(sleep_secs)

            # Try to refresh the session – dropped connections are common
            try:
                ld.close_session()
            except Exception:
                pass
            try:
                ld.open_session()
            except Exception as sess_exc:
                print(f"   ⚠️ Could not reopen LSEG session: {sess_exc}")

            attempt += 1

    # All retries exhausted
    first_ric = rics[0] if isinstance(rics, (list, tuple)) and rics else str(rics)
    print(
        f"❌ Giving up on batch starting {first_ric} after {max_retries} retries – data may be incomplete"
    )
    return None
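
# Illustrative usage (assumption: the field name and date range below are
# placeholders, not taken from the original post):
batch_rics = filter_full_session_rics(ric_df)[:20]   # a small batch stays under the limits
prices = fetch_history_with_retries(batch_rics, ["TRDPRC_1"], "2024-01-01", "2024-12-31")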

# ------------------------------------------------------------------
# Adaptive fetch – splits RIC list on failure to avoid API throttling
# ------------------------------------------------------------------

def adaptive_fetch_history(
    rics: list[str],
    fields: list[str],
    start: str,
    end: str,
    interval: str = "daily",
    min_split_size: int = 25,
) -> list[pd.DataFrame]:
    """Download history, splitting the RIC universe recursively on failure.

    The function attempts to retrieve rics in a single request. If it fails
    (after all retries in fetch_history_with_retries), the list is split in
    half and each half is fetched recursively, down to batches of
    min_split_size.
    """
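    # NOTE: the original post is cut off at this point, so the body below is
    # a sketch of the recursive split the docstring describes, not the
    # author's own code.
    df = fetch_history_with_retries(rics, fields, start, end, interval)
    if df is not None:
        return [df]
    if len(rics) <= min_split_size:
        # The smallest allowed batch still failed – give up on these RICs
        return []
    mid = len(rics) // 2
    return (
        adaptive_fetch_history(rics[:mid], fields, start, end, interval, min_split_size)
        + adaptive_fetch_history(rics[mid:], fields, start, end, interval, min_split_size)
    )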

I want someone from your team to edit this code so I stop getting API errors.

I am finding it almost impossible to use your product, and I want a refund for my company's subscription.

Answers

  • Jirapongse

    @John.Cajayon

    Thank you for reaching out to us.

    The client may split the items into multiple batches, such as 20 items per batch.

    For example:

    import pandas as pd
    import time
    import lseg.data as ld

    pd.set_option('future.no_silent_downcasting', True)
    
    df = ld.discovery.search(
        view = ld.discovery.Views.SEARCH_ALL,
        filter = (
            "IsChain eq false and ExpiryDate ne null and "
            "((RIC eq 'KC*') and RCSAssetCategoryGenealogy eq 'A:4\\A:1K\\A:1F' and "
            "ExchangeName xeq 'Intercontinental Exchange US')"
        ),
        select = "RIC,ExpiryDate",
        order_by = "ExpiryDate desc",
        top = 1000,
    )
    
    def split_into_batches(data_list, batch_size):
        batches = []
        for i in range(0, len(data_list), batch_size):
            batches.append(data_list[i:i + batch_size])
        return batches
    
    batches = split_into_batches(df['RIC'].tolist(), 20)
    df_list = []
    for batch in batches:
        batch_df = ld.get_history(
            universe = batch,
            interval = 'daily')
        df_list.append(batch_df)
        time.sleep(1)

    df_list
    

    Moreover, please contact the server team to verify why the client received the 429 status code.