I keep getting blocked using the API: it returns a 429 error

Hello team, raising this case on behalf of client XXXXX. Here is the code we are running:
import time

import pandas as pd
import lseg.data as ld
from pyrate_limiter import BucketFullException, Duration, Limiter, Rate, TimeClock

rates = [
    Rate(5, Duration.SECOND),    # 5 calls per second
    Rate(10000, Duration.DAY),   # 10,000 calls per day
]
limiter = Limiter(rates, clock=TimeClock())

expiry_lookup: dict[str, pd.Timestamp] = {}

# Coffee month codes (coffee futures typically trade in Mar, May, Jul, Sep, Dec)
MONTH_MAP = {
    "H": "March",
    "K": "May",
    "N": "July",
    "U": "September",
    "Z": "December",
}
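As an aside, MONTH_MAP is never referenced in the rest of the snippet; presumably it is meant to decode the month letter embedded in a RIC. A minimal sketch of that idea (the helper name and the assumption about where the month letter sits are ours, not from the original code):

def ric_month_name(ric: str) -> str | None:
    """Return the delivery-month name encoded in a KC RIC, e.g. 'KCZ25' -> 'December'.

    Assumes the month code is the first MONTH_MAP letter after the 'KC' root,
    which holds for plain ICE coffee RICs but is not guaranteed for all variants.
    """
    for ch in ric[2:]:
        if ch in MONTH_MAP:
            return MONTH_MAP[ch]
    return None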
def fetch_coffee_arabica_rics() -> pd.DataFrame:
    """Return a DataFrame of every Coffee Arabica future RIC + expiry."""
    df = ld.discovery.search(
        view=ld.discovery.Views.SEARCH_ALL,
        filter=(
            "IsChain eq false and ExpiryDate ne null and "
            "((RIC eq 'KC*') and RCSAssetCategoryGenealogy eq 'A:4\\A:1K\\A:1F' and "
            "ExchangeName xeq 'Intercontinental Exchange US')"
        ),
        select="RIC,ExpiryDate",
        order_by="ExpiryDate desc",
        top=1000,
    )
    if df.empty:
        raise RuntimeError("Discovery search returned no Coffee Arabica futures (RIC KC*)")
    df["ExpiryDate"] = pd.to_datetime(df["ExpiryDate"])
    return df
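Note that expiry_lookup is declared at module level but never populated anywhere in the excerpt, so ric_to_contract below would always return "Unknown". Presumably it is filled from the discovery result, roughly like this (our guess, not from the original post):

ric_df = fetch_coffee_arabica_rics()
expiry_lookup.update(dict(zip(ric_df["RIC"], ric_df["ExpiryDate"])))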
def ric_to_contract(ric: str) -> str:
    """Convert a RIC to a label like 'March 2025'."""
    expiry = expiry_lookup.get(ric)
    if pd.isna(expiry):
        return "Unknown"
    return expiry.strftime("%B %Y")
def filter_full_session_rics(ric_df: pd.DataFrame) -> list[str]:
    """Prefer full-session RICs over duplicates (night/day)."""
    selected = []
    for expiry in ric_df["ExpiryDate"].unique():
        group = ric_df[ric_df["ExpiryDate"] == expiry]
        full = group[
            ~group["RIC"].str.startswith("1")
            & ~group["RIC"].str.startswith("2")
            & ~group["RIC"].str.startswith("D")
            # regex=False: a bare "^" is a regex anchor and would match every RIC
            & ~group["RIC"].str.contains("^", regex=False)  # sometimes coffee has ^ in the RIC
        ]
        if not full.empty:
            selected.append(full.iloc[0]["RIC"])
            continue
        selected.append(group.iloc[0]["RIC"])
    return selected
def fetch_history_with_retries(
    rics: list[str],
    fields: list[str],
    start: str,
    end: str,
    interval: str = "daily",
    max_retries: int = 5,
    base_delay: int = 5,
):
    """Call ld.get_history with exponential back-off.

    This helper significantly improves resilience to transient network
    errors and rate-limiting (HTTP 429 / connection resets). It will:
      • retry up to max_retries times
      • sleep for base_delay · 2^attempt seconds between tries
      • transparently reopen the LSEG session if needed
    Returns a DataFrame on success, or None after exhausting retries.
    """
    attempt = 0
    while attempt < max_retries:
        try:
            # Respect the local rate limit before every request.
            try:
                limiter.try_acquire("api_call")
            except BucketFullException as exc:
                sleep_time = exc.meta_info.get("remaining_time", 1000) / 1000
                print(f"Rate limit exceeded. Sleeping for {sleep_time:.2f} seconds.")
                time.sleep(sleep_time)
                limiter.try_acquire("api_call")
            df = ld.get_history(
                universe=rics,
                fields=fields,
                start=start,
                end=end,
                interval=interval,
            )
            if df is not None and not df.empty:
                return df
            raise RuntimeError("Empty response from API")
        except Exception as exc:
            first = rics[0] if isinstance(rics, (list, tuple)) and rics else str(rics)
            print(f"⚠️ Attempt {attempt + 1}/{max_retries} failed for batch starting {first}: {exc}")
            sleep_secs = base_delay * (2 ** attempt)
            print(f"   ⏳ Waiting {sleep_secs} s before retry …")
            time.sleep(sleep_secs)
            # Reopen the session in case the error invalidated it.
            try:
                ld.close_session()
            except Exception:
                pass
            try:
                ld.open_session()
            except Exception as sess_exc:
                print(f"   ⚠️ Could not reopen LSEG session: {sess_exc}")
            attempt += 1
    # All retries exhausted
    first = rics[0] if isinstance(rics, (list, tuple)) and rics else str(rics)
    print(f"❌ Giving up on batch starting {first} after {max_retries} retries – data may be incomplete")
    return None
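For reference, a single call to the helper looks like this (the RICs, field name, and dates below are placeholders, not from the original post):

df = fetch_history_with_retries(
    rics=["KCc1", "KCc2"],    # placeholder universe
    fields=["TRDPRC_1"],      # placeholder field
    start="2024-01-01",
    end="2024-06-30",
)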
def adaptive_fetch_history(
    rics: list[str],
    fields: list[str],
    start: str,
    end: str,
    interval: str = "daily",
    min_split_size: int = 25,
) -> list[pd.DataFrame]:
    """Download history, splitting the RIC universe recursively on failure.

    The function attempts to retrieve rics in a single request. If it fails
    (after all retries in fetch_history_with_retries), the list is split in
    half and each half is fetched recursively, stopping once batches reach
    min_split_size RICs.
    """
    # NOTE: the original post was cut off here; the body below is a
    # reconstruction that follows the behaviour described in the docstring.
    df = fetch_history_with_retries(rics, fields, start, end, interval)
    if df is not None:
        return [df]
    if len(rics) <= min_split_size:
        print(f"❌ Dropping batch of {len(rics)} RICs – could not fetch")
        return []
    mid = len(rics) // 2
    return (adaptive_fetch_history(rics[:mid], fields, start, end, interval, min_split_size)
            + adaptive_fetch_history(rics[mid:], fields, start, end, interval, min_split_size))
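Putting the helpers together, the intended flow appears to be roughly this (again illustrative; the field name and dates are placeholders):

ric_df = fetch_coffee_arabica_rics()
full_session = filter_full_session_rics(ric_df)
frames = adaptive_fetch_history(
    rics=full_session,
    fields=["TRDPRC_1"],    # placeholder field
    start="2023-01-01",
    end="2024-01-01",
)
history = pd.concat(frames) if frames else pd.DataFrame()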
I want someone from your team to edit this code so I stop getting API errors.
I am finding it almost impossible to use your product, and I want a refund to my company for the subscription.
Answers
Thank you for reaching out to us.
The client may split the items into multiple batches, such as 20 items per batch.
For example:
import pandas as pd
import time

pd.set_option('future.no_silent_downcasting', True)

df = ld.discovery.search(
    view=ld.discovery.Views.SEARCH_ALL,
    filter=(
        "IsChain eq false and ExpiryDate ne null and "
        "((RIC eq 'KC*') and RCSAssetCategoryGenealogy eq 'A:4\\A:1K\\A:1F' and "
        "ExchangeName xeq 'Intercontinental Exchange US')"
    ),
    select="RIC,ExpiryDate",
    order_by="ExpiryDate desc",
    top=1000,
)

def split_into_batches(data_list, batch_size):
    batches = []
    for i in range(0, len(data_list), batch_size):
        batches.append(data_list[i:i + batch_size])
    return batches

batches = split_into_batches(df['RIC'].tolist(), 20)

df_list = []
for batch in batches:
    batch_df = ld.get_history(universe=batch, interval='daily')
    df_list.append(batch_df)
    time.sleep(1)

df_list
Moreover, please contact the server team to verify why the client got the 429 code.
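If 429s persist even with batching, the batch loop can be combined with the fetch_history_with_retries helper from the question, for example (a sketch reusing the question's helpers; the field name and dates are placeholders):

df_list = []
for batch in batches:
    # fetch_history_with_retries applies the rate limiter and exponential back-off
    batch_df = fetch_history_with_retries(
        rics=batch,
        fields=["TRDPRC_1"],    # placeholder field
        start="2023-01-01",
        end="2024-01-01",
    )
    if batch_df is not None:
        df_list.append(batch_df)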