I see a row of data like this:
{'#RIC': 'Y10YJ25', 'Domain': 'Market Price', 'Date-Time': '2025-04-03T00:02:59.980929684Z', 'GMT Offset': '-5', 'Type': 'Trade', 'Ex/Cntrb.ID': '', 'Price': '4.037', 'Volume': '6', 'Market VWAP': '4.05004', 'Bid Price': '4.035', 'Bid Size': '3', 'Ask Price': '4.037', 'Ask Size': '18', 'Qualifiers': 'v[ACT_TP_1];2[LSTSALCOND];"BID "[AGGRS_SID1]', 'Exch Time': '00:02:59.967000000', 'Block Trd': '', 'Open': '4.11', 'High': '4.11', 'Low': '4.019', 'Acc. Volume': '383', 'Turnover': '', 'Total Volume': '', 'Original Volume': '', 'Display Name': '', 'Odd-Lot Trade Price': '', 'Odd-Lot Trade Volume': '', 'Unique Trade Identification': '71696652', 'Aggressive Order Condition': 'BID ', 'Categorisation of Trades': ''}
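For context, I build dicts like this by parsing the streamed CSV that the call below returns. Roughly, and simplified (assuming requests transparently decompresses the gzip-encoded stream):

import csv

# response is the streaming requests.Response returned by get_time_and_sales below
lines = (line.decode("utf-8") for line in response.iter_lines() if line)
for row in csv.DictReader(lines):
    print(row)  # each row is a dict like the one above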
What is the GMT Offset supposed to mean here? Am I supposed to subtract 5 hours from Date-Time and Exch Time, or do nothing? And what is the difference between the two timestamps?
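If I am supposed to apply it, I assume it would look something like this (just my guess at the intent, not confirmed against the docs; treating Date-Time as UTC and the offset as whole hours):

from datetime import datetime, timedelta, timezone

date_time = "2025-04-03T00:02:59.980929684Z"  # the Date-Time value above
gmt_offset = -5                               # the GMT Offset value above

# Trim the nanosecond fraction to microseconds so datetime can parse it
base, frac = date_time.rstrip("Z").split(".")
utc_dt = datetime.fromisoformat(base).replace(microsecond=int(frac[:6]), tzinfo=timezone.utc)

# Shift into the exchange's local time zone using the GMT Offset column
local_dt = utc_dt.astimezone(timezone(timedelta(hours=gmt_offset)))
print(local_dt.isoformat())  # 2025-04-02T19:02:59.980929-05:00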
This is my API call:
def get_time_and_sales(rics, query_start_date, query_end_date, max_retries=10, retry_delay=20, identifier_type: str = "Ric"):
    """
    Retrieves time and sales data for the given RICs from Refinitiv.

    Docs: https://developers.lseg.com/en/article-catalog/article/using-tick-history-in-r-language-part-3

    This method fetches historical time and sales data, which includes trade information
    such as price, volume, and timestamp for each trade.

    Args:
        rics (str or list[str]): The RIC identifier(s) for the instrument(s) (e.g., "AAPL.O" or ["AAPL.O", "MSFT.O"])
        query_start_date (str): Start date in ISO format (e.g., "2024-03-01T09:30:00Z")
        query_end_date (str): End date in ISO format (e.g., "2024-03-01T16:00:00Z")
        max_retries (int): Maximum number of polling retries (default: 10)
        retry_delay (int): Delay between polling attempts in seconds (default: 20)
        identifier_type (str): Type of the instrument identifiers (default: "Ric")

    Returns:
        requests.Response: A streaming response containing the time and sales data in CSV format

    Example:
        response = get_time_and_sales("AAPL.O", "2024-03-01T09:30:00Z", "2024-03-01T16:00:00Z")
        response = get_time_and_sales(["AAPL.O", "MSFT.O"], "2024-03-01T09:30:00Z", "2024-03-01T16:00:00Z")
        # Process the response as needed
    """
    # Handle both a single RIC and a list of RICs
    if isinstance(rics, str):
        rics = [rics]
    logger.info(f"Fetching time and sales data for {rics} from {query_start_date} to {query_end_date}...")
    json_blob = {
        "ExtractionRequest": {
            "@odata.type": "#DataScope.Select.Api.Extractions.ExtractionRequests.TickHistoryTimeAndSalesExtractionRequest",
            "ContentFieldNames": [
                "Trade - Price",
                "Trade - Volume",
                "Trade - Exchange Time",
                "Trade - Exchange/Contributor ID",
                "Trade - Bid Price",
                "Trade - Ask Price",
                "Trade - Bid Size",
                "Trade - Ask Size",
                "Trade - Odd-Lot Trade Price",
                "Trade - Odd-Lot Trade Volume",
                "Trade - Categorization of Trades",
                "Trade - Display Name",
                "Trade - Unique Trade Identification",
                "Trade - Qualifiers",
                "Trade - Block Trade",
                "Trade - Original Volume",
                "Trade - Aggressive Order Condition",
                "Trade - Accumulated Volume",
                "Trade - Total Volume",
                "Trade - Turnover",
                "Trade - Market VWAP",
                "Trade - High",
                "Trade - Low",
                "Trade - Open",
            ],
            "IdentifierList": {
                "@odata.type": "#DataScope.Select.Api.Extractions.ExtractionRequests.InstrumentIdentifierList",
                "InstrumentIdentifiers": [
                    {
                        "Identifier": ric,
                        "IdentifierType": identifier_type
                    } for ric in rics
                ],
                "ValidationOptions": {"AllowHistoricalInstruments": "true"},
                "UseUserPreferencesForValidationOptions": "false"
            },
            "Condition": {
                # Deliver message timestamps (the Date-Time column) in GMT/UTC
                "MessageTimeStampIn": "GmtUtc",
                "ReportDateRangeType": "Range",
                "QueryStartDate": query_start_date,
                "QueryEndDate": query_end_date
            },
        }
    }
    post_url = URL_BASE + "/Extractions/ExtractRaw"
    # Make the request directly instead of using post_request_with_auth so we can include the Content-Type header
    request_response = requests.post(
        post_url,
        headers={
            "Authorization": f"Token {get_cached_refinitiv_auth_token()}",
            "Content-Type": "application/json",
            "Prefer": "respond-async"
        },
        json=json_blob
    )
    if request_response.status_code == 202:
        # Request accepted; the extraction is processing asynchronously
        monitor_url = request_response.headers.get("Location")  # requests headers are case-insensitive
        logger.info(f"Request accepted but processing asynchronously. Monitor URL: {monitor_url}")
        # Poll the monitor URL until the job is complete
        job_id = poll_for_completion(monitor_url, max_retries, retry_delay)
    elif request_response.status_code == 200:
        # Request completed synchronously
        job_id = request_response.json()["JobId"]
        logger.info(f"Time and sales data extraction job completed with ID: {job_id}")
    else:
        # Error occurred
        logger.error(f"Error response: {request_response.status_code} - {request_response.text}")
        request_response.raise_for_status()
        return None
    # Retrieve the data using the job ID
    data_response = requests.get(
        URL_BASE + f"/Extractions/RawExtractionResults('{job_id}')/$value",
        headers={
            "Authorization": f"Token {get_cached_refinitiv_auth_token()}",
            "Prefer": "respond-async"
        },
        stream=True
    )
    return data_response
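poll_for_completion isn't shown above. A minimal sketch of what such a helper would look like (it reuses the same token helper, and assumes the DSS monitor URL keeps returning 202 while the job runs and 200 with a JobId once it finishes):

import time
import requests

def poll_for_completion(monitor_url, max_retries, retry_delay):
    """Poll the DSS monitor URL until the extraction finishes; return the JobId."""
    for _ in range(max_retries):
        response = requests.get(
            monitor_url,
            headers={"Authorization": f"Token {get_cached_refinitiv_auth_token()}"}
        )
        if response.status_code == 200:
            # Job finished; the body carries the JobId used to fetch the raw results
            return response.json()["JobId"]
        if response.status_code != 202:
            response.raise_for_status()
        time.sleep(retry_delay)
    raise TimeoutError(f"Extraction did not complete after {max_retries} polls")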