I am using the following code to extract data, but I am getting an "all identifiers are invalid" error.
Step 2: send an on demand extraction request using the received token
# Step 2: submit an on-demand ExtractRaw request to DataScope Select using
# the token obtained in step 1. On success (HTTP 200/202) the JobId is taken
# from the JSON body or, for async 202 replies with an empty body, from the
# Location header.
if not token:
    print("Authentication token is missing. Cannot proceed with extraction.")
else:
    requestUrl = 'https://selectapi.datascope.refinitiv.com/RestApi/v1/Extractions/ExtractRaw'
    requestHeaders = {
        "Prefer": "respond-async",
        "Content-Type": "application/json",
        "Authorization": "token " + token,
    }

    # Using valid test RIC MCGB6150U5 (futures). Replace this list with your
    # 42 options RICs once you have the valid format.
    instrument_identifiers = [
        {"Identifier": "MCGB6150U5", "IdentifierType": "Ric"},
    ]

    requestBody = {
        "ExtractionRequest": {
            # BUG FIX: the key is "@odata.type" — the original "@odata .type"
            # (stray space) is not recognised by the API.
            "@odata.type": "#DataScope.Select.Api.Extractions.ExtractionRequests.TickHistoryIntradaySummariesExtractionRequest",
            # Simplified field set: Open, High, Low, Last (close).
            "ContentFieldNames": ["Open", "High", "Low", "Last"],
            "IdentifierList": {
                "@odata.type": "#DataScope.Select.Api.Extractions.ExtractionRequests.InstrumentIdentifierList",
                "InstrumentIdentifiers": instrument_identifiers,
                # FIX for "all identifiers are invalid": expired/historical
                # contracts only pass validation when historical instruments
                # are explicitly allowed.
                "ValidationOptions": {"AllowHistoricalInstruments": True},
                "UseUserPreferencesForValidationOptions": "false",
            },
            "Condition": {
                "MessageTimeStampIn": "GmtUtc",
                "ReportDateRangeType": "Range",
                "QueryStartDate": "2025-09-01T00:00:00.000Z",
                "QueryEndDate": "2025-09-30T23:59:59.000Z",
                "SummaryInterval": "OneMinute",
                "TimebarPersistence": "true",
                "DisplaySourceRIC": "true",
            },
        }
    }

    try:
        # NOTE(review): verify=False disables TLS certificate verification —
        # acceptable only for local debugging, remove for production use.
        r2 = requests.post(requestUrl, json=requestBody,
                           headers=requestHeaders, verify=False)
        status_code = r2.status_code
        print(f"HTTP status of the initial response: {status_code}")

        if status_code in (200, 202):
            # Async 202 replies may carry an empty body; the JobId then lives
            # in the Location header instead of the JSON payload.
            if r2.text.strip():
                try:
                    r2Json = json.loads(r2.text.encode('ascii', 'ignore'))
                    jobId = r2Json.get("JobId")
                    notes = r2Json.get("Notes", ["No extraction notes available."])
                    print(f'\nJobId: {jobId}')
                    print(f'Extraction notes:\n{notes[0]}')
                except json.JSONDecodeError:
                    print("\nWARNING: 202 Response body was not valid JSON or was empty.")
                    if 'location' in r2.headers:
                        location_url = r2.headers["location"]
                        # JobId is the last quoted segment of the monitor URL,
                        # e.g. ...('<JobId>')
                        jobId = location_url.split("'")[-2]
                        print(f"Successfully retrieved JobId from Location header: {jobId}")
                    else:
                        print("CRITICAL ERROR: Could not find JobId in response body or Location header.")
            else:
                print("\nResponse body was empty. Attempting to retrieve JobId from Location header.")
                if 'location' in r2.headers:
                    location_url = r2.headers["location"]
                    jobId = location_url.split("'")[-2]
                    print(f"Successfully retrieved JobId from Location header: {jobId}")
                else:
                    print("CRITICAL ERROR: Could not find JobId in Location header.")
        else:
            print(f'Extraction submission failed. Status: {status_code}')
            print('Response content:', r2.text)
    except requests.exceptions.RequestException as e:
        print(f"An error occurred during extraction submission: {e}")
Tagged:
0
Answers
-
Thank you for reaching out to us.
You need to add the AllowHistoricalInstruments into the request message.
"ValidationOptions": {
"AllowHistoricalInstruments": true
}

The complete request message is:
{
"ExtractionRequest": {
"@odata.type": "#DataScope.Select.Api.Extractions.ExtractionRequests.TickHistoryIntradaySummariesExtractionRequest",
"ContentFieldNames": [
"Open",
"High",
"Low",
"Last"
],
"IdentifierList": {
"@odata.type": "#DataScope.Select.Api.Extractions.ExtractionRequests.InstrumentIdentifierList",
"InstrumentIdentifiers": [
{
"Identifier": "MCGB6150U5",
"IdentifierType": "Ric"
}
],
"ValidationOptions": {
"AllowHistoricalInstruments": true
}
},
"Condition": {
"MessageTimeStampIn": "GmtUtc",
"ReportDateRangeType": "Range",
"QueryStartDate": "2025-09-01T00:00:00.000Z",
"QueryEndDate": "2025-09-30T23:59:59.000Z",
"SummaryInterval": "OneMinute",
"TimebarPersistence": "true",
"DisplaySourceRIC": "true"
}
}
}
Categories
- All Categories
- 3 Polls
- 6 AHS
- 37 Alpha
- 167 App Studio
- 6 Block Chain
- 4 Bot Platform
- 18 Connected Risk APIs
- 47 Data Fusion
- 34 Data Model Discovery
- 713 Datastream
- 1.5K DSS
- 639 Eikon COM
- 5.3K Eikon Data APIs
- 20 Electronic Trading
- 1 Generic FIX
- 7 Local Bank Node API
- 12 Trading API
- 3K Elektron
- 1.5K EMA
- 260 ETA
- 574 WebSocket API
- 42 FX Venues
- 16 FX Market Data
- 2 FX Post Trade
- 1 FX Trading - Matching
- 12 FX Trading – RFQ Maker
- 5 Intelligent Tagging
- 2 Legal One
- 26 Messenger Bot
- 5 Messenger Side by Side
- 9 ONESOURCE
- 7 Indirect Tax
- 60 Open Calais
- 285 Open PermID
- 47 Entity Search
- 2 Org ID
- 1 PAM
- PAM - Logging
- 6 Product Insight
- Project Tracking
- ProView
- ProView Internal
- 25 RDMS
- 2.3K Refinitiv Data Platform
- 20 CFS Bulk File/TM3
- 939 Refinitiv Data Platform Libraries
- 6 LSEG Due Diligence
- 1 LSEG Due Diligence Portal API
- 4 Refinitiv Due Dilligence Centre
- Rose's Space
- 1.2K Screening
- 18 Qual-ID API
- 12 World-Check Customer Risk Screener
- World-Check On Demand
- 1K World-Check One
- 46 World-Check One Zero Footprint
- 46 Side by Side Integration API
- 2 Test Space
- 3 Thomson One Smart
- 10 TR Knowledge Graph
- 151 Transactions
- 143 REDI API
- 1.8K TREP APIs
- 4 CAT
- 27 DACS Station
- 126 Open DACS
- 1.1K RFA
- 108 UPA
- 197 TREP Infrastructure
- 232 TRKD
- 925 TRTH
- 5 Velocity Analytics
- 9 Wealth Management Web Services
- 107 Workspace SDK
- 11 Element Framework
- 5 Grid
- 19 World-Check Data File
- 1 Yield Book Analytics
- 48 中文论坛