import requests
import json
import shutil
import time


class Credentials(object):
    """Holds the DSS authentication endpoint, headers, and credentials payload.

    Bug fixed: the original ``__init__`` wrote into the *class-level*
    ``requestBody`` dict, so every instance shared one mutable dict and
    creating a second ``Credentials`` object silently overwrote the
    username/password of the first.  Each instance now builds its own body.
    """

    # Authentication endpoint (token is valid for 24 hours once issued).
    requestUrl = "https://hosted.datascopeapi.reuters.com/RestApi/v1/Authentication/RequestToken"

    # Headers shared by all instances; safe at class level (never mutated).
    requestHeaders = {
        "Prefer": "respond-async",
        "Content-Type": "application/json"
    }

    # Class-level template kept for backward compatibility with code that
    # reads Credentials.requestBody before instantiation.
    requestBody = {
        "Credentials": {
            "Username": "myUserName",
            "Password": "myPassword"
        }
    }

    def __init__(self, uname, pwd):
        """Build a per-instance credentials payload.

        Args:
            uname: DSS account user name.
            pwd: DSS account password.
        """
        # Instance attribute shadows the class template, so instances no
        # longer share (and clobber) one mutable dict.
        self.requestBody = {
            "Credentials": {
                "Username": uname,
                "Password": pwd
            }
        }
    
# --- Step 1: authenticate against DSS and obtain a 24-hour token ---------
usr = '???'
pwd = '???!'
c = Credentials(usr, pwd)

# Output location for the data downloaded in the final step.
filePath = "C:/"
fileNameRoot = "Python_Test3"

r1 = requests.post(c.requestUrl, json=c.requestBody, headers=c.requestHeaders)

if r1.status_code == 200:
    # The token is returned in the "value" field of the JSON response.
    # r1.json() replaces the old ASCII-stripping json.loads round-trip,
    # which could silently corrupt non-ASCII content.
    token = r1.json()["value"]
    print ('Authentication token (valid 24 hours):')
    print (token)
else:
    print ('Please replace myUserName and myPassword with valid credentials, then repeat the request')
    # Without a token every subsequent request would fail with a NameError;
    # stop here with a clear, non-zero exit instead.
    raise SystemExit(1)

# --- Step 2: submit the Tick History Time & Sales extraction and poll ----
requestUrl = 'https://hosted.datascopeapi.reuters.com/RestApi/v1/Extractions/ExtractRaw'
requestHeaders = {
    "Prefer": "respond-async",
    "Content-Type": "application/json",
    "Authorization": "token " + token
}

# On-demand extraction: trades for one RIC over a fixed date range.
# Boolean-looking values are strings by design — the DSS API expects them.
requestBody = {
  "ExtractionRequest": {
    "@odata.type": "#ThomsonReuters.Dss.Api.Extractions.ExtractionRequests.TickHistoryTimeAndSalesExtractionRequest",
    "ContentFieldNames": [
      "Trade - Price",
      "Trade - Volume",
      "Trade - Exchange Time"
    ],
    "IdentifierList": {
      "@odata.type": "#ThomsonReuters.Dss.Api.Extractions.ExtractionRequests.InstrumentIdentifierList",
      "InstrumentIdentifiers": [{
        "Identifier": "SXFM7",
        "IdentifierType": "Ric"
      }],
      "ValidationOptions": {
        "AllowHistoricalInstruments": "true"
      },
      "UseUserPreferencesForValidationOptions": "false",
    },
    "Condition": {
      "MessageTimeStampIn": "GmtUtc",
      "ApplyCorrectionsAndCancellations": "false",
      "ReportDateRangeType": "Range",
      "QueryStartDate": "2017-05-30T01:00:00.000Z",
      "QueryEndDate": "2017-06-02T23:59:00.000Z",
      "DisplaySourceRIC": "true"
    }
  }
}

r2 = requests.post(requestUrl, json=requestBody, headers=requestHeaders)
print (r2.status_code)

# An accepted async request answers 202 with a "location" header to poll.
# Guard against rejections, which previously raised a bare KeyError here.
if "location" not in r2.headers:
    print ('The extraction request was rejected; no monitor URL to poll.')
    raise SystemExit(1)

requestUrl = r2.headers["location"]
requestHeaders = {
    "Prefer": "respond-async",
    "Content-Type": "application/json",
    "Authorization": "token " + token
}

r3 = requests.get(requestUrl, headers=requestHeaders)

# When the status of the request is 200 the extraction is complete; we
# display the jobId and the extraction notes.
print ('response status = ' + str(r3.status_code))

while r3.status_code == 202:
    print ('As we received a 202, we wait 30 seconds, then poll again (until we receive a 200)')
    time.sleep(30)
    r3 = requests.get(requestUrl, headers=requestHeaders)
    print ('response status = ' + str(r3.status_code))

if r3.status_code == 200:
    r3Json = r3.json()
    jobId = r3Json["JobId"]
    print ('\njobId: ' + jobId + '\n')
    notes = r3Json["Notes"]
    print ('Extraction notes:\n' + notes[0])
else:
    print ('There was an error. Try running this cell again. If it fails, re-run the previous cell.\n')
    # jobId is undefined on failure; the download step below would crash
    # with a NameError, so exit cleanly here instead.
    raise SystemExit(1)

# --- Step 3: download the raw (gzipped) extraction result ----------------
requestUrl = "https://hosted.datascopeapi.reuters.com/RestApi/v1/Extractions/RawExtractionResults" + "('" + jobId + "')" + "/$value"

requestHeaders = {
    "Prefer": "respond-async",
    "Content-Type": "text/plain",
    "Accept-Encoding": "gzip",
    "Authorization": "token " + token
}

# stream=True keeps the body on the wire so it can be copied to disk in
# chunks; decode_content=False stops urllib3 from transparently gunzipping,
# so the file is saved exactly as the compressed payload.
r5 = requests.get(requestUrl, headers=requestHeaders, stream=True)
try:
    r5.raw.decode_content = False
    # .get() with a fallback avoids a KeyError when the server omits a header.
    print ('Response headers for content: type: ' + r5.headers.get("Content-Type", "unknown")
           + ' - encoding: ' + r5.headers.get("Content-Encoding", "unknown") + '\n')

    fileName = filePath + fileNameRoot + ".csv.gz"
    chunk_size = 1024
    with open(fileName, 'wb') as fd:
        shutil.copyfileobj(r5.raw, fd, chunk_size)
finally:
    # A streamed response holds its pooled connection open until closed;
    # the original leaked it.
    r5.close()

print ('Saved compressed data to file:' + fileName)