Commit 62b57fc

add get_cds_index
1 parent d45a6a5 commit 62b57fc

File tree

1 file changed

Lines changed: 202 additions & 0 deletions
@@ -0,0 +1,202 @@
"""Systemathics Ganymede API CDS Index Helpers

This module helps retrieve CDS index data from the Systemathics Ganymede authenticated API.

Functions:
    get_cds_index - Get CDS Index data as a DataFrame using the Ganymede gRPC API.
"""

import grpc
import pandas as pd
from datetime import date, datetime
from google.type import date_pb2

from systemathics.apis.type.shared.v1 import asset_pb2 as asset
from systemathics.apis.type.shared.v1 import constraints_pb2 as constraints
from systemathics.apis.type.shared.v1 import date_interval_pb2 as date_interval
import systemathics.apis.type.shared.v1.identifier_pb2 as identifier
import systemathics.apis.services.daily.v2.get_daily_pb2 as get_daily
import systemathics.apis.services.daily.v2.get_daily_pb2_grpc as get_daily_service

import systemathics.apis.helpers.token_helpers as token_helpers
import systemathics.apis.helpers.channel_helpers as channel_helpers

def get_cds_index(ticker, start_date=None, end_date=None, batch=None, selected_fields=None, provider="Markit"):
    """
    Fetch CDS index data from the gRPC API for a given ticker and date range.

    Parameters:
        ticker (str): The ticker symbol.
        start_date (datetime.date or str, optional): Start date for data retrieval.
            If None, no start limit is applied.
        end_date (datetime.date or str, optional): End date for data retrieval.
            If None, uses today's date.
        batch (str, optional): Batch name to be used for filtering. If None, gets all batches.
        selected_fields (list, optional): List of specific fields to retrieve. If None, gets all fields.
        provider (str): Data provider, default is "Markit".

    Returns:
        pd.DataFrame: DataFrame with Date as index and all available fields as columns.
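
    Example (hypothetical ticker; assumes valid Ganymede credentials are
    configured for token_helpers and channel_helpers):
        df = get_cds_index("ITRAXX-EUROPE", start_date="2024-01-01",
                           end_date="2024-06-30",
                           selected_fields=["Price", "ModelSpread"])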
    """

    def python_date_to_google_date(py_date):
        """Convert a Python date to a Google Date protobuf message."""
        return date_pb2.Date(year=py_date.year, month=py_date.month, day=py_date.day)

    # Helper function to parse date inputs
    def parse_date_input(date_input):
        """Convert date, datetime or 'YYYY-MM-DD' string inputs to Google Date messages."""
        if date_input is None:
            return None
        # Check datetime before date: datetime is a subclass of date
        if isinstance(date_input, datetime):
            return python_date_to_google_date(date_input.date())
        if isinstance(date_input, date):
            return python_date_to_google_date(date_input)
        if isinstance(date_input, str):
            d = datetime.strptime(date_input, '%Y-%m-%d').date()
            return python_date_to_google_date(d)
        raise ValueError(f"Invalid date type: {type(date_input)}")

    # All available fields
    all_fields = [
        "CompositePriceAsk", "CompositePriceBid", "CompositeSpreadAsk",
        "CompositeSpreadBid", "ConventionalSpread", "CreditDv01",
        "DefaultProbability", "Heat", "IrDv01", "JumpToDefault",
        "JumpToZero", "ModelPrice", "ModelSpread", "Price",
        "Rec01", "RiskyPv01", "SkewPrice", "SkewSpread"
    ]

    # Use all fields if none specified, otherwise validate selected fields
    if selected_fields is None:
        fields = all_fields
    else:
        fields = [f for f in selected_fields if f in all_fields]
        if not fields:
            raise ValueError("No valid fields selected")

    # Create identifier
    id = identifier.Identifier(
        asset_type=asset.AssetType.ASSET_TYPE_CDS_INDEX,
        ticker=ticker
    )
    id.provider.value = provider

    # Build constraints only if we have at least one date
    constraints_obj = None
    if start_date is not None or end_date is not None:
        # Create a DateInterval with only the dates that are provided
        date_interval_kwargs = {}
        if start_date is not None:
            date_interval_kwargs['start_date'] = parse_date_input(start_date)
        if end_date is not None:
            date_interval_kwargs['end_date'] = parse_date_input(end_date)

        constraints_obj = constraints.Constraints(
            date_intervals=[date_interval.DateInterval(**date_interval_kwargs)]
        )

    try:
        # Open gRPC channel
        with channel_helpers.get_grpc_channel() as channel:
            # Send request and receive response
            token = token_helpers.get_token()
            first = True
            response = []
            info = None
            # Create service stub
            service = get_daily_service.DailyServiceStub(channel)

            if batch is None:
                # Create request with or without constraints
                request_kwargs = {
                    'identifier': id,
                    'fields': fields
                }
                if constraints_obj is not None:
                    request_kwargs['constraints'] = constraints_obj

                vector_request = get_daily.DailyRequest(**request_kwargs)

                for data in service.DailyVectorStream(
                    request=vector_request,
                    metadata=[('authorization', token)]
                ):
                    if first:
                        info = data
                        first = False
                    else:
                        response.append(data.data)

            else:
                request_kwargs = {
                    'identifier': id,
                    'fields': fields,
                    'key': batch
                }
                if constraints_obj is not None:
                    request_kwargs['constraints'] = constraints_obj

                vector_key_request = get_daily.DailyVectorKeyRequest(**request_kwargs)

                for data in service.DailyVectorKeyStream(
                    request=vector_key_request,
                    metadata=[('authorization', token)]
                ):
                    if first:
                        info = data
                        first = False
                    else:
                        response.append(data.data)

            # Process the response
            if not response or info is None:
                print("No data received")
                return pd.DataFrame()

            # Get field indices
            available_fields = [f for f in info.info.fields]
            field_indices = {field: available_fields.index(field)
                             for field in fields if field in available_fields}

            # Extract dates
            dates = [date(d.date.year, d.date.month, d.date.day) for d in response]

            # Extract keys
            keys = [b.key for b in response]

            # Create dictionary for DataFrame
            data_dict = {'Key': keys}

            # Extract data for each field
            for field_name, field_index in field_indices.items():
                data_dict[field_name] = [b.data[field_index] for b in response]

            # Create DataFrame
            df = pd.DataFrame(data_dict, index=dates)
            df.index.name = 'Date'

            # Sort by date for better readability
            df = df.sort_index()

            return df

    except grpc.RpcError as e:
        print(f"gRPC Error: {e.code().name}")
        print(f"Details: {e.details()}")
        return pd.DataFrame()
    except Exception as e:
        print(f"Error: {str(e)}")
        return pd.DataFrame()
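

# Usage sketch (hypothetical ticker; assumes valid Ganymede credentials are
# configured for token_helpers and channel_helpers).
if __name__ == "__main__":
    # Pull the full daily history with all available fields and show a preview
    sample_df = get_cds_index("ITRAXX-EUROPE")
    print(sample_df.head())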

0 commit comments
