AshenH committed on
Commit 95cc7e5 · verified · 1 Parent(s): 78f9096

Update app.py

Files changed (1):
  1. app.py +412 -78
app.py CHANGED
@@ -1,18 +1,29 @@
  import os
  from datetime import datetime
  from pathlib import Path
- from typing import Dict, Any, Tuple, Any

  import duckdb
  import pandas as pd
  import numpy as np
  import matplotlib.pyplot as plt
  import gradio as gr
- from pydantic import BaseModel
  from reportlab.lib.pagesizes import A4
  from reportlab.lib.units import mm
  from reportlab.pdfgen import canvas

  # -------------------------------------------------------------------
  # Basic configuration
  # -------------------------------------------------------------------
@@ -22,20 +33,99 @@ VIEW_FQN = "my_db.main.positions_v"
  EXPORT_DIR = Path("exports")
  EXPORT_DIR.mkdir(exist_ok=True)


  # -------------------------------------------------------------------
- # MotherDuck connection
  # -------------------------------------------------------------------
- def connect_md() -> duckdb.DuckDBPyConnection:
-     token = os.environ.get("MOTHERDUCK_TOKEN", "")
-     if not token:
-         raise RuntimeError("MOTHERDUCK_TOKEN is not set. Add it as a Space secret.")
-     conn = duckdb.connect(f"md:?motherduck_token={token}")
-     return conn


  # -------------------------------------------------------------------
- # SQL snippets
  # -------------------------------------------------------------------
  CREATE_VIEW_SQL = f"""
  CREATE OR REPLACE VIEW {VIEW_FQN} AS
@@ -49,37 +139,45 @@ SELECT
      Interest_rate,
      days_to_maturity,
      CASE
-       WHEN lower(product) IN ('fd','term_deposit','td','savings','current','call','repo_liab') THEN 'SoF'
-       WHEN lower(product) IN ('loan','overdraft','advances','bills','bill','tbond','t-bond','tbill','t-bill','repo_asset') THEN 'Assets'
        ELSE 'Unknown'
      END AS bucket
- FROM {TABLE_FQN};
  """

  MAX_DATE_SQL = f"""
- WITH maxd AS (SELECT max(as_of_date) AS d FROM {VIEW_FQN})
- SELECT d FROM maxd;
  """

  KPI_SQL = f"""
- WITH maxd AS (SELECT max(as_of_date) AS d FROM {VIEW_FQN}),
  t1 AS (
-     SELECT p.bucket, SUM(p.Portfolio_value) AS amt
      FROM {VIEW_FQN} p
-     JOIN maxd m ON p.as_of_date = m.d
      WHERE p.days_to_maturity <= 1
      GROUP BY p.bucket
  )
  SELECT
-     COALESCE(SUM(CASE WHEN bucket='Assets' THEN amt END),0) AS assets_t1,
-     COALESCE(SUM(CASE WHEN bucket='SoF' THEN amt END),0) AS sof_t1,
-     COALESCE(SUM(CASE WHEN bucket='Assets' THEN amt END),0)
-       - COALESCE(SUM(CASE WHEN bucket='SoF' THEN amt END),0) AS net_gap_t1
  FROM t1;
  """

  LADDER_SQL = f"""
- WITH maxd AS (SELECT max(as_of_date) AS d FROM {VIEW_FQN})
  SELECT
      CASE
          WHEN p.days_to_maturity <= 1 THEN 'T+1'
@@ -90,124 +188,360 @@ SELECT
      p.bucket,
      SUM(p.Portfolio_value) AS amount
  FROM {VIEW_FQN} p
- JOIN maxd m ON p.as_of_date = m.d
- GROUP BY 1,2
- ORDER BY CASE time_bucket
-       WHEN 'T+1' THEN 1 WHEN 'T+2..7' THEN 2 WHEN 'T+8..30' THEN 3 ELSE 4 END,
      p.bucket;
  """

  T1_BY_MONTH_SQL = f"""
- WITH maxd AS (SELECT max(as_of_date) AS d FROM {VIEW_FQN})
- SELECT p.bucket, p.months, SUM(p.Portfolio_value) AS amount
  FROM {VIEW_FQN} p
- JOIN maxd m ON p.as_of_date = m.d
  WHERE p.days_to_maturity <= 1
- GROUP BY 1,2
  ORDER BY p.bucket, amount DESC
  LIMIT 50;
  """

  T1_BY_SEGMENT_SQL = f"""
- WITH maxd AS (SELECT max(as_of_date) AS d FROM {VIEW_FQN})
- SELECT p.bucket, p.segments, SUM(p.Portfolio_value) AS amount
  FROM {VIEW_FQN} p
- JOIN maxd m ON p.as_of_date = m.d
  WHERE p.days_to_maturity <= 1
- GROUP BY 1,2
  ORDER BY p.bucket, amount DESC
  LIMIT 50;
  """

  T1_BY_CCY_SQL = f"""
- WITH maxd AS (SELECT max(as_of_date) AS d FROM {VIEW_FQN})
- SELECT p.bucket, p.currency, SUM(p.Portfolio_value) AS amount
  FROM {VIEW_FQN} p
- JOIN maxd m ON p.as_of_date = m.d
  WHERE p.days_to_maturity <= 1
- GROUP BY 1,2
  ORDER BY p.bucket, amount DESC;
  """

  IRR_SQL = f"""
- WITH maxd AS (SELECT max(as_of_date) AS d FROM {VIEW_FQN}),
  base AS (
      SELECT
          p.bucket,
          p.Portfolio_value AS pv,
          (p.Interest_rate / 100.0) AS y,
          CASE
-             WHEN p.days_to_maturity IS NOT NULL THEN p.days_to_maturity/365.0
-             WHEN p.months IS NOT NULL THEN p.months/12.0
              ELSE NULL
          END AS T_years
      FROM {VIEW_FQN} p
-     JOIN maxd m ON p.as_of_date = m.d
      WHERE p.Portfolio_value IS NOT NULL
  ),
  metrics AS (
      SELECT
          bucket,
          pv,
-         CASE WHEN T_years IS NULL THEN NULL
-              WHEN y IS NULL THEN T_years
-              ELSE T_years/(1.0+y) END AS dur_mod,
-         CASE WHEN T_years IS NULL THEN NULL
-              WHEN y IS NULL THEN T_years*(T_years+1.0)
-              ELSE (T_years*(T_years+1.0))/POWER(1.0+y,2) END AS convexity_approx,
-         CASE WHEN T_years IS NULL THEN NULL
-              ELSE pv * (CASE WHEN y IS NULL THEN T_years ELSE T_years/(1.0+y) END) * 0.0001 END AS dv01
      FROM base
  ),
  agg AS (
      SELECT
          bucket,
          SUM(pv) AS pv_sum,
-         SUM(pv * dur_mod) / NULLIF(SUM(pv),0) AS dur_mod_port,
          SUM(dv01) AS dv01_sum
      FROM metrics
      GROUP BY bucket
  )
  SELECT
-     COALESCE(MAX(CASE WHEN bucket='Assets' THEN pv_sum END),0) AS assets_pv,
-     COALESCE(MAX(CASE WHEN bucket='SoF' THEN pv_sum END),0) AS sof_pv,
-     COALESCE(MAX(CASE WHEN bucket='Assets' THEN dur_mod_port END),0) AS assets_dur_mod,
-     COALESCE(MAX(CASE WHEN bucket='SoF' THEN dur_mod_port END),0) AS sof_dur_mod,
-     COALESCE(MAX(CASE WHEN bucket='Assets' THEN dur_mod_port END),0)
-       - COALESCE(MAX(CASE WHEN bucket='SoF' THEN dur_mod_port END),0) AS duration_gap,
-     COALESCE(MAX(CASE WHEN bucket='Assets' THEN dv01_sum END),0)
-       - COALESCE(MAX(CASE WHEN bucket='SoF' THEN dv01_sum END),0) AS net_dv01
  FROM agg;
  """

  SHOCK_SQL = f"""
- WITH maxd AS (SELECT max(as_of_date) AS d FROM {VIEW_FQN}),
  base AS (
      SELECT
          p.bucket,
          p.Portfolio_value AS pv,
          (p.Interest_rate / 100.0) AS y,
          CASE
-             WHEN p.days_to_maturity IS NOT NULL THEN p.days_to_maturity/365.0
-             WHEN p.months IS NOT NULL THEN p.months/12.0
          END AS T_years
      FROM {VIEW_FQN} p
-     JOIN maxd m ON p.as_of_date = m.d
  ),
  k AS (
      SELECT
-         bucket, pv,
-         CASE WHEN T_years IS NULL THEN NULL
-              WHEN y IS NULL THEN T_years
-              ELSE T_years/(1.0+y) END AS dur_mod,
-         CASE WHEN T_years IS NULL THEN NULL
-              WHEN y IS NULL THEN T_years*(T_years+1.0)
-              ELSE (T_years*(T_years+1.0))/POWER(1.0+y,2) END AS convexity_approx
      FROM base
- ),
- shock AS (
-     SELECT
-         bucket,
-         SUM((- pv * dur_mod * 0.01) + (0.5 * pv * convexity_approx * POWER(0.01,2))) AS dPV_up_100bp,
-         SUM((+ pv * dur_mod * 0.01) + (0.5 * pv * convexity_approx * POWER(-0.01,2))) AS dPV_dn_100bp
- F

  import os
+ import logging
  from datetime import datetime
  from pathlib import Path
+ from typing import Dict, Any, Tuple, Optional
+ from contextlib import contextmanager

  import duckdb
  import pandas as pd
  import numpy as np
  import matplotlib.pyplot as plt
  import gradio as gr
+ from pydantic import BaseModel, Field
  from reportlab.lib.pagesizes import A4
  from reportlab.lib.units import mm
  from reportlab.pdfgen import canvas

+ # -------------------------------------------------------------------
+ # Logging Configuration
+ # -------------------------------------------------------------------
+ logging.basicConfig(
+     level=logging.INFO,
+     format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
+ )
+ logger = logging.getLogger(__name__)
+
  # -------------------------------------------------------------------
  # Basic configuration
  # -------------------------------------------------------------------

  EXPORT_DIR = Path("exports")
  EXPORT_DIR.mkdir(exist_ok=True)

+ # Query timeout in seconds
+ QUERY_TIMEOUT_SECONDS = 30
+
+ # -------------------------------------------------------------------
+ # Data Models
+ # -------------------------------------------------------------------
+ class KPIMetrics(BaseModel):
+     """T+1 Key Performance Indicators"""
+     assets_t1: float = Field(ge=0, description="T+1 Assets")
+     sof_t1: float = Field(ge=0, description="T+1 Sources of Funds")
+     net_gap_t1: float = Field(description="T+1 Net Gap")
+
+ class IRRMetrics(BaseModel):
+     """Interest Rate Risk Metrics"""
+     assets_pv: float = Field(ge=0)
+     sof_pv: float = Field(ge=0)
+     assets_dur_mod: float = Field(ge=0)
+     sof_dur_mod: float = Field(ge=0)
+     duration_gap: float
+     net_dv01: float

  # -------------------------------------------------------------------
+ # MotherDuck connection with proper error handling
  # -------------------------------------------------------------------
+ @contextmanager
+ def get_db_connection():
+     """
+     Context manager for database connections with proper cleanup.
+
+     Yields:
+         duckdb.DuckDBPyConnection: Active database connection
+
+     Raises:
+         RuntimeError: If MOTHERDUCK_TOKEN is not configured
+         Exception: For database connection errors
+     """
+     conn = None
+     try:
+         token = os.environ.get("MOTHERDUCK_TOKEN", "")
+         if not token:
+             logger.error("MOTHERDUCK_TOKEN environment variable not set")
+             raise RuntimeError(
+                 "MOTHERDUCK_TOKEN is not set. Please add it as a Space secret."
+             )
+
+         logger.info("Establishing MotherDuck connection")
+         conn = duckdb.connect(f"md:?motherduck_token={token}")
+
+         # Set query timeout
+         conn.execute(f"SET statement_timeout='{QUERY_TIMEOUT_SECONDS}s'")
+
+         yield conn
+
+     except Exception as e:
+         logger.error(f"Database connection error: {str(e)}")
+         raise
+     finally:
+         if conn:
+             try:
+                 conn.close()
+                 logger.info("Database connection closed")
+             except Exception as e:
+                 logger.warning(f"Error closing connection: {str(e)}")
+
+
+ def execute_query(conn: duckdb.DuckDBPyConnection, query: str,
+                   description: str = "Query") -> pd.DataFrame:
+     """
+     Execute a query with error handling and logging.
+
+     Args:
+         conn: Database connection
+         query: SQL query to execute
+         description: Human-readable description for logging
+
+     Returns:
+         pd.DataFrame: Query results
+
+     Raises:
+         Exception: For query execution errors
+     """
+     try:
+         logger.info(f"Executing {description}")
+         result = conn.execute(query).df()
+         logger.info(f"{description} completed: {len(result)} rows returned")
+         return result
+     except Exception as e:
+         logger.error(f"{description} failed: {str(e)}")
+         raise Exception(f"Query execution failed for {description}: {str(e)}")

  # -------------------------------------------------------------------
+ # SQL snippets with fixed syntax
  # -------------------------------------------------------------------
  CREATE_VIEW_SQL = f"""
  CREATE OR REPLACE VIEW {VIEW_FQN} AS

      Interest_rate,
      days_to_maturity,
      CASE
+         WHEN LOWER(TRIM(product)) IN ('fd','term_deposit','td','savings','current','call','repo_liab') THEN 'SoF'
+         WHEN LOWER(TRIM(product)) IN ('loan','overdraft','advances','bills','bill','tbond','t-bond','tbill','t-bill','repo_asset') THEN 'Assets'
          ELSE 'Unknown'
      END AS bucket
+ FROM {TABLE_FQN}
+ WHERE Portfolio_value IS NOT NULL;
  """

  MAX_DATE_SQL = f"""
+ SELECT MAX(as_of_date) AS d
+ FROM {VIEW_FQN};
  """

  KPI_SQL = f"""
+ WITH maxd AS (
+     SELECT MAX(as_of_date) AS d FROM {VIEW_FQN}
+ ),
  t1 AS (
+     SELECT
+         p.bucket,
+         SUM(p.Portfolio_value) AS amt
      FROM {VIEW_FQN} p
+     INNER JOIN maxd m ON p.as_of_date = m.d
      WHERE p.days_to_maturity <= 1
+         AND p.bucket IN ('Assets', 'SoF')
      GROUP BY p.bucket
  )
  SELECT
+     COALESCE(SUM(CASE WHEN bucket = 'Assets' THEN amt END), 0) AS assets_t1,
+     COALESCE(SUM(CASE WHEN bucket = 'SoF' THEN amt END), 0) AS sof_t1,
+     COALESCE(SUM(CASE WHEN bucket = 'Assets' THEN amt END), 0)
+       - COALESCE(SUM(CASE WHEN bucket = 'SoF' THEN amt END), 0) AS net_gap_t1
  FROM t1;
  """

  LADDER_SQL = f"""
+ WITH maxd AS (
+     SELECT MAX(as_of_date) AS d FROM {VIEW_FQN}
+ )
  SELECT
      CASE
          WHEN p.days_to_maturity <= 1 THEN 'T+1'

      p.bucket,
      SUM(p.Portfolio_value) AS amount
  FROM {VIEW_FQN} p
+ INNER JOIN maxd m ON p.as_of_date = m.d
+ WHERE p.bucket IN ('Assets', 'SoF')
+ GROUP BY 1, 2
+ ORDER BY
+     CASE time_bucket
+         WHEN 'T+1' THEN 1
+         WHEN 'T+2..7' THEN 2
+         WHEN 'T+8..30' THEN 3
+         ELSE 4
+     END,
      p.bucket;
  """

  T1_BY_MONTH_SQL = f"""
+ WITH maxd AS (
+     SELECT MAX(as_of_date) AS d FROM {VIEW_FQN}
+ )
+ SELECT
+     p.bucket,
+     p.months,
+     SUM(p.Portfolio_value) AS amount
  FROM {VIEW_FQN} p
+ INNER JOIN maxd m ON p.as_of_date = m.d
  WHERE p.days_to_maturity <= 1
+     AND p.bucket IN ('Assets', 'SoF')
+     AND p.months IS NOT NULL
+ GROUP BY p.bucket, p.months
  ORDER BY p.bucket, amount DESC
  LIMIT 50;
  """

  T1_BY_SEGMENT_SQL = f"""
+ WITH maxd AS (
+     SELECT MAX(as_of_date) AS d FROM {VIEW_FQN}
+ )
+ SELECT
+     p.bucket,
+     p.segments,
+     SUM(p.Portfolio_value) AS amount
  FROM {VIEW_FQN} p
+ INNER JOIN maxd m ON p.as_of_date = m.d
  WHERE p.days_to_maturity <= 1
+     AND p.bucket IN ('Assets', 'SoF')
+     AND p.segments IS NOT NULL
+ GROUP BY p.bucket, p.segments
  ORDER BY p.bucket, amount DESC
  LIMIT 50;
  """

  T1_BY_CCY_SQL = f"""
+ WITH maxd AS (
+     SELECT MAX(as_of_date) AS d FROM {VIEW_FQN}
+ )
+ SELECT
+     p.bucket,
+     p.currency,
+     SUM(p.Portfolio_value) AS amount
  FROM {VIEW_FQN} p
+ INNER JOIN maxd m ON p.as_of_date = m.d
  WHERE p.days_to_maturity <= 1
+     AND p.bucket IN ('Assets', 'SoF')
+     AND p.currency IS NOT NULL
+ GROUP BY p.bucket, p.currency
  ORDER BY p.bucket, amount DESC;
  """

  IRR_SQL = f"""
+ WITH maxd AS (
+     SELECT MAX(as_of_date) AS d FROM {VIEW_FQN}
+ ),
  base AS (
      SELECT
          p.bucket,
          p.Portfolio_value AS pv,
          (p.Interest_rate / 100.0) AS y,
          CASE
+             WHEN p.days_to_maturity IS NOT NULL THEN p.days_to_maturity / 365.0
+             WHEN p.months IS NOT NULL THEN p.months / 12.0
              ELSE NULL
          END AS T_years
      FROM {VIEW_FQN} p
+     INNER JOIN maxd m ON p.as_of_date = m.d
      WHERE p.Portfolio_value IS NOT NULL
+         AND p.bucket IN ('Assets', 'SoF')
  ),
  metrics AS (
      SELECT
          bucket,
          pv,
+         CASE
+             WHEN T_years IS NULL THEN NULL
+             WHEN y IS NULL THEN T_years
+             ELSE T_years / NULLIF(1.0 + y, 0)
+         END AS dur_mod,
+         CASE
+             WHEN T_years IS NULL THEN NULL
+             WHEN y IS NULL THEN T_years * (T_years + 1.0)
+             ELSE (T_years * (T_years + 1.0)) / NULLIF(POWER(1.0 + y, 2), 0)
+         END AS convexity_approx,
+         CASE
+             WHEN T_years IS NULL THEN NULL
+             WHEN y IS NULL THEN pv * T_years * 0.0001
+             ELSE pv * (T_years / NULLIF(1.0 + y, 0)) * 0.0001
+         END AS dv01
      FROM base
  ),
  agg AS (
      SELECT
          bucket,
          SUM(pv) AS pv_sum,
+         SUM(pv * dur_mod) / NULLIF(SUM(pv), 0) AS dur_mod_port,
          SUM(dv01) AS dv01_sum
      FROM metrics
      GROUP BY bucket
  )
  SELECT
+     COALESCE(MAX(CASE WHEN bucket = 'Assets' THEN pv_sum END), 0) AS assets_pv,
+     COALESCE(MAX(CASE WHEN bucket = 'SoF' THEN pv_sum END), 0) AS sof_pv,
+     COALESCE(MAX(CASE WHEN bucket = 'Assets' THEN dur_mod_port END), 0) AS assets_dur_mod,
+     COALESCE(MAX(CASE WHEN bucket = 'SoF' THEN dur_mod_port END), 0) AS sof_dur_mod,
+     COALESCE(MAX(CASE WHEN bucket = 'Assets' THEN dur_mod_port END), 0)
+       - COALESCE(MAX(CASE WHEN bucket = 'SoF' THEN dur_mod_port END), 0) AS duration_gap,
+     COALESCE(MAX(CASE WHEN bucket = 'Assets' THEN dv01_sum END), 0)
+       - COALESCE(MAX(CASE WHEN bucket = 'SoF' THEN dv01_sum END), 0) AS net_dv01
  FROM agg;
  """

+ # FIXED: Complete the SHOCK_SQL query
  SHOCK_SQL = f"""
+ WITH maxd AS (
+     SELECT MAX(as_of_date) AS d FROM {VIEW_FQN}
+ ),
  base AS (
      SELECT
          p.bucket,
          p.Portfolio_value AS pv,
          (p.Interest_rate / 100.0) AS y,
          CASE
+             WHEN p.days_to_maturity IS NOT NULL THEN p.days_to_maturity / 365.0
+             WHEN p.months IS NOT NULL THEN p.months / 12.0
+             ELSE NULL
          END AS T_years
      FROM {VIEW_FQN} p
+     INNER JOIN maxd m ON p.as_of_date = m.d
+     WHERE p.Portfolio_value IS NOT NULL
+         AND p.bucket IN ('Assets', 'SoF')
  ),
  k AS (
      SELECT
+         bucket,
+         pv,
+         CASE
+             WHEN T_years IS NULL THEN NULL
+             WHEN y IS NULL THEN T_years
+             ELSE T_years / NULLIF(1.0 + y, 0)
+         END AS dur_mod,
+         CASE
+             WHEN T_years IS NULL THEN NULL
+             WHEN y IS NULL THEN T_years * (T_years + 1.0)
+             ELSE (T_years * (T_years + 1.0)) / NULLIF(POWER(1.0 + y, 2), 0)
+         END AS convexity_approx
      FROM base
+ )
+ SELECT
+     bucket,
+     SUM((- pv * dur_mod * 0.01) + (0.5 * pv * convexity_approx * POWER(0.01, 2))) AS dPV_up_100bp,
+     SUM((+ pv * dur_mod * 0.01) + (0.5 * pv * convexity_approx * POWER(-0.01, 2))) AS dPV_dn_100bp
+ FROM k
+ WHERE dur_mod IS NOT NULL
+ GROUP BY bucket
+ ORDER BY bucket;
+ """
+
+ # -------------------------------------------------------------------
+ # Initialize database view
+ # -------------------------------------------------------------------
+ def initialize_database():
+     """
+     Initialize database view with error handling.
+
+     Raises:
+         Exception: If view creation fails
+     """
+     try:
+         with get_db_connection() as conn:
+             execute_query(conn, CREATE_VIEW_SQL, "View creation")
+             logger.info(f"Successfully created/updated view: {VIEW_FQN}")
+     except Exception as e:
+         logger.error(f"Failed to initialize database: {str(e)}")
+         raise
+
+
+ # -------------------------------------------------------------------
+ # Data validation utilities
+ # -------------------------------------------------------------------
+ def validate_dataframe(df: pd.DataFrame, expected_columns: list,
+                        min_rows: int = 0) -> bool:
+     """
+     Validate DataFrame structure and content.
+
+     Args:
+         df: DataFrame to validate
+         expected_columns: List of required column names
+         min_rows: Minimum number of rows expected
+
+     Returns:
+         bool: True if valid
+
+     Raises:
+         ValueError: If validation fails
+     """
+     if df is None or df.empty:
+         if min_rows > 0:
+             raise ValueError("DataFrame is empty but data was expected")
+         return True
+
+     missing_cols = set(expected_columns) - set(df.columns)
+     if missing_cols:
+         raise ValueError(f"Missing required columns: {missing_cols}")
+
+     if len(df) < min_rows:
+         raise ValueError(f"Expected at least {min_rows} rows, got {len(df)}")
+
+     return True
+
+
+ # -------------------------------------------------------------------
+ # Business logic functions with validation
+ # -------------------------------------------------------------------
+ def get_max_date() -> Optional[datetime]:
+     """
+     Get the maximum as_of_date from the dataset.
+
+     Returns:
+         datetime or None: Maximum date if available
+     """
+     try:
+         with get_db_connection() as conn:
+             df = execute_query(conn, MAX_DATE_SQL, "Max date query")
+             if not df.empty and df.iloc[0, 0] is not None:
+                 return pd.to_datetime(df.iloc[0, 0])
+             return None
+     except Exception as e:
+         logger.error(f"Error fetching max date: {str(e)}")
+         return None
+
+
+ def get_kpi_metrics() -> Optional[KPIMetrics]:
+     """
+     Fetch T+1 KPI metrics with validation.
+
+     Returns:
+         KPIMetrics or None: Validated KPI metrics
+     """
+     try:
+         with get_db_connection() as conn:
+             df = execute_query(conn, KPI_SQL, "KPI query")
+             validate_dataframe(df, ['assets_t1', 'sof_t1', 'net_gap_t1'], min_rows=1)
+
+             metrics = KPIMetrics(**df.iloc[0].to_dict())
+             logger.info(f"KPI Metrics: Assets={metrics.assets_t1:,.0f}, "
+                         f"SoF={metrics.sof_t1:,.0f}, Gap={metrics.net_gap_t1:,.0f}")
+             return metrics
+     except Exception as e:
+         logger.error(f"Error fetching KPI metrics: {str(e)}")
+         return None
+
+
+ # -------------------------------------------------------------------
+ # Helper function for safe numeric formatting
+ # -------------------------------------------------------------------
+ def safe_format_number(value: Any, decimals: int = 2,
+                        prefix: str = "", suffix: str = "") -> str:
+     """
+     Safely format numeric values with error handling.
+
+     Args:
+         value: Value to format
+         decimals: Number of decimal places
+         prefix: String to prepend
+         suffix: String to append
+
+     Returns:
+         str: Formatted string
+     """
+     try:
+         if value is None or pd.isna(value):
+             return "N/A"
+         num_val = float(value)
+         if np.isinf(num_val):
+             return "∞" if num_val > 0 else "-∞"
+         formatted = f"{num_val:,.{decimals}f}"
+         return f"{prefix}{formatted}{suffix}"
+     except (ValueError, TypeError):
+         return "N/A"
+
+
+ # -------------------------------------------------------------------
+ # Add health check endpoint
+ # -------------------------------------------------------------------
+ def health_check() -> Dict[str, Any]:
+     """
+     Perform application health check.
+
+     Returns:
+         dict: Health status information
+     """
+     health = {
+         "status": "unhealthy",
+         "timestamp": datetime.now().isoformat(),
+         "checks": {}
+     }
+
+     # Check database connectivity
+     try:
+         with get_db_connection() as conn:
+             conn.execute("SELECT 1").fetchone()
+         health["checks"]["database"] = "ok"
+     except Exception as e:
+         health["checks"]["database"] = f"error: {str(e)}"
+         return health
+
+     # Check view exists
+     try:
+         max_date = get_max_date()
+         health["checks"]["view"] = "ok" if max_date else "no data"
+         health["max_date"] = max_date.isoformat() if max_date else None
+     except Exception as e:
+         health["checks"]["view"] = f"error: {str(e)}"
+         return health
+
+     # Check export directory
+     health["checks"]["export_dir"] = "ok" if EXPORT_DIR.exists() else "missing"
+
+     # Overall status
+     if all(v == "ok" or v == "no data" for v in health["checks"].values()):
+         health["status"] = "healthy"
+
+     return health
+
+
+ if __name__ == "__main__":
+     # Initialize on startup
+     try:
+         logger.info("Starting ALCO Dashboard application")
+         initialize_database()
+
+         # Perform health check
+         health = health_check()
+         logger.info(f"Health check: {health}")
+
+         if health["status"] != "healthy":
+             logger.warning("Application health check failed")
+
+     except Exception as e:
+         logger.critical(f"Application startup failed: {str(e)}")
+         raise
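
For readers checking the duration, convexity, and DV01 arithmetic that IRR_SQL and SHOCK_SQL encode, here is a minimal Python sketch of the same per-position math; the position values are invented for illustration and are not part of the commit.

    # Per-position math mirroring IRR_SQL / SHOCK_SQL, with made-up inputs
    pv = 1_000_000.0          # Portfolio_value
    rate_pct = 10.0           # Interest_rate, in percent
    days_to_maturity = 365

    y = rate_pct / 100.0
    T_years = days_to_maturity / 365.0

    dur_mod = T_years / (1.0 + y)                            # modified duration
    convexity = T_years * (T_years + 1.0) / (1.0 + y) ** 2   # convexity approximation
    dv01 = pv * dur_mod * 0.0001                             # PV impact of a 1 bp move

    # Parallel +/-100 bp shocks: duration term plus convexity correction, as in SHOCK_SQL
    d_pv_up = -pv * dur_mod * 0.01 + 0.5 * pv * convexity * 0.01 ** 2
    d_pv_dn = +pv * dur_mod * 0.01 + 0.5 * pv * convexity * 0.01 ** 2

    print(f"dur_mod={dur_mod:.4f}, dv01={dv01:.2f}, up={d_pv_up:,.0f}, dn={d_pv_dn:,.0f}")
    # -> dur_mod=0.9091, dv01=90.91, up=-9,008, dn=9,174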
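
The Gradio UI portion of app.py is outside this hunk, so the following is only a rough, hypothetical sketch of how the new helpers could be wired into a callback; the names and layout are assumptions, not the committed code.

    import gradio as gr

    def refresh_kpis() -> str:
        # get_kpi_metrics() returns a validated KPIMetrics instance, or None on failure
        metrics = get_kpi_metrics()
        if metrics is None:
            return "KPI query failed; check the application logs."
        return (f"Assets T+1: {safe_format_number(metrics.assets_t1)} | "
                f"SoF T+1: {safe_format_number(metrics.sof_t1)} | "
                f"Net gap: {safe_format_number(metrics.net_gap_t1)}")

    with gr.Blocks() as demo:
        kpi_box = gr.Markdown()
        gr.Button("Refresh KPIs").click(refresh_kpis, outputs=kpi_box)

    # demo.launch() would follow in the real app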