AshenH committed on
Commit 95bbd6d · verified · 1 Parent(s): d1cf18a

Update app.py

Files changed (1):
  1. app.py +488 -518

app.py CHANGED
@@ -1,603 +1,573 @@
  import os
- import logging
  from datetime import datetime
  from pathlib import Path
- from typing import Dict, Any, Tuple, Optional
- from contextlib import contextmanager
- import signal
- from functools import wraps

  import duckdb
  import pandas as pd
  import numpy as np
  import matplotlib.pyplot as plt
  import gradio as gr
- from pydantic import BaseModel, Field
  from reportlab.lib.pagesizes import A4
  from reportlab.lib.units import mm
  from reportlab.pdfgen import canvas
 
- # -------------------------------------------------------------------
- # Logging Configuration
- # -------------------------------------------------------------------
- logging.basicConfig(
-     level=logging.INFO,
-     format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
- )
- logger = logging.getLogger(__name__)
-
  # -------------------------------------------------------------------
  # Basic configuration
  # -------------------------------------------------------------------
  APP_TITLE = "ALCO Liquidity & Interest-Rate Risk Dashboard"
- TABLE_FQN = "my_db.main.masterdataset_v"
- VIEW_FQN = "my_db.main.positions_v"
  EXPORT_DIR = Path("exports")
  EXPORT_DIR.mkdir(exist_ok=True)

- # Query timeout in seconds
- QUERY_TIMEOUT_SECONDS = 30

  # -------------------------------------------------------------------
- # Query Timeout Handler (since DuckDB doesn't support statement_timeout)
  # -------------------------------------------------------------------
- class QueryTimeoutError(Exception):
-     """Raised when a query exceeds the timeout limit"""
-     pass
-

- def with_timeout(timeout_seconds: int = QUERY_TIMEOUT_SECONDS):
-     """
-     Decorator to add timeout to query functions.
-     Note: This is a simplified version. For production, consider using
-     concurrent.futures or multiprocessing for better timeout handling.
-
-     Args:
-         timeout_seconds: Maximum execution time in seconds
-     """
-     def decorator(func):
-         @wraps(func)
-         def wrapper(*args, **kwargs):
-             # For now, we'll log the timeout but not enforce it
-             # since signal.alarm doesn't work well with threads
-             # and DuckDB doesn't support native query timeouts
-             logger.debug(f"Starting {func.__name__} with {timeout_seconds}s timeout")
-             start_time = datetime.now()
-
-             try:
-                 result = func(*args, **kwargs)
-                 elapsed = (datetime.now() - start_time).total_seconds()
-
-                 if elapsed > timeout_seconds:
-                     logger.warning(
-                         f"{func.__name__} exceeded timeout: {elapsed:.2f}s > {timeout_seconds}s"
-                     )
-                 else:
-                     logger.debug(f"{func.__name__} completed in {elapsed:.2f}s")
-
-                 return result
-             except Exception as e:
-                 elapsed = (datetime.now() - start_time).total_seconds()
-                 logger.error(f"{func.__name__} failed after {elapsed:.2f}s: {str(e)}")
-                 raise
-
-         return wrapper
-     return decorator
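
The removed docstring already names the sturdier route; for reference, a minimal sketch of concurrent.futures-based enforcement (illustrative only, `run_with_timeout` is not part of this file's history):

from concurrent.futures import ThreadPoolExecutor, TimeoutError as FutureTimeout

_POOL = ThreadPoolExecutor(max_workers=4)

def run_with_timeout(func, timeout_seconds=QUERY_TIMEOUT_SECONDS, *args, **kwargs):
    # Bounds the *wait*, not the query itself: DuckDB has no statement_timeout,
    # so the worker thread may keep running after we raise.
    future = _POOL.submit(func, *args, **kwargs)
    try:
        return future.result(timeout=timeout_seconds)
    except FutureTimeout:
        raise QueryTimeoutError(f"{func.__name__} exceeded {timeout_seconds}s") from None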
 
  # -------------------------------------------------------------------
- # Data Models
  # -------------------------------------------------------------------
- class KPIMetrics(BaseModel):
-     """T+1 Key Performance Indicators"""
-     assets_t1: float = Field(ge=0, description="T+1 Assets")
-     sof_t1: float = Field(ge=0, description="T+1 Sources of Funds")
-     net_gap_t1: float = Field(description="T+1 Net Gap")
-
- class IRRMetrics(BaseModel):
-     """Interest Rate Risk Metrics"""
-     assets_pv: float = Field(ge=0)
-     sof_pv: float = Field(ge=0)
-     assets_dur_mod: float = Field(ge=0)
-     sof_dur_mod: float = Field(ge=0)
-     duration_gap: float
-     net_dv01: float

- # -------------------------------------------------------------------
- # MotherDuck connection with proper error handling
- # -------------------------------------------------------------------
- @contextmanager
- def get_db_connection():
      """
-     Context manager for database connections with proper cleanup.
-
-     Yields:
-         duckdb.DuckDBPyConnection: Active database connection
-
-     Raises:
-         RuntimeError: If MOTHERDUCK_TOKEN is not configured
-         Exception: For database connection errors
      """
-     conn = None
-     try:
-         token = os.environ.get("MOTHERDUCK_TOKEN", "")
-         if not token:
-             logger.error("MOTHERDUCK_TOKEN environment variable not set")
-             raise RuntimeError(
-                 "MOTHERDUCK_TOKEN is not set. Please add it as a Space secret."
-             )
-
-         logger.info("Establishing MotherDuck connection")
-         conn = duckdb.connect(f"md:?motherduck_token={token}")
-
-         # Note: DuckDB/MotherDuck doesn't support statement_timeout like PostgreSQL
-         # Query timeouts should be handled at application level with threading/async
-
-         yield conn
-
-     except Exception as e:
-         logger.error(f"Database connection error: {str(e)}")
-         raise
-     finally:
-         if conn:
-             try:
-                 conn.close()
-                 logger.info("Database connection closed")
-             except Exception as e:
-                 logger.warning(f"Error closing connection: {str(e)}")
 

- def execute_query(conn: duckdb.DuckDBPyConnection, query: str,
-                   description: str = "Query") -> pd.DataFrame:
-     """
-     Execute a query with error handling and logging.
-
-     Args:
-         conn: Database connection
-         query: SQL query to execute
-         description: Human-readable description for logging
-
-     Returns:
-         pd.DataFrame: Query results
-
-     Raises:
-         Exception: For query execution errors
-     """
-     start_time = datetime.now()
-     try:
-         logger.info(f"Executing {description}")
-         result = conn.execute(query).df()
-         elapsed = (datetime.now() - start_time).total_seconds()
-         logger.info(f"{description} completed: {len(result)} rows in {elapsed:.2f}s")
-
-         # Warn if query is slow
-         if elapsed > QUERY_TIMEOUT_SECONDS:
-             logger.warning(f"{description} exceeded timeout threshold: {elapsed:.2f}s")
-
-         return result
-     except Exception as e:
-         elapsed = (datetime.now() - start_time).total_seconds()
-         logger.error(f"{description} failed after {elapsed:.2f}s: {str(e)}")
-         raise Exception(f"Query execution failed for {description}: {str(e)}")
 

- # -------------------------------------------------------------------
- # SQL snippets with fixed syntax
- # -------------------------------------------------------------------
- CREATE_VIEW_SQL = f"""
- CREATE OR REPLACE VIEW {VIEW_FQN} AS
- SELECT
-     as_of_date,
-     product,
-     months,
-     segments,
-     currency,
-     Portfolio_value,
-     Interest_rate,
-     days_to_maturity,
-     CASE
-         WHEN LOWER(TRIM(product)) IN ('fd','term_deposit','td','savings','current','call','repo_liab') THEN 'SoF'
-         WHEN LOWER(TRIM(product)) IN ('loan','overdraft','advances','bills','bill','tbond','t-bond','tbill','t-bill','repo_asset', 'assets') THEN 'Assets'
-         ELSE 'Unknown'
-     END AS bucket
- FROM {TABLE_FQN}
- WHERE Portfolio_value IS NOT NULL;
- """
-
- MAX_DATE_SQL = f"""
- SELECT MAX(as_of_date) AS d
- FROM {VIEW_FQN};
- """
-
- KPI_SQL = f"""
- WITH maxd AS (
-     SELECT MAX(as_of_date) AS d FROM {VIEW_FQN}
- ),
- t1 AS (
-     SELECT
-         p.bucket,
-         SUM(p.Portfolio_value) AS amt
-     FROM {VIEW_FQN} p
-     INNER JOIN maxd m ON p.as_of_date = m.d
-     WHERE p.days_to_maturity <= 1
-       AND p.bucket IN ('Assets', 'SoF')
-     GROUP BY p.bucket
- )
- SELECT
-     COALESCE(SUM(CASE WHEN bucket = 'Assets' THEN amt END), 0) AS assets_t1,
-     COALESCE(SUM(CASE WHEN bucket = 'SoF' THEN amt END), 0) AS sof_t1,
-     COALESCE(SUM(CASE WHEN bucket = 'Assets' THEN amt END), 0)
-       - COALESCE(SUM(CASE WHEN bucket = 'SoF' THEN amt END), 0) AS net_gap_t1
- FROM t1;
- """
-
- LADDER_SQL = f"""
- WITH maxd AS (
-     SELECT MAX(as_of_date) AS d FROM {VIEW_FQN}
- )
- SELECT
-     CASE
-         WHEN p.days_to_maturity <= 1 THEN 'T+1'
-         WHEN p.days_to_maturity BETWEEN 2 AND 7 THEN 'T+2..7'
-         WHEN p.days_to_maturity BETWEEN 8 AND 30 THEN 'T+8..30'
-         ELSE 'T+31+'
-     END AS time_bucket,
-     p.bucket,
-     SUM(p.Portfolio_value) AS amount
- FROM {VIEW_FQN} p
- INNER JOIN maxd m ON p.as_of_date = m.d
- WHERE p.bucket IN ('Assets', 'SoF')
- GROUP BY 1, 2
- ORDER BY
-     CASE time_bucket
-         WHEN 'T+1' THEN 1
-         WHEN 'T+2..7' THEN 2
-         WHEN 'T+8..30' THEN 3
-         ELSE 4
-     END,
-     p.bucket;
- """
-
- T1_BY_MONTH_SQL = f"""
- WITH maxd AS (
-     SELECT MAX(as_of_date) AS d FROM {VIEW_FQN}
- )
- SELECT
-     p.bucket,
-     p.months,
-     SUM(p.Portfolio_value) AS amount
- FROM {VIEW_FQN} p
- INNER JOIN maxd m ON p.as_of_date = m.d
- WHERE p.days_to_maturity <= 1
-   AND p.bucket IN ('Assets', 'SoF')
-   AND p.months IS NOT NULL
- GROUP BY p.bucket, p.months
- ORDER BY p.bucket, amount DESC
- LIMIT 50;
- """
-
- T1_BY_SEGMENT_SQL = f"""
- WITH maxd AS (
-     SELECT MAX(as_of_date) AS d FROM {VIEW_FQN}
- )
- SELECT
-     p.bucket,
-     p.segments,
-     SUM(p.Portfolio_value) AS amount
- FROM {VIEW_FQN} p
- INNER JOIN maxd m ON p.as_of_date = m.d
- WHERE p.days_to_maturity <= 1
-   AND p.bucket IN ('Assets', 'SoF')
-   AND p.segments IS NOT NULL
- GROUP BY p.bucket, p.segments
- ORDER BY p.bucket, amount DESC
- LIMIT 50;
- """
-
- T1_BY_CCY_SQL = f"""
- WITH maxd AS (
-     SELECT MAX(as_of_date) AS d FROM {VIEW_FQN}
- )
- SELECT
-     p.bucket,
-     p.currency,
-     SUM(p.Portfolio_value) AS amount
- FROM {VIEW_FQN} p
- INNER JOIN maxd m ON p.as_of_date = m.d
- WHERE p.days_to_maturity <= 1
-   AND p.bucket IN ('Assets', 'SoF')
-   AND p.currency IS NOT NULL
- GROUP BY p.bucket, p.currency
- ORDER BY p.bucket, amount DESC;
- """
-
- IRR_SQL = f"""
- WITH maxd AS (
-     SELECT MAX(as_of_date) AS d FROM {VIEW_FQN}
- ),
  base AS (
      SELECT
          p.bucket,
          p.Portfolio_value AS pv,
-         (p.Interest_rate / 100.0) AS y,
-         CASE
-             WHEN p.days_to_maturity IS NOT NULL THEN p.days_to_maturity / 365.0
-             WHEN p.months IS NOT NULL THEN p.months / 12.0
-             ELSE NULL
-         END AS T_years
-     FROM {VIEW_FQN} p
-     INNER JOIN maxd m ON p.as_of_date = m.d
      WHERE p.Portfolio_value IS NOT NULL
-       AND p.bucket IN ('Assets', 'SoF')
  ),
  metrics AS (
      SELECT
          bucket,
          pv,
-         CASE
-             WHEN T_years IS NULL THEN NULL
-             WHEN y IS NULL THEN T_years
-             ELSE T_years / NULLIF(1.0 + y, 0)
-         END AS dur_mod,
-         CASE
-             WHEN T_years IS NULL THEN NULL
-             WHEN y IS NULL THEN T_years * (T_years + 1.0)
-             ELSE (T_years * (T_years + 1.0)) / NULLIF(POWER(1.0 + y, 2), 0)
-         END AS convexity_approx,
-         CASE
-             WHEN T_years IS NULL THEN NULL
-             WHEN y IS NULL THEN pv * T_years * 0.0001
-             ELSE pv * (T_years / NULLIF(1.0 + y, 0)) * 0.0001
-         END AS dv01
      FROM base
  ),
  agg AS (
      SELECT
          bucket,
          SUM(pv) AS pv_sum,
-         SUM(pv * dur_mod) / NULLIF(SUM(pv), 0) AS dur_mod_port,
          SUM(dv01) AS dv01_sum
      FROM metrics
      GROUP BY bucket
  )
  SELECT
-     COALESCE(MAX(CASE WHEN bucket = 'Assets' THEN pv_sum END), 0) AS assets_pv,
-     COALESCE(MAX(CASE WHEN bucket = 'SoF' THEN pv_sum END), 0) AS sof_pv,
-     COALESCE(MAX(CASE WHEN bucket = 'Assets' THEN dur_mod_port END), 0) AS assets_dur_mod,
-     COALESCE(MAX(CASE WHEN bucket = 'SoF' THEN dur_mod_port END), 0) AS sof_dur_mod,
-     COALESCE(MAX(CASE WHEN bucket = 'Assets' THEN dur_mod_port END), 0)
-       - COALESCE(MAX(CASE WHEN bucket = 'SoF' THEN dur_mod_port END), 0) AS duration_gap,
-     COALESCE(MAX(CASE WHEN bucket = 'Assets' THEN dv01_sum END), 0)
-       - COALESCE(MAX(CASE WHEN bucket = 'SoF' THEN dv01_sum END), 0) AS net_dv01
- FROM agg;
- """
-
- # FIXED: Complete the SHOCK_SQL query
- SHOCK_SQL = f"""
- WITH maxd AS (
-     SELECT MAX(as_of_date) AS d FROM {VIEW_FQN}
- ),
  base AS (
      SELECT
          p.bucket,
          p.Portfolio_value AS pv,
-         (p.Interest_rate / 100.0) AS y,
-         CASE
-             WHEN p.days_to_maturity IS NOT NULL THEN p.days_to_maturity / 365.0
-             WHEN p.months IS NOT NULL THEN p.months / 12.0
-             ELSE NULL
-         END AS T_years
-     FROM {VIEW_FQN} p
-     INNER JOIN maxd m ON p.as_of_date = m.d
-     WHERE p.Portfolio_value IS NOT NULL
-       AND p.bucket IN ('Assets', 'SoF')
  ),
  k AS (
      SELECT
-         bucket,
-         pv,
-         CASE
-             WHEN T_years IS NULL THEN NULL
-             WHEN y IS NULL THEN T_years
-             ELSE T_years / NULLIF(1.0 + y, 0)
-         END AS dur_mod,
-         CASE
-             WHEN T_years IS NULL THEN NULL
-             WHEN y IS NULL THEN T_years * (T_years + 1.0)
-             ELSE (T_years * (T_years + 1.0)) / NULLIF(POWER(1.0 + y, 2), 0)
-         END AS convexity_approx
      FROM base
  )
- SELECT
-     bucket,
-     SUM((- pv * dur_mod * 0.01) + (0.5 * pv * convexity_approx * POWER(0.01, 2))) AS dPV_up_100bp,
-     SUM((+ pv * dur_mod * 0.01) + (0.5 * pv * convexity_approx * POWER(-0.01, 2))) AS dPV_dn_100bp
- FROM k
- WHERE dur_mod IS NOT NULL
- GROUP BY bucket
- ORDER BY bucket;
- """
419
-
420
- # -------------------------------------------------------------------
- # Initialize database view
- # -------------------------------------------------------------------
- def initialize_database():
-     """
-     Initialize database view with error handling.
-
-     Raises:
-         Exception: If view creation fails
      """
-     try:
-         with get_db_connection() as conn:
-             execute_query(conn, CREATE_VIEW_SQL, "View creation")
-         logger.info(f"Successfully created/updated view: {VIEW_FQN}")
-     except Exception as e:
-         logger.error(f"Failed to initialize database: {str(e)}")
-         raise


  # -------------------------------------------------------------------
- # Data validation utilities
  # -------------------------------------------------------------------
- def validate_dataframe(df: pd.DataFrame, expected_columns: list,
-                        min_rows: int = 0) -> bool:
-     """
-     Validate DataFrame structure and content.
-
-     Args:
-         df: DataFrame to validate
-         expected_columns: List of required column names
-         min_rows: Minimum number of rows expected
-
-     Returns:
-         bool: True if valid
-
-     Raises:
-         ValueError: If validation fails
-     """
-     if df is None or df.empty:
-         if min_rows > 0:
-             raise ValueError("DataFrame is empty but data was expected")
-         return True
-
-     missing_cols = set(expected_columns) - set(df.columns)
-     if missing_cols:
-         raise ValueError(f"Missing required columns: {missing_cols}")
-
-     if len(df) < min_rows:
-         raise ValueError(f"Expected at least {min_rows} rows, got {len(df)}")
-
-     return True


  # -------------------------------------------------------------------
- # Business logic functions with validation
  # -------------------------------------------------------------------
- def get_max_date() -> Optional[datetime]:
-     """
-     Get the maximum as_of_date from the dataset.
-
-     Returns:
-         datetime or None: Maximum date if available
-     """
-     try:
-         with get_db_connection() as conn:
-             df = execute_query(conn, MAX_DATE_SQL, "Max date query")
-         if not df.empty and df.iloc[0, 0] is not None:
-             return pd.to_datetime(df.iloc[0, 0])
-         return None
-     except Exception as e:
-         logger.error(f"Error fetching max date: {str(e)}")
-         return None


- def get_kpi_metrics() -> Optional[KPIMetrics]:
-     """
-     Fetch T+1 KPI metrics with validation.
-
-     Returns:
-         KPIMetrics or None: Validated KPI metrics
-     """
-     try:
-         with get_db_connection() as conn:
-             df = execute_query(conn, KPI_SQL, "KPI query")
-         validate_dataframe(df, ['assets_t1', 'sof_t1', 'net_gap_t1'], min_rows=1)
-
-         metrics = KPIMetrics(**df.iloc[0].to_dict())
-         logger.info(f"KPI Metrics: Assets={metrics.assets_t1:,.0f}, "
-                     f"SoF={metrics.sof_t1:,.0f}, Gap={metrics.net_gap_t1:,.0f}")
-         return metrics
-     except Exception as e:
-         logger.error(f"Error fetching KPI metrics: {str(e)}")
-         return None


  # -------------------------------------------------------------------
- # Helper function for safe numeric formatting
  # -------------------------------------------------------------------
- def safe_format_number(value: Any, decimals: int = 2,
-                        prefix: str = "", suffix: str = "") -> str:
-     """
-     Safely format numeric values with error handling.
-
-     Args:
-         value: Value to format
-         decimals: Number of decimal places
-         prefix: String to prepend
-         suffix: String to append
-
-     Returns:
-         str: Formatted string
-     """
-     try:
-         if value is None or pd.isna(value):
-             return "N/A"
-         num_val = float(value)
-         if np.isinf(num_val):
-             return "∞" if num_val > 0 else "-∞"
-         formatted = f"{num_val:,.{decimals}f}"
-         return f"{prefix}{formatted}{suffix}"
-     except (ValueError, TypeError):
-         return "N/A"


  # -------------------------------------------------------------------
- # Add health check endpoint
  # -------------------------------------------------------------------
- def health_check() -> Dict[str, Any]:
-     """
-     Perform application health check.
-
-     Returns:
-         dict: Health status information
-     """
-     health = {
-         "status": "unhealthy",
-         "timestamp": datetime.now().isoformat(),
-         "checks": {}
-     }
-
-     # Check database connectivity
-     try:
-         with get_db_connection() as conn:
-             conn.execute("SELECT 1").fetchone()
-         health["checks"]["database"] = "ok"
-     except Exception as e:
-         health["checks"]["database"] = f"error: {str(e)}"
-         return health
-
-     # Check view exists
-     try:
-         max_date = get_max_date()
-         health["checks"]["view"] = "ok" if max_date else "no data"
-         health["max_date"] = max_date.isoformat() if max_date else None
-     except Exception as e:
-         health["checks"]["view"] = f"error: {str(e)}"
-         return health
-
-     # Check export directory
-     health["checks"]["export_dir"] = "ok" if EXPORT_DIR.exists() else "missing"
-
-     # Overall status
-     if all(v == "ok" or v == "no data" for v in health["checks"].values()):
-         health["status"] = "healthy"
-
-     return health
-

  if __name__ == "__main__":
-     # Initialize on startup
-     try:
-         logger.info("Starting ALCO Dashboard application")
-         initialize_database()
-
-         # Perform health check
-         health = health_check()
-         logger.info(f"Health check: {health}")
-
-         if health["status"] != "healthy":
-             logger.warning("Application health check failed")
-
-     except Exception as e:
-         logger.critical(f"Application startup failed: {str(e)}")
-         raise
 
  import os
+ import sys
  from datetime import datetime
  from pathlib import Path
+ from typing import Tuple, Any, Dict, List

  import duckdb
  import pandas as pd
  import numpy as np
  import matplotlib.pyplot as plt
  import gradio as gr
+ from pydantic import BaseModel
  from reportlab.lib.pagesizes import A4
  from reportlab.lib.units import mm
  from reportlab.pdfgen import canvas

  # -------------------------------------------------------------------
  # Basic configuration
  # -------------------------------------------------------------------
  APP_TITLE = "ALCO Liquidity & Interest-Rate Risk Dashboard"
+ TABLE_FQN = "my_db.main.masterdataset_v"   # <- your source
+ VIEW_FQN = "my_db.main.positions_v"        # <- normalized view we create
  EXPORT_DIR = Path("exports")
  EXPORT_DIR.mkdir(exist_ok=True)

  # -------------------------------------------------------------------
+ # MotherDuck connection
  # -------------------------------------------------------------------
+ def connect_md() -> duckdb.DuckDBPyConnection:
+     token = os.environ.get("MOTHERDUCK_TOKEN", "")
+     if not token:
+         raise RuntimeError("MOTHERDUCK_TOKEN is not set. Add it as a Space secret.")
+     try:
+         conn = duckdb.connect(f"md:?motherduck_token={token}")
+         return conn
+     except Exception as e:
+         print("ERROR: Unable to connect to MotherDuck:", e, file=sys.stderr)
+         raise
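
A quick local smoke test for the helper above (a sketch; assumes MOTHERDUCK_TOKEN is exported in the environment, the value below is a placeholder):

import os
os.environ.setdefault("MOTHERDUCK_TOKEN", "md_...")  # placeholder, not a real token
conn = connect_md()
print(conn.execute("SELECT 1").fetchone())  # -> (1,) when the connection works
conn.close()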
 
  # -------------------------------------------------------------------
+ # Column discovery & dynamic SQL builders
  # -------------------------------------------------------------------
+ WANTED_COLS = [
+     "as_of_date",
+     "product",
+     "months",
+     "segments",
+     "currency",
+     "Portfolio_value",
+     "Interest_rate",
+     "days_to_maturity",
+ ]
+
+ PRODUCT_ASSETS = (
+     "loan", "overdraft", "advances", "bills", "bill", "tbond", "t-bond", "tbill",
+     "t-bill", "repo_asset", "assets"
+ )
+ PRODUCT_SOF = (
+     "fd", "term_deposit", "td", "savings", "current", "call", "repo_liab"
+ )
+
+
+ def discover_columns(conn: duckdb.DuckDBPyConnection, table_fqn: str) -> List[str]:
+     q = f"""
+     SELECT lower(column_name) AS col
+     FROM information_schema.columns
+     WHERE table_schema = split_part('{table_fqn}', '.', 2)
+       AND table_name = split_part('{table_fqn}', '.', 3)
      """
+     df = conn.execute(q).fetchdf()
+     return [c for c in df["col"].tolist()]
+
+
+ def build_view_sql(existing_cols: List[str]) -> str:
+     # Build a SELECT list that only references columns that exist; others are NULLs.
+     parts = []
+     for c in WANTED_COLS:
+         if c.lower() in existing_cols:
+             parts.append(c)
+         else:
+             # use sensible defaults for types
+             if c in ("Portfolio_value", "Interest_rate", "days_to_maturity", "months"):
+                 parts.append(f"CAST(NULL AS DOUBLE) AS {c}")
+             else:
+                 parts.append(f"CAST(NULL AS VARCHAR) AS {c}")
+
+     # Add bucket derived from product (which must exist; we hard-require product,
+     # Portfolio_value & days_to_maturity -- ensure_view() asserts this later).
+     # Quote the product lists outside the f-string so the expression stays valid
+     # on Python versions that reject backslashes inside f-string braces (pre-3.12).
+     sof_list = ",".join(f"'{p}'" for p in PRODUCT_SOF)
+     assets_list = ",".join(f"'{p}'" for p in PRODUCT_ASSETS)
+     bucket_case = (
+         "CASE "
+         f"WHEN lower(product) IN ({sof_list}) THEN 'SoF' "
+         f"WHEN lower(product) IN ({assets_list}) THEN 'Assets' "
+         "ELSE 'Unknown' END AS bucket"
+     )
+
+     select_list = ",\n    ".join(parts + [bucket_case])
+     return f"""
+     CREATE OR REPLACE VIEW {VIEW_FQN} AS
+     SELECT
+     {select_list}
+     FROM {TABLE_FQN};
      """
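
To see what the builder emits, a sketch with a hypothetical source that lacks `segments` (generated SQL abbreviated in the comment):

cols = ["as_of_date", "product", "months", "currency",
        "portfolio_value", "interest_rate", "days_to_maturity"]  # hypothetical
print(build_view_sql(cols))
# CREATE OR REPLACE VIEW my_db.main.positions_v AS
# SELECT
#     as_of_date, product, months,
#     CAST(NULL AS VARCHAR) AS segments,   -- missing column padded with NULL
#     currency, Portfolio_value, Interest_rate, days_to_maturity,
#     CASE WHEN lower(product) IN ('fd',...) THEN 'SoF'
#          WHEN lower(product) IN ('loan',...) THEN 'Assets'
#          ELSE 'Unknown' END AS bucket
# FROM my_db.main.masterdataset_v;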


+ def make_max_date_sql(has_asof: bool) -> str:
+     if not has_asof:
+         # No as_of_date column -> return N/A row
+         return "SELECT 'N/A'::VARCHAR AS d;"
+     return f"WITH maxd AS (SELECT max(as_of_date) AS d FROM {VIEW_FQN}) SELECT d FROM maxd;"


+ def wrap_latest_date(sql_body: str, has_asof: bool) -> str:
+     """
+     If as_of_date exists, pin to latest date using a CTE and JOIN.
+     Otherwise, return the body directly from VIEW_FQN (no date pinning).
+     The sql_body must reference the view as 'p'.
+     """
+     # NOTE: both branches currently return the same wrapper; date pinning is
+     # handled inside the build_*_sql helpers below, so this is effectively unused.
+     if not has_asof:
+         # Remove any JOIN to maxd; just select from the view
+         return f"SELECT * FROM ({sql_body})"
+     else:
+         return f"SELECT * FROM ({sql_body})"
+
+
+ def build_kpi_sql(has_asof: bool) -> str:
+     if has_asof:
+         return f"""
+         WITH maxd AS (SELECT max(as_of_date) AS d FROM {VIEW_FQN}),
+         t1 AS (
+             SELECT p.bucket, SUM(p.Portfolio_value) AS amt
+             FROM {VIEW_FQN} p
+             JOIN maxd m ON p.as_of_date = m.d
+             WHERE p.days_to_maturity <= 1
+             GROUP BY p.bucket
+         )
+         SELECT
+             COALESCE(SUM(CASE WHEN bucket='Assets' THEN amt END),0) AS assets_t1,
+             COALESCE(SUM(CASE WHEN bucket='SoF' THEN amt END),0) AS sof_t1,
+             COALESCE(SUM(CASE WHEN bucket='Assets' THEN amt END),0)
+               - COALESCE(SUM(CASE WHEN bucket='SoF' THEN amt END),0) AS net_gap_t1
+         FROM t1;
+         """
+     else:
+         return f"""
+         WITH t1 AS (
+             SELECT p.bucket, SUM(p.Portfolio_value) AS amt
+             FROM {VIEW_FQN} p
+             WHERE p.days_to_maturity <= 1
+             GROUP BY p.bucket
+         )
+         SELECT
+             COALESCE(SUM(CASE WHEN bucket='Assets' THEN amt END),0) AS assets_t1,
+             COALESCE(SUM(CASE WHEN bucket='SoF' THEN amt END),0) AS sof_t1,
+             COALESCE(SUM(CASE WHEN bucket='Assets' THEN amt END),0)
+               - COALESCE(SUM(CASE WHEN bucket='SoF' THEN amt END),0) AS net_gap_t1
+         FROM t1;
+         """
+
+
+ def build_ladder_sql(has_asof: bool) -> str:
+     if has_asof:
+         return f"""
+         WITH maxd AS (SELECT max(as_of_date) AS d FROM {VIEW_FQN})
+         SELECT
+             CASE
+                 WHEN p.days_to_maturity <= 1 THEN 'T+1'
+                 WHEN p.days_to_maturity BETWEEN 2 AND 7 THEN 'T+2..7'
+                 WHEN p.days_to_maturity BETWEEN 8 AND 30 THEN 'T+8..30'
+                 ELSE 'T+31+'
+             END AS time_bucket,
+             p.bucket,
+             SUM(p.Portfolio_value) AS amount
+         FROM {VIEW_FQN} p
+         JOIN maxd m ON p.as_of_date = m.d
+         GROUP BY 1,2
+         ORDER BY CASE time_bucket WHEN 'T+1' THEN 1 WHEN 'T+2..7' THEN 2 WHEN 'T+8..30' THEN 3 ELSE 4 END, p.bucket;
+         """
+     else:
+         return f"""
+         SELECT
+             CASE
+                 WHEN p.days_to_maturity <= 1 THEN 'T+1'
+                 WHEN p.days_to_maturity BETWEEN 2 AND 7 THEN 'T+2..7'
+                 WHEN p.days_to_maturity BETWEEN 8 AND 30 THEN 'T+8..30'
+                 ELSE 'T+31+'
+             END AS time_bucket,
+             p.bucket,
+             SUM(p.Portfolio_value) AS amount
+         FROM {VIEW_FQN} p
+         GROUP BY 1,2
+         ORDER BY CASE time_bucket WHEN 'T+1' THEN 1 WHEN 'T+2..7' THEN 2 WHEN 'T+8..30' THEN 3 ELSE 4 END, p.bucket;
+         """
+
+
+ def build_t1_group_sql(group_col: str, has_asof: bool) -> str:
+     if has_asof:
+         return f"""
+         WITH maxd AS (SELECT max(as_of_date) AS d FROM {VIEW_FQN})
+         SELECT p.bucket, p.{group_col} AS grp, SUM(p.Portfolio_value) AS amount
+         FROM {VIEW_FQN} p
+         JOIN maxd m ON p.as_of_date = m.d
+         WHERE p.days_to_maturity <= 1
+         GROUP BY 1,2
+         ORDER BY p.bucket, amount DESC
+         LIMIT 50;
+         """
+     else:
+         return f"""
+         SELECT p.bucket, p.{group_col} AS grp, SUM(p.Portfolio_value) AS amount
+         FROM {VIEW_FQN} p
+         WHERE p.days_to_maturity <= 1
+         GROUP BY 1,2
+         ORDER BY p.bucket, amount DESC
+         LIMIT 50;
+         """
+
+
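
One builder serves all three T+1 breakdowns; for example (output shown modulo indentation):

print(build_t1_group_sql("currency", has_asof=True))
# WITH maxd AS (SELECT max(as_of_date) AS d FROM my_db.main.positions_v)
# SELECT p.bucket, p.currency AS grp, SUM(p.Portfolio_value) AS amount
# FROM my_db.main.positions_v p
# JOIN maxd m ON p.as_of_date = m.d
# WHERE p.days_to_maturity <= 1
# GROUP BY 1,2
# ORDER BY p.bucket, amount DESC
# LIMIT 50;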
+ def build_irr_sql(has_asof: bool, has_months: bool, has_ir: bool) -> str:
+     # T_years uses days_to_maturity OR months (if present). y uses Interest_rate (if present).
+     t_years_expr = "CASE WHEN p.days_to_maturity IS NOT NULL THEN p.days_to_maturity/365.0"
+     if has_months:
+         t_years_expr += " WHEN p.months IS NOT NULL THEN p.months/12.0"
+     t_years_expr += " ELSE NULL END"
+
+     y_expr = "(p.Interest_rate / 100.0)" if has_ir else "NULL"
+
+     if has_asof:
+         base_from = f"FROM {VIEW_FQN} p JOIN maxd m ON p.as_of_date = m.d"
+         max_cte = f"WITH maxd AS (SELECT max(as_of_date) AS d FROM {VIEW_FQN}),"
+     else:
+         base_from = f"FROM {VIEW_FQN} p"
+         max_cte = "WITH"
+
+     return f"""
+     {max_cte}
      base AS (
          SELECT
              p.bucket,
              p.Portfolio_value AS pv,
+             {y_expr} AS y,
+             {t_years_expr} AS T_years
+         {base_from}
          WHERE p.Portfolio_value IS NOT NULL
      ),
      metrics AS (
          SELECT
              bucket,
              pv,
+             CASE WHEN T_years IS NULL THEN NULL
+                  WHEN y IS NULL THEN T_years
+                  ELSE T_years/(1.0+y) END AS dur_mod,
+             CASE WHEN T_years IS NULL THEN NULL
+                  WHEN y IS NULL THEN T_years*(T_years+1.0)
+                  ELSE (T_years*(T_years+1.0))/POWER(1.0+y,2) END AS convexity_approx,
+             CASE WHEN T_years IS NULL THEN NULL
+                  ELSE pv * (CASE WHEN y IS NULL THEN T_years ELSE T_years/(1.0+y) END) * 0.0001 END AS dv01
          FROM base
      ),
      agg AS (
          SELECT
              bucket,
              SUM(pv) AS pv_sum,
+             SUM(pv * dur_mod) / NULLIF(SUM(pv),0) AS dur_mod_port,
              SUM(dv01) AS dv01_sum
          FROM metrics
          GROUP BY bucket
      )
      SELECT
+         COALESCE(MAX(CASE WHEN bucket='Assets' THEN pv_sum END),0) AS assets_pv,
+         COALESCE(MAX(CASE WHEN bucket='SoF' THEN pv_sum END),0) AS sof_pv,
+         COALESCE(MAX(CASE WHEN bucket='Assets' THEN dur_mod_port END),0) AS assets_dur_mod,
+         COALESCE(MAX(CASE WHEN bucket='SoF' THEN dur_mod_port END),0) AS sof_dur_mod,
+         COALESCE(MAX(CASE WHEN bucket='Assets' THEN dur_mod_port END),0)
+           - COALESCE(MAX(CASE WHEN bucket='SoF' THEN dur_mod_port END),0) AS duration_gap,
+         COALESCE(MAX(CASE WHEN bucket='Assets' THEN dv01_sum END),0)
+           - COALESCE(MAX(CASE WHEN bucket='SoF' THEN dv01_sum END),0) AS net_dv01
+     FROM agg;
+     """
+
+
+ def build_shock_sql(has_asof: bool, has_months: bool, has_ir: bool) -> str:
+     t_years_expr = "CASE WHEN p.days_to_maturity IS NOT NULL THEN p.days_to_maturity/365.0"
+     if has_months:
+         t_years_expr += " WHEN p.months IS NOT NULL THEN p.months/12.0"
+     t_years_expr += " ELSE NULL END"
+
+     y_expr = "(p.Interest_rate / 100.0)" if has_ir else "NULL"
+
+     if has_asof:
+         base_from = f"FROM {VIEW_FQN} p JOIN maxd m ON p.as_of_date = m.d"
+         max_cte = f"WITH maxd AS (SELECT max(as_of_date) AS d FROM {VIEW_FQN}),"
+     else:
+         base_from = f"FROM {VIEW_FQN} p"
+         max_cte = "WITH"
+
+     return f"""
+     {max_cte}
      base AS (
          SELECT
              p.bucket,
              p.Portfolio_value AS pv,
+             {y_expr} AS y,
+             {t_years_expr} AS T_years
+         {base_from}
      ),
      k AS (
          SELECT
+             bucket, pv,
+             CASE WHEN T_years IS NULL THEN NULL
+                  WHEN y IS NULL THEN T_years
+                  ELSE T_years/(1.0+y) END AS dur_mod,
+             CASE WHEN T_years IS NULL THEN NULL
+                  WHEN y IS NULL THEN T_years*(T_years+1.0)
+                  ELSE (T_years*(T_years+1.0))/POWER(1.0+y,2) END AS convexity_approx
          FROM base
+     ),
+     shock AS (
+         SELECT
+             bucket,
+             SUM((- pv * dur_mod * 0.01) + (0.5 * pv * convexity_approx * POWER(0.01,2))) AS dPV_up_100bp,
+             SUM((+ pv * dur_mod * 0.01) + (0.5 * pv * convexity_approx * POWER(-0.01,2))) AS dPV_dn_100bp
+         FROM k
+         GROUP BY bucket
      )
+     SELECT * FROM shock ORDER BY bucket;
      """


  # -------------------------------------------------------------------
+ # Data class
  # -------------------------------------------------------------------
+ class DashboardResult(BaseModel):
+     # DataFrame fields are not standard pydantic types; without this config the
+     # model raises at class-definition time (pydantic v2 syntax assumed here).
+     model_config = {"arbitrary_types_allowed": True}
+
+     as_of_date: str
+     assets_t1: float
+     sof_t1: float
+     net_gap_t1: float
+     ladder: pd.DataFrame
+     t1_by_month: pd.DataFrame
+     t1_by_segment: pd.DataFrame
+     t1_by_ccy: pd.DataFrame
+     irr: pd.DataFrame
+     shocks: pd.DataFrame


  # -------------------------------------------------------------------
+ # Query helpers
  # -------------------------------------------------------------------
+ def ensure_view(conn: duckdb.DuckDBPyConnection, existing_cols: List[str]) -> None:
+     # sanity: ensure mandatory columns exist in source table
+     mandatory = {"product", "portfolio_value", "days_to_maturity"}
+     if not mandatory.issubset(set(existing_cols)):
+         raise RuntimeError(
+             f"Source table {TABLE_FQN} must contain {mandatory}, "
+             f"found only: {existing_cols}"
+         )
+     conn.execute(build_view_sql(existing_cols))
+
+
+ def fetch_all(conn: duckdb.DuckDBPyConnection) -> DashboardResult:
+     existing = discover_columns(conn, TABLE_FQN)
+     ensure_view(conn, existing)
+
+     has_asof = "as_of_date" in existing
+     has_months = "months" in existing
+     has_segments = "segments" in existing
+     has_currency = "currency" in existing
+     has_ir = "interest_rate" in existing
+
+     # As-of date (or N/A)
+     asof_df = conn.execute(make_max_date_sql(has_asof)).fetchdf()
+     as_of = asof_df["d"].iloc[0]
+     as_of_str = (
+         pd.to_datetime(as_of).strftime("%Y-%m-%d")
+         if has_asof and not pd.isna(as_of)
+         else "N/A"
+     )
+
+     # KPIs & ladder
+     kpis = conn.execute(build_kpi_sql(has_asof)).fetchdf()
+     ladder = conn.execute(build_ladder_sql(has_asof)).fetchdf()
+
+     # Contributors (only if columns exist)
+     if has_months:
+         t1_m = conn.execute(build_t1_group_sql("months", has_asof)).fetchdf()
+         t1_m = t1_m.rename(columns={"grp": "months"})
+     else:
+         t1_m = pd.DataFrame(columns=["bucket", "months", "amount"])
+
+     if has_segments:
+         t1_s = conn.execute(build_t1_group_sql("segments", has_asof)).fetchdf()
+         t1_s = t1_s.rename(columns={"grp": "segments"})
+     else:
+         t1_s = pd.DataFrame(columns=["bucket", "segments", "amount"])
+
+     if has_currency:
+         t1_c = conn.execute(build_t1_group_sql("currency", has_asof)).fetchdf()
+         t1_c = t1_c.rename(columns={"grp": "currency"})
+     else:
+         t1_c = pd.DataFrame(columns=["bucket", "currency", "amount"])
+
+     # IRR & shocks (works even if Interest_rate/months are missing)
+     irr = conn.execute(build_irr_sql(has_asof, has_months, has_ir)).fetchdf()
+     shocks = conn.execute(build_shock_sql(has_asof, has_months, has_ir)).fetchdf()
+
+     return DashboardResult(
+         as_of_date=as_of_str,
+         assets_t1=float(kpis["assets_t1"].iloc[0]),
+         sof_t1=float(kpis["sof_t1"].iloc[0]),
+         net_gap_t1=float(kpis["net_gap_t1"].iloc[0]),
+         ladder=ladder,
+         t1_by_month=t1_m,
+         t1_by_segment=t1_s,
+         t1_by_ccy=t1_c,
+         irr=irr,
+         shocks=shocks,
+     )
 

+ # -------------------------------------------------------------------
+ # Plotting
+ # -------------------------------------------------------------------
+ def plot_ladder(df: pd.DataFrame):
+     pivot = df.pivot(index="time_bucket", columns="bucket", values="amount").fillna(0)
+     order = ["T+1", "T+2..7", "T+8..30", "T+31+"]
+     pivot = pivot.reindex(order).fillna(0)  # buckets absent from the data become 0, not NaN
+     fig, ax = plt.subplots(figsize=(7, 4))
+     assets = pivot.get("Assets", pd.Series([0] * len(pivot), index=pivot.index))
+     sof = pivot.get("SoF", pd.Series([0] * len(pivot), index=pivot.index))
+     ax.bar(pivot.index, assets, label="Assets")
+     ax.bar(pivot.index, -sof, bottom=0, label="SoF")
+     ax.axhline(0, linewidth=1)
+     ax.set_ylabel("LKR")
+     ax.set_title("Maturity Ladder (Assets vs SoF)")
+     ax.legend()
+     fig.tight_layout()
+     return fig
 
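
plot_ladder needs nothing but the three ladder columns, so it can be exercised without a database (synthetic numbers):

import pandas as pd
demo_ladder = pd.DataFrame({
    "time_bucket": ["T+1", "T+1", "T+31+"],
    "bucket": ["Assets", "SoF", "Assets"],
    "amount": [5_000_000.0, 3_500_000.0, 12_000_000.0],
})
fig = plot_ladder(demo_ladder)
fig.savefig("ladder_demo.png")  # or hand it to a gr.Plot output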


  # -------------------------------------------------------------------
+ # Exports
  # -------------------------------------------------------------------
+ def export_excel(res: DashboardResult) -> Path:
+     out = EXPORT_DIR / f"alco_report_{res.as_of_date}.xlsx"
+     with pd.ExcelWriter(out, engine="xlsxwriter") as xw:
+         pd.DataFrame({
+             "as_of_date": [res.as_of_date],
+             "assets_t1": [res.assets_t1],
+             "sof_t1": [res.sof_t1],
+             "net_gap_t1": [res.net_gap_t1],
+         }).to_excel(xw, index=False, sheet_name="kpis")
+         res.ladder.to_excel(xw, index=False, sheet_name="ladder")
+         res.t1_by_month.to_excel(xw, index=False, sheet_name="t1_by_month")
+         res.t1_by_segment.to_excel(xw, index=False, sheet_name="t1_by_segment")
+         res.t1_by_ccy.to_excel(xw, index=False, sheet_name="t1_by_ccy")
+         res.irr.to_excel(xw, index=False, sheet_name="irr")
+         res.shocks.to_excel(xw, index=False, sheet_name="shocks")
+     return out
+
+
+ def export_pdf(res: DashboardResult) -> Path:
+     out = EXPORT_DIR / f"alco_report_{res.as_of_date}.pdf"
+     c = canvas.Canvas(str(out), pagesize=A4)
+     W, H = A4
+     y = H - 20 * mm
+
+     def line(txt, size=11, dy=6 * mm):
+         nonlocal y
+         c.setFont("Helvetica", size)
+         c.drawString(20 * mm, y, txt)
+         y -= dy
+
+     line(APP_TITLE, 14, dy=8 * mm)
+     line(f"As of: {res.as_of_date}")
+     line(f"Assets T+1: {res.assets_t1:,.0f} LKR")
+     line(f"SoF T+1: {res.sof_t1:,.0f} LKR")
+     line(f"Net Gap T+1: {res.net_gap_t1:,.0f} LKR (negative = shortfall)")
+     y -= 4 * mm
+
+     if not res.irr.empty:
+         irr = res.irr.iloc[0]
+         line("Interest-Rate Risk (approx)", 12, dy=7 * mm)
+         line(f"Assets ModDur: {irr['assets_dur_mod']:.2f} | SoF ModDur: {irr['sof_dur_mod']:.2f}")
+         line(f"Duration Gap: {irr['duration_gap']:.2f}")
+         line(f"Net DV01: {irr['net_dv01']:,.0f} LKR/bp")
+
+     if not res.shocks.empty:
+         net_up = res.shocks["dPV_up_100bp"].sum()
+         net_dn = res.shocks["dPV_dn_100bp"].sum()
+         y -= 2 * mm
+         line(f"+100bp net ΔPV: {net_up:,.0f} LKR | -100bp net ΔPV: {net_dn:,.0f} LKR")
+
+     c.showPage()
+     c.save()
+     return out
497
 
498
  # -------------------------------------------------------------------
499
+ # Gradio UI
500
  # -------------------------------------------------------------------
501
+ def run_dashboard() -> Tuple[str, float, float, float, Any, Any, Any, Any, Any, Any, Any]:
502
+ conn = connect_md()
503
+ res = fetch_all(conn)
504
+ fig = plot_ladder(res.ladder)
505
+ excel_path = export_excel(res)
506
+ pdf_path = export_pdf(res)
507
+ return (
508
+ res.as_of_date,
509
+ res.assets_t1,
510
+ res.sof_t1,
511
+ res.net_gap_t1,
512
+ fig,
513
+ res.t1_by_month,
514
+ res.t1_by_segment,
515
+ res.t1_by_ccy,
516
+ res.irr,
517
+ res.shocks,
518
+ str(excel_path),
519
+ str(pdf_path),
520
+ )
521
+
522
+
523
+ with gr.Blocks(title=APP_TITLE) as demo:
524
+ gr.Markdown(
525
+ f"# {APP_TITLE}\n"
526
+ "*Source:* `my_db.main.masterdataset_v` `positions_v` | *Sign:* Assets=+ SoF=–"
527
+ )
528
+
529
+ with gr.Row():
530
+ btn = gr.Button("🔄 Refresh", variant="primary")
531
+
532
+ with gr.Row():
533
+ as_of = gr.Textbox(label="As of date", interactive=False)
534
+
535
+ with gr.Row():
536
+ k1 = gr.Number(label="Assets T+1 (LKR)", precision=0)
537
+ k2 = gr.Number(label="SoF T+1 (LKR)", precision=0)
538
+ k3 = gr.Number(label="Net Gap T+1 (LKR)", precision=0)
539
+
540
+ chart = gr.Plot(label="Maturity Ladder")
541
+
542
+ with gr.Row():
543
+ t1m = gr.Dataframe(label="T+1 by Tenor (months)")
544
+ t1s = gr.Dataframe(label="T+1 by Segment")
545
+
546
+ t1c = gr.Dataframe(label="T+1 by Currency")
547
+ irr = gr.Dataframe(label="Interest-Rate Risk (bucketed)")
548
+ shocks = gr.Dataframe(label="Parallel Shock ±100bp (bucketed)")
549
+
550
+ with gr.Row():
551
+ excel_file = gr.File(label="Excel export", interactive=False)
552
+ pdf_file = gr.File(label="PDF export", interactive=False)
553
+
554
+ btn.click(
555
+ fn=run_dashboard,
556
+ outputs=[
557
+ as_of,
558
+ k1,
559
+ k2,
560
+ k3,
561
+ chart,
562
+ t1m,
563
+ t1s,
564
+ t1c,
565
+ irr,
566
+ shocks,
567
+ excel_file,
568
+ pdf_file,
569
+ ],
570
+ )
571
 
572
  if __name__ == "__main__":
573
+ demo.launch()
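
To exercise the app outside the Space, something like this should work (a sketch; the token value is a placeholder):

import os
os.environ["MOTHERDUCK_TOKEN"] = "md_..."  # placeholder
from app import demo                       # building the Blocks happens at import time
demo.launch()                              # Gradio serves on http://127.0.0.1:7860 by default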