16 changes: 16 additions & 0 deletions parsers/IN.py
@@ -8,8 +8,11 @@
from zoneinfo import ZoneInfo

import pandas as pd
import requests

Check failure on line 11 in parsers/IN.py (GitHub Actions / Python / Formatting): Ruff (F401) parsers/IN.py:11:8: `requests` imported but unused
from bs4 import BeautifulSoup
from requests import Response, Session
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry

from electricitymap.contrib.lib.models.event_lists import (
ProductionBreakdownList,
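
A likely resolution of the Ruff F401 failure flagged in the annotation above, assuming the bare `requests` name is not used anywhere else in the module, is to drop `import requests` and keep only the explicit imports the new code actually relies on. A sketch of the resulting import block:

from zoneinfo import ZoneInfo

import pandas as pd

from bs4 import BeautifulSoup
from requests import Response, Session
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry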
@@ -415,7 +418,9 @@
cea_data_url = (
"https://cea.nic.in/wp-admin/admin-ajax.php?action=getpostsfordatatables"
)

r_all_data: Response = session.get(cea_data_url)

if r_all_data.status_code == 200:
all_data = r_all_data.json()["data"]
target_elem = [
@@ -427,6 +432,7 @@
if len(target_elem) > 0 and target_elem[0]["link"] != "file_not_found":
target_url = target_elem[0]["link"].split(": ")[0]
formatted_url = target_url.split("^")[0]

r: Response = session.get(formatted_url)
renewable_production = format_ren_production_data(
url=r.url, zone_key=zone_key, target_datetime=target_datetime
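
For orientation, a self-contained sketch of the fetch-and-follow flow these context lines belong to. The real list-comprehension filter is collapsed by the diff view, so `looks_relevant` below is a hypothetical stand-in, not the parser's actual predicate:

from requests import Session

session = Session()
cea_data_url = (
    "https://cea.nic.in/wp-admin/admin-ajax.php?action=getpostsfordatatables"
)

def looks_relevant(entry: dict) -> bool:
    # Hypothetical placeholder; the parser's real filter is not visible in this diff.
    return entry.get("link", "") != ""

r_all_data = session.get(cea_data_url)
if r_all_data.status_code == 200:
    all_data = r_all_data.json()["data"]
    target_elem = [entry for entry in all_data if looks_relevant(entry)]
    if len(target_elem) > 0 and target_elem[0]["link"] != "file_not_found":
        # The parser strips anything after ": " and "^" from the link before fetching it.
        target_url = target_elem[0]["link"].split(": ")[0]
        formatted_url = target_url.split("^")[0]
        r = session.get(formatted_url)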
@@ -462,6 +468,16 @@

all_data_points = []
days_lookback_to_try = list(range(1, 8))

retries = Retry(
total=5,
backoff_factor=0.5,
allowed_methods=["GET"],
)

adapter = HTTPAdapter(max_retries=retries)
session.mount("https://", adapter)

for days_lookback in days_lookback_to_try:
_target_datetime = target_datetime - timedelta(days=days_lookback)

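For context, a minimal self-contained sketch of the retry pattern this hunk adds. The URL is the CEA endpoint already used in this file, and the commented-out `status_forcelist` is an optional extra, not something the diff sets:

from requests import Session
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry

session = Session()
retries = Retry(
    total=5,  # give up after five retry attempts
    backoff_factor=0.5,  # exponential backoff between attempts
    allowed_methods=["GET"],  # only retry idempotent GET requests
    # status_forcelist=[500, 502, 503, 504],  # optionally also retry these HTTP codes
)
session.mount("https://", HTTPAdapter(max_retries=retries))

# Every https:// request made through this session now retries transparently
# on connection and read failures before raising.
r = session.get(
    "https://cea.nic.in/wp-admin/admin-ajax.php?action=getpostsfordatatables"
)
r.raise_for_status()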