Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -1,6 +1,10 @@
# This pickle file is produced by test runs:
epw_test.pkl

# Data maintenance scripts
check_links.py
fix_broken_urls.py

venv
.idea
cache-directory
Expand Down
1 change: 1 addition & 0 deletions Pipfile
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@ pandas = "==2.2.0"
numpy = "==1.26.3"
dash-iconify = "*"
scipy = "==1.12.0"
kgcpy = "*"

[dev-packages]
cleanpy = "*"
Expand Down
1,540 changes: 911 additions & 629 deletions Pipfile.lock

Large diffs are not rendered by default.

Binary file modified assets/data/OneBuilding files.zip
Binary file not shown.
Binary file modified assets/data/one_building.csv
Binary file not shown.
5 changes: 4 additions & 1 deletion pages/lib/import_one_building_files.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,9 @@ def import_kml_files(file_name):

data = []
for location in locations:
url_match = re.findall(r"<td>URL (.+?)<\/td>", location)
if not url_match:
continue
location_info = []
# lat
location_info.append(
Expand All @@ -26,7 +29,7 @@ def import_kml_files(file_name):
# url
location_info.append(
"<a href="
+ re.findall(r"<td>URL (.+?)<\/td>", location)[0]
+ url_match[0]
+ ' style="color: #fff">Climate.OneBuilding.Org</a>'
)
# description
Expand Down
2 changes: 1 addition & 1 deletion pages/lib/template_graphs.py
Original file line number Diff line number Diff line change
Expand Up @@ -425,7 +425,7 @@ def yearly_profile(df, var, global_local, si_ip):
x=all_dates,
y=np.array(hi_rh) - np.array(lo_rh),
base=lo_rh,
name="humidity comfort band",
name="ASHRAE 160 humidity range",
marker_opacity=0.3,
marker_color="silver",
)
Expand Down
47 changes: 40 additions & 7 deletions pages/summary.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
import pandas as pd
import dash_mantine_components as dmc
import plotly.graph_objects as go
import requests
from kgcpy import lookupCZ

from dash.exceptions import PreventUpdate
from dash_extensions.enrich import dcc, Output, Input, State, callback
Expand All @@ -24,6 +24,41 @@
)


# Köppen-Geiger climate-zone codes mapped to short human-readable descriptions.
# Used to build the "Köppen-Geiger climate zone: <code>. <description>." text in
# the summary page, keyed by the zone code returned from the lat/lon lookup.
# NOTE(review): wording is simplified relative to the formal Köppen-Geiger
# class names (e.g. "Dsc"/"Dsd" are usually phrased "dry-summer subarctic") —
# confirm this is the intended display copy. A code with no entry here yields
# an empty description, and the caller then shows no climate text at all.
KG_DESCRIPTIONS = {
    "Af": "Tropical rainforest",
    "Am": "Tropical monsoon",
    "Aw": "Tropical savanna, dry winter",
    "As": "Tropical savanna, dry summer",
    "BWh": "Hot desert",
    "BWk": "Cold desert",
    "BSh": "Hot semi-arid steppe",
    "BSk": "Cold semi-arid steppe",
    "Csa": "Hot-summer Mediterranean",
    "Csb": "Warm-summer Mediterranean",
    "Csc": "Cold-summer Mediterranean",
    "Cwa": "Monsoon-influenced humid subtropical",
    "Cwb": "Subtropical highland",
    "Cwc": "Cold subtropical highland",
    "Cfa": "Humid subtropical, no dry season",
    "Cfb": "Temperate oceanic",
    "Cfc": "Subpolar oceanic",
    "Dsa": "Hot-summer continental",
    "Dsb": "Warm-summer continental",
    "Dsc": "Subarctic",
    "Dsd": "Extremely cold subarctic",
    "Dwa": "Monsoon-influenced hot-summer continental",
    "Dwb": "Monsoon-influenced warm-summer continental",
    "Dwc": "Monsoon-influenced subarctic",
    "Dwd": "Monsoon-influenced extremely cold subarctic",
    "Dfa": "Hot-summer humid continental",
    "Dfb": "Warm-summer humid continental",
    "Dfc": "Subarctic boreal",
    "Dfd": "Extremely cold subarctic boreal",
    "ET": "Tundra",
    "EF": "Ice cap",
}


dash.register_page(
__name__,
name=PageInfo.SUMMARY_NAME,
Expand Down Expand Up @@ -228,12 +263,10 @@ def update_location_info(ts, df, meta, si_ip):

climate_text = ""
try:
r = requests.get(
f"http://climateapi.scottpinkelman.com/api/v1/location/{meta[Variables.LAT.col_name]}/{meta[Variables.LON.col_name]}"
)
if r.status_code == 200:
j = r.json()["return_values"][0]
climate_text = f"Köppen-Geiger climate zone: {j['koppen_geiger_zone']}. {j['zone_description']}."
zone = lookupCZ(meta[Variables.LAT.col_name], meta[Variables.LON.col_name])
desc = KG_DESCRIPTIONS.get(zone, "")
if zone and desc:
climate_text = f"Köppen-Geiger climate zone: {zone}. {desc}."
except Exception:
pass

Expand Down
9 changes: 9 additions & 0 deletions tests/test_summary.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,9 @@ def test_location_info_loaded(page: Page):
"""Verify that location info section shows correct values"""
info_section = page.locator("#location-info")
expect(info_section).to_be_visible()
expect(info_section).not_to_have_attribute(
"data-dash-is-loading", "true", timeout=20000
)
Comment on lines +56 to +58
Copy link
Copy Markdown

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🟡 Minor | ⚡ Quick win

not_to_have_attribute may pass before loading even starts — consider waiting for loading to begin first.

data-dash-is-loading="true" is only set once Dash's callback begins rendering; if the check runs before the attribute is ever attached (i.e. before the callback fires), the assertion passes immediately and subsequent text checks run against incomplete content. A more robust pattern is to first assert the attribute is "true" (i.e. loading started) and then assert it is no longer "true":

🛡️ Proposed fix
-    expect(info_section).not_to_have_attribute(
-        "data-dash-is-loading", "true", timeout=20000
-    )
+    expect(info_section).to_have_attribute(
+        "data-dash-is-loading", "true", timeout=10000
+    )
+    expect(info_section).not_to_have_attribute(
+        "data-dash-is-loading", "true", timeout=20000
+    )

The same pattern should be applied to lines 94–96 in test_unit_switch.

📝 Committable suggestion

‼️ IMPORTANT
Carefully review the code before committing. Ensure that it accurately replaces the highlighted code, contains no missing lines, and has no issues with indentation. Thoroughly test & benchmark the code to ensure it meets the requirements.

Suggested change
expect(info_section).not_to_have_attribute(
"data-dash-is-loading", "true", timeout=20000
)
expect(info_section).to_have_attribute(
"data-dash-is-loading", "true", timeout=10000
)
expect(info_section).not_to_have_attribute(
"data-dash-is-loading", "true", timeout=20000
)
🤖 Prompt for AI Agents
Verify each finding against current code. Fix only still-valid issues, skip the
rest with a brief reason, keep changes minimal, and validate.

In `@tests/test_summary.py` around lines 56 - 58, The test currently uses
expect(info_section).not_to_have_attribute("data-dash-is-loading","true",...)
which can pass before loading begins; update the test to first wait for the
loading attribute to appear by asserting
expect(info_section).to_have_attribute("data-dash-is-loading","true",
timeout=...) and only after that assert
expect(info_section).not_to_have_attribute("data-dash-is-loading","true",
timeout=...); apply the same two-step pattern to the analogous checks in the
test_unit_switch block (lines referencing info_section in that test) so you wait
for loading to start then for it to finish.

expected_texts = [
"Location: Bologna Marconi AP, ITA",
"Longitude: 11.2969",
Expand Down Expand Up @@ -88,6 +91,12 @@ def test_unit_switch(page: Page):
ip_button.click(force=True)

info_section = page.locator("#location-info")
expect(info_section).to_have_attribute(
"data-dash-is-loading", "true", timeout=10000
)
expect(info_section).not_to_have_attribute(
"data-dash-is-loading", "true", timeout=20000
)
expect(info_section.get_by_text("58.0 °F")).to_be_visible()
expect(info_section.get_by_text("121.4 ft")).to_be_visible()
expect(info_section.get_by_text("kBtu/ft2")).to_be_visible()
Loading