Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Fixup #242

Open
wants to merge 7 commits into
base: main
Choose a base branch
from
Open

Fixup #242

Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
12 changes: 6 additions & 6 deletions .github/workflows/build_pipeline.yml
Original file line number Diff line number Diff line change
Expand Up @@ -6,11 +6,11 @@ jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v1
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
python-version: 3.7
- uses: actions/cache@v1
python-version: 3.8
- uses: actions/cache@v4
with:
path: ~/.cache/pip
key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}
Expand Down Expand Up @@ -76,7 +76,7 @@ jobs:
aws s3 cp app.zip "s3://$AWS_S3_CODE_BUCKET/$repo_slug.zip"
- name: Send build success notification
if: success()
uses: rtCamp/action-slack-notify@v2.0.0
uses: rtCamp/action-slack-notify@v2.2.0
env:
SLACK_MESSAGE: ${{ github.repository }} build ${{ github.run_number }} launched by ${{ github.actor }} has succeeded
SLACK_TITLE: Build Success
Expand All @@ -86,7 +86,7 @@ jobs:
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }}
- name: Send build failure notification
if: failure()
uses: rtCamp/action-slack-notify@v2.0.0
uses: rtCamp/action-slack-notify@v2.2.0
env:
SLACK_COLOR: '#FF0000'
SLACK_MESSAGE: ${{ github.repository }} build ${{ github.run_number }} launched by ${{ github.actor }} has failed
Expand Down
2 changes: 1 addition & 1 deletion Dockerfile
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
FROM python:3.8-slim-bullseye
FROM python:3.13-slim-bullseye

RUN apt-get update \
&& apt-get upgrade -y \
Expand Down
14 changes: 14 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,20 @@ git clone git@github.com:arup-group/elara.git

Once available locally, navigate to the folder to install Elara and its dependencies. Using a virtual environment is highly recommended.

### Mamba

Using [mamba](https://mamba.readthedocs.io/en/latest/index.html).

```{sh}
git clone git@github.com:arup-group/elara.git
cd elara
mamba install -n elara --file requirements.txt python=3.13
mamba activate elara
pip install --no-deps -e .
pytest
elara --help
```

### OSX

```{sh}
Expand Down
37 changes: 22 additions & 15 deletions elara/benchmarking.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@
from pandas.core import groupby
import pandas as pd
import os
import numpy as np
Expand Down Expand Up @@ -95,7 +94,7 @@ def build(self, resources: dict, write_path: Optional[str] = None) -> dict:
bm_df = pd.concat([benchmarks_df[self.value_field],
simulation_df[self.value_field]], axis=1)
bm_df.columns = self.output_value_fields
bm_df.dropna(0, inplace=True)
bm_df.dropna(axis=0, inplace=True)

self.plot_comparisons(bm_df)

Expand Down Expand Up @@ -314,7 +313,7 @@ def __init__(
**kwargs
)
self.groupby_person_attribute = groupby_person_attribute
self.simulation_name = f"trip_modes_all"
self.simulation_name = "trip_modes_all"
if groupby_person_attribute is not None:
self.logger.debug(
f"Found 'groupby_person_attribute': {groupby_person_attribute}")
Expand Down Expand Up @@ -348,7 +347,7 @@ def __init__(
**kwargs
)
self.groupby_person_attribute = groupby_person_attribute
self.simulation_name = f"trip_modes_all"
self.simulation_name = "trip_modes_all"
if groupby_person_attribute is not None:
self.logger.debug(
f"Found 'groupby_person_attribute': {groupby_person_attribute}")
Expand All @@ -372,7 +371,7 @@ def __init__(self, config, mode, **kwargs):
super().__init__(config, mode=mode, **kwargs)
destination_activities = kwargs.get("destination_activity_filters", [])
groupby_person_attribute = kwargs.get("groupby_person_attribute")
self.simulation_name = f"trip_activity_modes_all"
self.simulation_name = "trip_activity_modes_all"
for act in destination_activities:
self.simulation_name += f"_{act}"
if groupby_person_attribute is not None:
Expand All @@ -398,7 +397,7 @@ def __init__(self, config, mode, **kwargs):
super().__init__(config, mode=mode, **kwargs)
destination_activities = kwargs.get("destination_activity_filters", [])
groupby_person_attribute = kwargs.get("groupby_person_attribute")
self.simulation_name = f"trip_activity_modes_all"
self.simulation_name = "trip_activity_modes_all"
for act in destination_activities:
self.simulation_name += f"_{act}"
if groupby_person_attribute is not None:
Expand Down Expand Up @@ -434,7 +433,7 @@ def __init__(
**kwargs
)
self.groupby_person_attribute = groupby_person_attribute
self.simulation_name = f"plan_modes_all"
self.simulation_name = "plan_modes_all"
if groupby_person_attribute is not None:
self.logger.debug(
f"Found 'groupby_person_attribute': {groupby_person_attribute}")
Expand Down Expand Up @@ -468,7 +467,7 @@ def __init__(
**kwargs
)
self.groupby_person_attribute = groupby_person_attribute
self.simulation_name = f"plan_modes_all"
self.simulation_name = "plan_modes_all"
if groupby_person_attribute is not None:
self.logger.debug(
f"Found 'groupby_person_attribute': {groupby_person_attribute}")
Expand All @@ -492,7 +491,7 @@ def __init__(self, config, mode, **kwargs):
super().__init__(config, mode=mode, **kwargs)
destination_activities = kwargs.get("destination_activity_filters", [])
groupby_person_attribute = kwargs.get("groupby_person_attribute")
self.simulation_name = f"plan_activity_modes_all"
self.simulation_name = "plan_activity_modes_all"
for act in destination_activities:
self.simulation_name += f"_{act}"
if groupby_person_attribute is not None:
Expand All @@ -518,7 +517,7 @@ def __init__(self, config, mode, **kwargs):
super().__init__(config, mode=mode, **kwargs)
destination_activities = kwargs.get("destination_activity_filters", [])
groupby_person_attribute = kwargs.get("groupby_person_attribute")
self.simulation_name = f"plan_activity_modes_all"
self.simulation_name = "plan_activity_modes_all"
for act in destination_activities:
self.simulation_name += f"_{act}"
if groupby_person_attribute is not None:
Expand Down Expand Up @@ -653,7 +652,7 @@ def __init__(self, config, mode, benchmark_data_path=None, **kwargs) -> None:
if not links:
missing_counters += 1
self.logger.warning(
f"Benchmark data has no links - suggests error with Bench (i.e. MATSIM network has not matched to BM)."
"Benchmark data has no links - suggests error with Bench (i.e. MATSIM network has not matched to BM)."
)

# Check for number of missing BM links. Note this is a fault with the BM (ie missing links)
Expand Down Expand Up @@ -919,7 +918,7 @@ def __init__(self, config, mode, benchmark_data_path=None, **kwargs) -> None:
if not stops:
missing_counters += 1
self.logger.debug(
f"Benchmark data has no stop/s - suggests error with Bench (i.e. MATSIM network has not matched to BM)."
"Benchmark data has no stop/s - suggests error with Bench (i.e. MATSIM network has not matched to BM)."
)

# Check for number of missing BM stops. Note this is a fault with the BM (ie missing stops)
Expand Down Expand Up @@ -994,7 +993,7 @@ def build(self, resource: dict, write_path: Optional[str] = None) -> dict:
sim_result = np.array([0.0 for _ in range(len(bm_hours))])

# check if direction available
if not direction in model_results:
if direction not in model_results:
raise UserWarning(
f"Direction: {direction} not available in model results"
)
Expand Down Expand Up @@ -2311,8 +2310,16 @@ def merge_summary_stats(bm_results_summary):
for record in data:
record_type = record['source']

record.pop("score")
record.pop("source")
record.pop("score", None)
record.pop("source", None)
record.pop("mode", None)
record.pop("counter_id", None)
record.pop("line", None)
record.pop("o", None)
record.pop("d", None)
record.pop("direction", None)
record.pop("link_id", None)
# absolute trash, but it works

if record_type != "difference":
for measurement in list(record):
Expand Down
44 changes: 24 additions & 20 deletions elara/event_handlers.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import logging
import os
import pickle
from math import floor
from typing import Optional, Tuple, Union

Expand Down Expand Up @@ -231,7 +232,10 @@ def finalise(self):
del self.veh_occupancy
name = f"{str(self)}.pkl"
path = os.path.join(self.config.output_path, name)
nx.write_gpickle(self.graph, path)

with open(path, 'wb') as f:
pickle.dump(self.graph, f, pickle.HIGHEST_PROTOCOL)

del self.graph


Expand Down Expand Up @@ -435,7 +439,7 @@ def build(self, resources: dict, write_path: Optional[str] = None) -> None:
)
else:
self.logger.debug(f'Selecting links for mode:{self.mode}.')
self.elem_gdf = self.elem_gdf.loc[links, :]
self.elem_gdf = self.elem_gdf.loc[list(links), :]

self.elem_ids, self.elem_indices = self.generate_elem_ids(self.elem_gdf)

Expand Down Expand Up @@ -492,7 +496,7 @@ def finalise(self) -> None:
counts_df = pd.DataFrame(self.counts.flatten(), index=index)[0]
counts_df = counts_df.unstack(level='hour').sort_index()

counts_df['total'] = counts_df.sum(1)
counts_df['total'] = counts_df.sum(1, numeric_only=True)
counts_df = counts_df.reset_index().set_index('elem')

key = f"{self.name}_{self.groupby_person_attribute}"
Expand All @@ -510,7 +514,7 @@ def finalise(self) -> None:
totals_df = pd.DataFrame(
data=self.counts, index=self.elem_ids, columns=range(0, self.config.time_periods)
).sort_index()
totals_df['total'] = totals_df.sum(1)
totals_df['total'] = totals_df.sum(1, numeric_only=True)

del self.counts

Expand Down Expand Up @@ -590,7 +594,7 @@ def build(self, resources: dict, write_path: Optional[str] = None) -> None:
)
else:
self.logger.debug(f'Selecting links for mode:{self.mode}.')
self.elem_gdf = self.elem_gdf.loc[links, :]
self.elem_gdf = self.elem_gdf.loc[list(links), :]

self.elem_ids, self.elem_indices = self.generate_elem_ids(self.elem_gdf)

Expand Down Expand Up @@ -649,7 +653,7 @@ def finalise(self) -> None:
counts_df = counts_df.unstack(level='hour').sort_index()
# counts_df = counts_df.reset_index().set_index(['elem', self.groupby_person_attribute])

counts_df['total'] = counts_df.sum(1)
counts_df['total'] = counts_df.sum(1, numeric_only=True)
counts_df = counts_df.reset_index().set_index('elem')

key = f"{self.name}_{self.groupby_person_attribute}"
Expand All @@ -667,7 +671,7 @@ def finalise(self) -> None:
totals_df = pd.DataFrame(
data=self.counts, index=self.elem_ids, columns=range(0, self.config.time_periods)
).sort_index()
totals_df['total'] = totals_df.sum(1)
totals_df['total'] = totals_df.sum(1, numeric_only=True)

del self.counts

Expand Down Expand Up @@ -740,7 +744,7 @@ def build(self, resources: dict, write_path: Optional[str] = None) -> None:
)
else:
self.logger.debug(f'Selecting links for mode:{self.mode}.')
self.elem_gdf = self.elem_gdf.loc[links, :]
self.elem_gdf = self.elem_gdf.loc[list(links), :]

self.elem_ids, self.elem_indices = self.generate_elem_ids(self.elem_gdf)

Expand Down Expand Up @@ -1038,7 +1042,7 @@ def build(self, resources: dict, write_path: Optional[str] = None) -> None:
)
else:
self.logger.debug(f'Selecting links for mode:{self.mode}.')
self.elem_gdf = self.elem_gdf.loc[links, :]
self.elem_gdf = self.elem_gdf.loc[list(links), :]

self.elem_ids, self.elem_indices = self.generate_elem_ids(self.elem_gdf)

Expand Down Expand Up @@ -1150,7 +1154,7 @@ def finalise(self):
counts_df = pd.DataFrame(self.counts.flatten(), index=index)[0]
counts_df = counts_df.unstack(level='hour').sort_index()
counts_df = counts_df.reset_index().set_index(['elem', self.groupby_person_attribute])
counts_df['total'] = counts_df.sum(1)
counts_df['total'] = counts_df.sum(1, numeric_only=True)
counts_df = counts_df.reset_index().set_index('elem')
key = f"{self.name}_{self.groupby_person_attribute}"
counts_df = self.elem_gdf.join(
Expand All @@ -1168,7 +1172,7 @@ def finalise(self):
totals_df = pd.DataFrame(
data=self.counts, index=self.elem_ids, columns=range(0, self.config.time_periods)
).sort_index()
totals_df['total'] = totals_df.sum(1)
totals_df['total'] = totals_df.sum(1, numeric_only=True)

del self.counts

Expand Down Expand Up @@ -1435,7 +1439,7 @@ def build(self, resources: dict, write_path: Optional[str] = None) -> None:
)
else:
self.logger.debug(f'Filtering stops for mode:{self.mode}.')
self.elem_gdf = self.elem_gdf.loc[viable_stops, :]
self.elem_gdf = self.elem_gdf.loc[list(viable_stops), :]

self.elem_ids, self.elem_indices = self.generate_elem_ids(self.elem_gdf)

Expand Down Expand Up @@ -1527,7 +1531,7 @@ def finalise(self):
counts_df = pd.DataFrame(data.flatten(), index=index)[0]
counts_df = counts_df.unstack(level='hour').sort_index()
counts_df = counts_df.reset_index().set_index(['elem', self.groupby_person_attribute])
counts_df['total'] = counts_df.sum(1)
counts_df['total'] = counts_df.sum(1, numeric_only=True)
counts_df = counts_df.reset_index().set_index('elem')

# Create volume counts output
Expand All @@ -1544,7 +1548,7 @@ def finalise(self):
totals_df = pd.DataFrame(
data=data, index=self.elem_ids, columns=range(0, self.config.time_periods)
).sort_index()
totals_df['total'] = totals_df.sum(1)
totals_df['total'] = totals_df.sum(1, numeric_only=True)

del data

Expand Down Expand Up @@ -1621,7 +1625,7 @@ def build(self, resources: dict, write_path: Optional[str] = None) -> None:
)
else:
self.logger.debug(f'Filtering stops for mode:{self.mode}.')
self.elem_gdf = self.elem_gdf.loc[viable_stops, :]
self.elem_gdf = self.elem_gdf.loc[list(viable_stops), :]

self.elem_ids, self.elem_indices = self.generate_elem_ids(self.elem_gdf)

Expand Down Expand Up @@ -1751,7 +1755,7 @@ def finalise(self):
counts_df.index.name = n

counts_df = counts_df.reset_index().set_index(['origin', 'destination', str(self.groupby_person_attribute)])
counts_df['total'] = counts_df.sum(1)
counts_df['total'] = counts_df.sum(1, numeric_only=True)

counts_df['geometry'] = [
LineString([o, d]) for o, d in zip(counts_df.origin_geometry, counts_df.destination_geometry)
Expand All @@ -1769,7 +1773,7 @@ def finalise(self):
# calc sum across all recorded attribute classes
totals_df = counts_df.reset_index().groupby(
['origin', 'destination']
).sum().reset_index().set_index(['origin', 'destination'])
).sum(numeric_only=True).reset_index().set_index(['origin', 'destination'])

# Join stop data and build geometry
for n in ("origin", "destination"):
Expand Down Expand Up @@ -1867,7 +1871,7 @@ def build(self, resources: dict, write_path: Optional[str] = None) -> None:
)
else:
self.logger.debug(f'Filtering stops for mode:{self.mode}.')
self.elem_gdf = self.elem_gdf.loc[viable_stops, :]
self.elem_gdf = self.elem_gdf.loc[list(viable_stops), :]

self.elem_ids, self.elem_indices = self.generate_elem_ids(self.elem_gdf)

Expand Down Expand Up @@ -2001,7 +2005,7 @@ def finalise(self):

counts_df = counts_df.reset_index().set_index(names+['veh_counts'])
counts_df['route'] = counts_df.index.get_level_values('veh_id').map(self.veh_route)
counts_df['total'] = counts_df.sum(1)
counts_df['total'] = counts_df.sum(1, numeric_only=True)

counts_df['geometry'] = [LineString([o, d]) for o, d in zip(
counts_df.from_stop_geometry, counts_df.to_stop_geometry)]
Expand All @@ -2022,7 +2026,7 @@ def finalise(self):
self.result_dfs[key] = counts_df

# calc sum across all recorded attribute classes
totals_df = counts_df.reset_index().groupby(['from_stop', 'to_stop', 'veh_id', 'route', 'veh_counts']).sum()
totals_df = counts_df.reset_index().groupby(['from_stop', 'to_stop', 'veh_id', 'route', 'veh_counts']).sum(numeric_only=True)

# Join stop data and build geometry
for n in ("from_stop", "to_stop"):
Expand Down
Loading
Loading