diff --git a/seed/static/seed/scss/style.scss b/seed/static/seed/scss/style.scss
index 86c63ba834..51724da3f7 100755
--- a/seed/static/seed/scss/style.scss
+++ b/seed/static/seed/scss/style.scss
@@ -4872,3 +4872,16 @@ ul.r-list {
border-color: #dd2c00;
}
}
+
+.or-text {
+ font-size: 1.3em;
+ text-align: center;
+}
+
+.modal-content-section {
+ padding: 1em;
+
+ &:not(:last-child) {
+ border-bottom: 1px solid #ddd;
+ }
+}
diff --git a/seed/templates/seed/_scripts.html b/seed/templates/seed/_scripts.html
index ccd52971d7..4a7b5edd3c 100644
--- a/seed/templates/seed/_scripts.html
+++ b/seed/templates/seed/_scripts.html
@@ -51,6 +51,7 @@
+
@@ -141,6 +142,7 @@
+
diff --git a/seed/tests/data/mappings/espm-single-mapping.csv b/seed/tests/data/mappings/espm-single-mapping.csv
new file mode 100644
index 0000000000..101b3df8bb
--- /dev/null
+++ b/seed/tests/data/mappings/espm-single-mapping.csv
@@ -0,0 +1,25 @@
+Raw Columns,units,SEED Table,SEED Columns
+How Many Buildings?,,PropertyState,building_count
+City/Municipality,,PropertyState,city
+Construction Status,,PropertyState,Construction Status
+Country,,PropertyState,Country
+Federal Agency/Department,,PropertyState,Federal Agency/Department
+GFA Units,,PropertyState,GFA Units
+Gross Floor Area,ft**2,PropertyState,gross_floor_area
+Irrigated Area,,PropertyState,Irrigated Area
+Irrigated Area Units,,PropertyState,Irrigated Area Units
+Is this an Institutional Property? (Applicable only for Canadian properties),,PropertyState,Is this an Institutional Property? (Applicable only for Canadian properties)
+Is this Property Owned or Operated by the US or Canadian Federal Government?,,PropertyState,Is this Property Owned or Operated by the US or Canadian Federal Government?
+Number of Buildings,,PropertyState,Number of Buildings
+Occupancy (%),,PropertyState,Occupancy
+Other State/Province,,PropertyState,Other State
+Parent Property ID,,PropertyState,Parent Property ID
+Parent Property Name (if Applicable),,PropertyState,Parent Property Name (if Applicable)
+Portfolio Manager ID,,PropertyState,pm_property_id
+Postal Code,,PropertyState,postal_code
+Property Name,,PropertyState,property_name
+Property Type - Self-Selected,,PropertyState,property_type
+State/Province,,PropertyState,state
+Street Address,,PropertyState,address_line_1
+Street Address 2,,PropertyState,address_line_2
+Year Built,,PropertyState,year_built
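The mapping file above pairs raw ESPM report headers with SEED `PropertyState` columns (plus optional units). As a quick illustration of its structure, here is a minimal sketch of a standalone check, assuming it is run from the repository root; the helper name is hypothetical and not part of SEED:

```python
import csv
from pathlib import Path


# Hypothetical helper (not part of SEED) for sanity-checking an ESPM mapping CSV
# before it is loaded into a ColumnMappingProfile.
def summarize_mapping(csv_path: Path) -> dict:
    """Return {raw ESPM header: (units, SEED table, SEED column)}."""
    with csv_path.open(newline='') as f:
        reader = csv.DictReader(f)
        return {
            row['Raw Columns']: (row['units'], row['SEED Table'], row['SEED Columns'])
            for row in reader
        }


mapping = summarize_mapping(Path('seed/tests/data/mappings/espm-single-mapping.csv'))
assert mapping['Gross Floor Area'] == ('ft**2', 'PropertyState', 'gross_floor_area')
```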
diff --git a/seed/tests/data/portfolio-manager-single-22482007.xlsx b/seed/tests/data/portfolio-manager-single-22482007.xlsx
new file mode 100644
index 0000000000..0317d2c7f3
Binary files /dev/null and b/seed/tests/data/portfolio-manager-single-22482007.xlsx differ
diff --git a/seed/tests/test_account_views.py b/seed/tests/test_account_views.py
index 8fb6c31b4a..497da3cff7 100644
--- a/seed/tests/test_account_views.py
+++ b/seed/tests/test_account_views.py
@@ -85,7 +85,6 @@ def test_dict_org(self):
'mapquest_api_key': '',
'geocoding_enabled': True,
'better_analysis_api_key': '',
- 'better_host_url': 'https://better-lbnl-staging.herokuapp.com',
'property_display_field': 'address_line_1',
'taxlot_display_field': 'address_line_1',
'display_meter_units': Organization._default_display_meter_units,
@@ -98,13 +97,18 @@ def test_dict_org(self):
'at_organization_token': '',
'audit_template_user': '',
'audit_template_password': '',
- 'at_host_url': 'https://api.labworks.org',
'salesforce_enabled': False,
'ubid_threshold': 1
}
org_payload = _dict_org(self.fake_request, [self.org])
+
self.assertEqual(len(org_payload), 1)
+ # pull out and test the URLs that can be configured differently based on the test environment.
+ better_url = org_payload[0].pop('better_host_url')
+ self.assertRegex(better_url, r'^https://.*better.*$')
+ at_url = org_payload[0].pop('at_host_url')
+ self.assertRegex(at_url, r'^https://.*labworks.*$|https://buildingenergyscore.energy.gov$')
self.assertDictEqual(org_payload[0], expected_single_org_payload)
# Now let's make sure that we pick up related buildings correctly.
@@ -124,10 +128,11 @@ def test_dict_org(self):
'name': self.cal_year_name,
'cycle_id': self.cycle.pk
}]
- self.assertDictEqual(
- _dict_org(self.fake_request, [self.org])[0],
- expected_single_org_payload
- )
+ org_payload_2 = _dict_org(self.fake_request, [self.org])[0]
+ # pop the configurable host URLs again
+ org_payload_2.pop('better_host_url')
+ org_payload_2.pop('at_host_url')
+ self.assertDictEqual(org_payload_2, expected_single_org_payload)
def test_dict_org_w_member_in_parent_and_child(self):
"""What happens when a user has a role in parent and child."""
@@ -179,7 +184,6 @@ def test_dict_org_w_member_in_parent_and_child(self):
'mapquest_api_key': '',
'geocoding_enabled': True,
'better_analysis_api_key': '',
- 'better_host_url': 'https://better-lbnl-staging.herokuapp.com',
'property_display_field': 'address_line_1',
'taxlot_display_field': 'address_line_1',
'display_meter_units': Organization._default_display_meter_units,
@@ -192,7 +196,6 @@ def test_dict_org_w_member_in_parent_and_child(self):
'at_organization_token': '',
'audit_template_user': '',
'audit_template_password': '',
- 'at_host_url': 'https://api.labworks.org',
'salesforce_enabled': False,
'ubid_threshold': 1
}],
@@ -211,7 +214,6 @@ def test_dict_org_w_member_in_parent_and_child(self):
'mapquest_api_key': '',
'geocoding_enabled': True,
'better_analysis_api_key': '',
- 'better_host_url': 'https://better-lbnl-staging.herokuapp.com',
'property_display_field': 'address_line_1',
'taxlot_display_field': 'address_line_1',
'display_meter_units': Organization._default_display_meter_units,
@@ -224,13 +226,18 @@ def test_dict_org_w_member_in_parent_and_child(self):
'at_organization_token': '',
'audit_template_user': '',
'audit_template_password': '',
- 'at_host_url': 'https://api.labworks.org',
'salesforce_enabled': False,
'ubid_threshold': 1
}
org_payload = _dict_org(self.fake_request, Organization.objects.all())
+ # pop the BETTER and Audit Template host URLs
+ org_payload[0].pop('better_host_url')
+ org_payload[0].pop('at_host_url')
+ org_payload[0]['sub_orgs'][0].pop('better_host_url')
+ org_payload[0]['sub_orgs'][0].pop('at_host_url')
+
self.assertEqual(len(org_payload), 2)
self.assertDictEqual(org_payload[0], expected_multiple_org_payload)
diff --git a/seed/tests/test_api_mixins.py b/seed/tests/test_api_mixins.py
index 4e4fdd0a08..9badca358e 100644
--- a/seed/tests/test_api_mixins.py
+++ b/seed/tests/test_api_mixins.py
@@ -374,18 +374,18 @@ def test_get_profile_id(self):
self.assertListEqual(columns['extra_data'], ['field_1'])
# no extra data
- columnlistprofile = self.column_list_factory.get_columnlistprofile(
+ column_list_profile = self.column_list_factory.get_columnlistprofile(
columns=['address_line_1', 'site_eui']
)
- columns = self.mixin_class.get_show_columns(self.org.id, columnlistprofile.id)
+ columns = self.mixin_class.get_show_columns(self.org.id, column_list_profile.id)
self.assertListEqual(columns['fields'], ['extra_data', 'id', 'address_line_1', 'site_eui'])
self.assertListEqual(columns['extra_data'], [])
# with extra data
- columnlistprofile = self.column_list_factory.get_columnlistprofile(
+ column_list_profile = self.column_list_factory.get_columnlistprofile(
columns=['address_line_1', 'site_eui', 'field_1']
)
- columns = self.mixin_class.get_show_columns(self.org.id, columnlistprofile.id)
+ columns = self.mixin_class.get_show_columns(self.org.id, column_list_profile.id)
self.assertListEqual(columns['fields'], ['extra_data', 'id', 'address_line_1', 'site_eui'])
self.assertListEqual(columns['extra_data'], ['field_1'])
diff --git a/seed/tests/test_portfoliomanager.py b/seed/tests/test_portfoliomanager.py
index 9a25ad1560..891d2dc9c4 100644
--- a/seed/tests/test_portfoliomanager.py
+++ b/seed/tests/test_portfoliomanager.py
@@ -15,6 +15,7 @@
import xmltodict
from django.test import TestCase
from django.urls import reverse_lazy
+from xlrd import open_workbook
from seed.landing.models import SEEDUser as User
from seed.utils.organizations import create_organization
@@ -87,7 +88,7 @@ def test_template_list_interface_no_username(self):
# status should be error
# message should have "missing username"
self.assertEqual(400, resp.status_code)
- data = json.loads(resp.content)
+ data = resp.json()
self.assertIn('status', data)
self.assertIn('message', data)
self.assertEqual('error', data['status'])
@@ -103,7 +104,7 @@ def test_template_list_interface_no_password(self):
# status should be error
# message should have "missing password"
self.assertEqual(400, resp.status_code)
- data = json.loads(resp.content)
+ data = resp.json()
self.assertIn('status', data)
self.assertIn('message', data)
self.assertEqual('error', data['status'])
@@ -120,7 +121,7 @@ def test_template_list_invalid_credentials(self):
# status should be error
# message should have "missing template"
self.assertEqual(400, resp.status_code)
- data = json.loads(resp.content)
+ data = resp.json()
self.assertIn('status', data)
self.assertIn('message', data)
self.assertEqual('error', data['status'])
@@ -209,7 +210,7 @@ def test_report_interface_no_username(self):
# status should be error
# message should have "missing username"
self.assertEqual(400, resp.status_code)
- data = json.loads(resp.content)
+ data = resp.json()
self.assertIn('status', data)
self.assertIn('message', data)
self.assertEqual('error', data['status'])
@@ -225,7 +226,7 @@ def test_report_interface_no_password(self):
# status should be error
# message should have "missing password"
self.assertEqual(400, resp.status_code)
- data = json.loads(resp.content)
+ data = resp.json()
self.assertIn('status', data)
self.assertIn('message', data)
self.assertEqual('error', data['status'])
@@ -241,7 +242,7 @@ def test_report_interface_no_template(self):
# status should be error
# message should have "missing template"
self.assertEqual(400, resp.status_code)
- data = json.loads(resp.content)
+ data = resp.json()
self.assertIn('status', data)
self.assertIn('message', data)
self.assertEqual('error', data['status'])
@@ -266,7 +267,7 @@ def test_report_invalid_credentials(self):
# status should be error
# message should have "missing template"
self.assertEqual(400, resp.status_code)
- data = json.loads(resp.content)
+ data = resp.json()
self.assertIn('status', data)
self.assertIn('message', data)
self.assertEqual('error', data['status'])
@@ -368,6 +369,8 @@ def test_report_generation_empty_child_template(self):
class PortfolioManagerReportSinglePropertyUploadTest(TestCase):
+ """Test case for downloading a report with a single building and saving
+ it to SEED's Dataset upload API."""
def setUp(self):
user_details = {
@@ -398,7 +401,7 @@ def setUp(self):
@pm_skip_test_check
def test_single_property_template_for_upload(self):
- # create a single property report with template
+ # create a single ESPM property report with template
template = {
"children": [],
"display_name": "SEED_Test - Single Property",
@@ -415,11 +418,11 @@ def test_single_property_template_for_upload(self):
)
self.assertEqual(200, report_response.status_code)
- property_info = json.loads(report_response.content)
+ property_info = report_response.json()
self.assertEqual(1, len(property_info['properties']))
self.assertIsInstance(property_info['properties'], list)
- # add report to dataset
+ # add report to SEED's dataset
response = self.client.post(
reverse_lazy('api:v3:upload-create-from-pm-import'),
json.dumps({
@@ -431,6 +434,70 @@ def test_single_property_template_for_upload(self):
self.assertEqual(200, response.status_code)
+class PortfolioManagerSingleReportXLSX(TestCase):
+ """Test downloading a single ESPM report in XLSX format."""
+
+ def setUp(self):
+ user_details = {
+ 'username': 'test_user@demo.com',
+ 'password': 'test_pass',
+ }
+ self.user = User.objects.create_superuser(
+ email='test_user@demo.com', **user_details
+ )
+ self.org, _, _ = create_organization(self.user)
+ self.client.login(**user_details)
+
+ self.pm_un = os.environ.get(PM_UN, False)
+ self.pm_pw = os.environ.get(PM_PW, False)
+ if not self.pm_un or not self.pm_pw:
+ self.fail('Somehow PM test was initiated without %s or %s in the environment' % (PM_UN, PM_PW))
+
+ self.output_dir = Path(__file__).parent.absolute() / 'output'
+ if not self.output_dir.exists():
+ os.mkdir(self.output_dir)
+
+ @pm_skip_test_check
+ def test_single_report_download(self):
+ # PM ID 22178850 is a more complete test case with meter data
+ pm_id = 22178850
+
+ # remove the file if it exists
+ new_file = self.output_dir / f"single_property_{pm_id}.xlsx"
+ if new_file.exists():
+ new_file.unlink()
+ self.assertFalse(new_file.exists())
+
+ pm = PortfolioManagerImport(self.pm_un, self.pm_pw)
+
+ content = pm.return_single_property_report(pm_id)
+ self.assertIsNotNone(content)
+ with open(new_file, 'wb') as file:
+ file.write(content)
+
+ self.assertTrue(new_file.exists())
+
+ # load the XLSX file and ensure that it has the right tabs
+ workbook = open_workbook(new_file)
+ self.assertIn('Property', workbook.sheet_names())
+ self.assertIn('Meters', workbook.sheet_names())
+ self.assertIn('Meter Entries', workbook.sheet_names())
+
+ # verify that the Property worksheet has the PM id in it
+ sheet = workbook.sheet_by_name('Property')
+ self.assertIn(str(pm_id), str(sheet._cell_values))
+
+ @pm_skip_test_check
+ def test_single_report_view(self):
+ pm_id = 22178850
+ response = self.client.post(
+ reverse_lazy('api:v3:portfolio_manager-download', args=[pm_id]),
+ json.dumps({"username": self.pm_un, "password": self.pm_pw}),
+ content_type='application/json',
+ )
+ self.assertEqual(200, response.status_code)
+
+
class PortfolioManagerReportParsingTest(TestCase):
"""Test the parsing of the resulting PM XML file. This is only for the
version 2 parsing"""
diff --git a/seed/tests/test_property_views.py b/seed/tests/test_property_views.py
index 010cb5ae28..b14d6572a6 100644
--- a/seed/tests/test_property_views.py
+++ b/seed/tests/test_property_views.py
@@ -29,8 +29,10 @@
from seed.lib.xml_mapping.mapper import default_buildingsync_profile_mappings
from seed.models import (
DATA_STATE_MAPPING,
+ DATA_STATE_MATCHING,
GREEN_BUTTON,
PORTFOLIO_METER_USAGE,
+ PORTFOLIO_RAW,
SEED_DATA_SOURCES,
BuildingFile,
Column,
@@ -47,6 +49,10 @@
TaxLotView
)
from seed.models.sensors import DataLogger, Sensor, SensorReading
+from seed.serializers.properties import (
+ PropertyStatePromoteWritableSerializer,
+ PropertyStateSerializer
+)
from seed.test_helpers.fake import (
FakeColumnFactory,
FakeColumnListProfileFactory,
@@ -93,6 +99,76 @@ def setUp(self):
self.column_list_factory = FakeColumnListProfileFactory(organization=self.org)
self.client.login(**user_details)
+ def test_create_property(self):
+ state = self.property_state_factory.get_property_state()
+ cycle_id = self.cycle.id
+
+ params = json.dumps({
+ "cycle_id": cycle_id,
+ "state": PropertyStateSerializer(state).data
+ })
+
+ url = reverse('api:v3:properties-list') + '?organization_id={}'.format(self.org.pk)
+ response = self.client.post(url, params, content_type='application/json')
+ self.assertEqual(response.status_code, 201)
+ self.assertEqual(response.json()['status'], 'success')
+
+ def test_create_property_in_diff_org(self):
+ state = self.property_state_factory.get_property_state()
+ cycle_id = self.cycle.id
+ user_2 = User.objects.create_superuser(
+ **{'username': 'test_user2@demo.com', 'password': 'test_pass', 'email': 'test_user2@demo.com'})
+ org_2, _, _ = create_organization(user_2)
+
+ # verify that user (1) can't post to user_2's org
+ params = json.dumps({
+ "cycle_id": cycle_id,
+ "state": PropertyStateSerializer(state).data
+ })
+ url = reverse('api:v3:properties-list') + '?organization_id={}'.format(org_2.pk)
+ response = self.client.post(url, params, content_type='application/json')
+ self.assertEqual(response.status_code, 403)
+ self.assertEqual(response.json()['detail'], 'You do not have permission to perform this action.')
+
+ def test_create_property_with_protected_fields(self):
+ state = self.property_state_factory.get_property_state()
+ state.normalized_address = '741 Evergreen Terrace'
+ state.data_state = 999
+ cycle_id = self.cycle.id
+
+ params = json.dumps({
+ "cycle_id": cycle_id,
+ "state": PropertyStateSerializer(state).data
+ })
+
+ url = reverse('api:v3:properties-list') + '?organization_id={}'.format(self.org.pk)
+ response = self.client.post(url, params, content_type='application/json')
+ self.assertEqual(response.status_code, 201)
+ self.assertEqual(response.json()['status'], 'success')
+
+ # verify that the protected fields were not overwritten
+ new_state_data = response.json()['view']['state']
+ self.assertNotEqual(new_state_data['normalized_address'], state.normalized_address)
+ self.assertNotEqual(new_state_data['data_state'], state.data_state)
+ self.assertEqual(new_state_data['data_state'], DATA_STATE_MATCHING)
+
+ # The checks above were spot checks; now inspect the serializer and make sure
+ # that the protected column fields are read_only.
+ serializer = PropertyStatePromoteWritableSerializer(new_state_data)
+ protected_columns = list(set(Column.EXCLUDED_MAPPING_FIELDS + Column.COLUMN_EXCLUDE_FIELDS))
+ # go through each of the Column class's protected columns and ensure that the
+ # corresponding serializer field is read only, mapping related object IDs to field names
+ protected_columns.pop(protected_columns.index('import_file'))
+ protected_columns.pop(protected_columns.index('extra_data')) # extra_data is allowed
+ protected_columns.append('import_file_id')
+ protected_columns.append('measures')
+ protected_columns.append('scenarios')
+ protected_columns.append('files')
+
+ for column in protected_columns:
+ self.assertIsNotNone(serializer.fields.get(column), f"Column {column} is not in the serializer")
+ self.assertTrue(serializer.fields[column].read_only, f"Column {column} is not read_only in the write serializer")
+
def test_get_and_edit_properties(self):
state = self.property_state_factory.get_property_state()
prprty = self.property_factory.get_property()
@@ -149,8 +225,9 @@ def test_upload_inventory_document_and_delete(self):
view = PropertyView.objects.create(
property=prprty, cycle=self.cycle, state=state
)
- location = os.path.realpath(os.path.join(os.getcwd(), os.path.dirname(__file__)))
- test_filepath = os.path.relpath(os.path.join(location, 'data', 'test-document.pdf'))
+
+ test_filepath = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data', 'test-document.pdf')
+
url = reverse('api:v3:properties-detail', args=[view.id]) + f'upload_inventory_document/?organization_id={self.org.pk}'
document = open(test_filepath, 'rb')
@@ -2132,3 +2209,84 @@ def test_property_meter_usage_can_filter_when_usages_span_a_single_month(self):
self.assertCountEqual(result_dict['readings'], expectation['readings'])
self.assertCountEqual(result_dict['column_defs'], expectation['column_defs'])
+
+
+class PropertyViewUpdateWithESPMTests(DataMappingBaseTestCase):
+ def setUp(self):
+ user_details = {
+ 'username': 'test_user@demo.com',
+ 'password': 'test_pass',
+ 'email': 'test_user@demo.com'
+ }
+ selfvars = self.set_up(
+ PORTFOLIO_RAW, user_details['username'], user_details['password']
+ )
+ self.user, self.org, self.import_file_1, self.import_record_1, self.cycle_1 = selfvars
+
+ # create the test factories
+ self.column_factory = FakeColumnFactory(organization=self.org)
+ self.cycle_factory = FakeCycleFactory(organization=self.org, user=self.user)
+ self.property_factory = FakePropertyFactory(organization=self.org)
+ self.property_state_factory = FakePropertyStateFactory(organization=self.org)
+ self.property_view_factory = FakePropertyViewFactory(organization=self.org)
+ self.column_list_factory = FakeColumnListProfileFactory(organization=self.org)
+
+ # log into the client
+ self.client.login(**user_details)
+
+ def test_update_property_view_with_espm(self):
+ """Simple test to verify that the property state is merged with an updated
+ ESPM download XLSX file."""
+ pm_property_id = '22482007'
+ pv = self.property_view_factory.get_property_view(
+ cycle=self.cycle_1, pm_property_id=pm_property_id
+ )
+ self.assertEqual(pv.state.pm_property_id, pm_property_id)
+
+ # save some of the pv state's data to verify merging
+ pv_city = pv.state.city
+ pv_address_line_1 = pv.state.address_line_1
+ pv_site_eui = pv.state.site_eui
+
+ mapping_filepath = os.path.join(
+ os.path.dirname(os.path.abspath(__file__)), 'data', 'mappings', 'espm-single-mapping.csv'
+ )
+
+ # need to upload the mappings for the ESPM data to a new profile
+ mapping_profile = ColumnMappingProfile.create_from_file(
+ mapping_filepath, self.org, 'ESPM', overwrite_if_exists=True
+ )
+
+ test_filepath = os.path.join(
+ os.path.dirname(os.path.abspath(__file__)), 'data', f'portfolio-manager-single-{pm_property_id}.xlsx'
+ )
+
+ url = reverse('api:v3:properties-update-with-espm', args=[pv.id])
+ url += f"?organization_id={self.org.id}&cycle_id={self.cycle_1.id}&mapping_profile_id={mapping_profile.id}"
+ doc = open(test_filepath, 'rb')
+ # The data must be encoded as multipart form data since this is a PUT; the test
+ # client only defaults to multipart encoding for POST, so for a PUT we construct it explicitly.
+ response = self.client.put(
+ path=url,
+ data=encode_multipart(data=dict(
+ file=doc,
+ file_type='XLSX',
+ name=doc.name),
+ boundary=BOUNDARY),
+ content_type=MULTIPART_CONTENT
+ )
+ self.assertEqual(response.status_code, 200)
+
+ # now spot check that some of the fields were updated
+ pv.refresh_from_db()
+ self.assertNotEqual(pv.state.city, pv_city)
+ self.assertNotEqual(pv.state.address_line_1, pv_address_line_1)
+ # site_eui should not have changed
+ self.assertEqual(pv.state.site_eui.magnitude, pv_site_eui)
+
+ # check that the values match what is in the XLSX file
+ self.assertEqual(pv.state.city, 'WASHINGTON')
+ self.assertEqual(pv.state.address_line_1, '2425 N STREET NW')
+
+ # verify that the property has meters too, which came from the XLSX file
+ self.assertEqual(pv.property.meters.count(), 2)
diff --git a/seed/tests/util.py b/seed/tests/util.py
index 7f2624cd0b..f505821866 100644
--- a/seed/tests/util.py
+++ b/seed/tests/util.py
@@ -89,14 +89,14 @@ def tearDown(self):
class DataMappingBaseTestCase(DeleteModelsTestCase):
"""Base Test Case Class to handle data import"""
- def set_up(self, import_file_source_type):
+ def set_up(self, import_file_source_type, user_name='test_user@demo.com', user_password='test_pass'):
# default_values
import_file_data_state = getattr(self, 'import_file_data_state', DATA_STATE_IMPORT)
- if not User.objects.filter(username='test_user@demo.com').exists():
- user = User.objects.create_user('test_user@demo.com', password='test_pass')
+ if not User.objects.filter(username=user_name).exists():
+ user = User.objects.create_user(user_name, password=user_password)
else:
- user = User.objects.get(username='test_user@demo.com')
+ user = User.objects.get(username=user_name)
org, _, _ = create_organization(user, "test-organization-a")
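The widened `set_up` signature above lets derived test cases supply their own credentials instead of the hard-coded defaults. A minimal sketch of a hypothetical subclass using it (the class name and credential values are illustrative only):

```python
from seed.models import PORTFOLIO_RAW
from seed.tests.util import DataMappingBaseTestCase


class ExampleEspmTestCase(DataMappingBaseTestCase):
    """Illustrative subclass only; not part of this changeset."""

    def setUp(self):
        # set_up now accepts a username/password pair instead of hard-coding
        # test_user@demo.com, and still returns the same five objects.
        self.user, self.org, self.import_file, self.import_record, self.cycle = self.set_up(
            PORTFOLIO_RAW, 'espm_tester@example.com', 'espm_test_pass'
        )
```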
diff --git a/seed/utils/api_schema.py b/seed/utils/api_schema.py
index 44409894f3..9013990c41 100644
--- a/seed/utils/api_schema.py
+++ b/seed/utils/api_schema.py
@@ -16,6 +16,7 @@ class AutoSchemaHelper(SwaggerAutoSchema):
'string': openapi.TYPE_STRING,
'boolean': openapi.TYPE_BOOLEAN,
'integer': openapi.TYPE_INTEGER,
+ 'object': openapi.TYPE_OBJECT,
}
@classmethod
diff --git a/seed/views/v3/analyses.py b/seed/views/v3/analyses.py
index c6982eece0..a126ebf32c 100644
--- a/seed/views/v3/analyses.py
+++ b/seed/views/v3/analyses.py
@@ -369,7 +369,7 @@ def get_counts(field_name):
property_types = get_counts('extra_data__Largest Property Use Type')
year_built = get_counts('year_built')
energy = get_counts('site_eui')
- sqftage = get_counts('gross_floor_area')
+ square_footage = get_counts('gross_floor_area')
from collections import defaultdict
@@ -443,16 +443,16 @@ def get_counts(field_name):
e[f['site_eui']] += f['count']
energy_list2 = [{'site_eui': site_eui, 'percentage': count / views.count() * 100} for site_eui, count in e.items()]
- sqftage_list = []
- for i in sqftage:
+ square_footage_list = []
+ for i in square_footage:
dict = i.copy()
for k, v in i.items():
if isinstance(v, Quantity):
dict[k] = v.to(ureg.feet**2).magnitude
- sqftage_list.append(dict)
+ square_footage_list.append(dict)
- sqftage_agg = []
- for record in sqftage_list:
+ square_footage_agg = []
+ for record in square_footage_list:
dict = record.copy()
if isinstance(record['gross_floor_area'], float):
if 0 < record['gross_floor_area'] <= 1000:
@@ -475,12 +475,12 @@ def get_counts(field_name):
dict['gross_floor_area'] = "500,000-1,000,000"
else:
dict['gross_floor_area'] = "> 1,000,000"
- sqftage_agg.append(dict)
+ square_footage_agg.append(dict)
g = defaultdict(int)
- for h in sqftage_agg:
+ for h in square_footage_agg:
g[h['gross_floor_area']] += h['count']
- sqftage_list2 = [{'gross_floor_area': gross_floor_area, 'percentage': count / views.count() * 100} for gross_floor_area, count in g.items()]
+ square_footage_list2 = [{'gross_floor_area': gross_floor_area, 'percentage': count / views.count() * 100} for gross_floor_area, count in g.items()]
extra_data_list = []
for data in states.values_list('extra_data', flat=True):
@@ -509,7 +509,7 @@ def get_counts(field_name):
'property_types': property_types,
'year_built': year_built_list,
'energy': energy_list2,
- 'square_footage': sqftage_list2
+ 'square_footage': square_footage_list2
})
@swagger_auto_schema(manual_parameters=[
diff --git a/seed/views/v3/columns.py b/seed/views/v3/columns.py
index c51e7701b0..2508fb903e 100644
--- a/seed/views/v3/columns.py
+++ b/seed/views/v3/columns.py
@@ -115,7 +115,7 @@ def list(self, request):
@api_endpoint_class
@ajax_request_class
def create(self, request):
- self.get_organization(self.request)
+ org_id = self.get_organization(self.request)
table_name = self.request.data.get("table_name")
if table_name != "PropertyState" and table_name != "TaxLotState":
@@ -125,6 +125,9 @@ def create(self, request):
}, status=status.HTTP_400_BAD_REQUEST)
try:
+ # set the request data's organization_id to org_id in case it is missing or set incorrectly
+ self.request.data['organization_id'] = org_id
+
new_column = Column.objects.create(
is_extra_data=True,
**self.request.data
diff --git a/seed/views/v3/portfolio_manager.py b/seed/views/v3/portfolio_manager.py
index 7832c2cbfb..c25e6032e2 100644
--- a/seed/views/v3/portfolio_manager.py
+++ b/seed/views/v3/portfolio_manager.py
@@ -7,10 +7,11 @@
import json
import logging
import time
+from datetime import datetime
import requests
import xmltodict
-from django.http import JsonResponse
+from django.http import HttpResponse, JsonResponse
from drf_yasg.utils import swagger_auto_schema
from rest_framework import serializers, status
from rest_framework.decorators import action
@@ -31,7 +32,7 @@ class PortfolioManagerSerializer(serializers.Serializer):
class PortfolioManagerViewSet(GenericViewSet):
"""
- This viewset contains two API views: /template_list/ and /report/ that are used to interface SEED with ESPM
+ This ViewSet contains two API views: /template_list/ and /report/ that are used to interface SEED with ESPM
"""
serializer_class = PortfolioManagerSerializer
@@ -100,7 +101,8 @@ def template_list(self, request):
'password': 'string',
'template': {
'[copy information from template_list]': 'string'
- }
+ },
+ 'report_format': 'string'
},
description='ESPM account credentials.',
required=['username', 'password']
@@ -146,9 +148,13 @@ def report(self, request):
{'status': 'error', 'message': 'Invalid call to PM worker: missing template for PM account'},
status=status.HTTP_400_BAD_REQUEST
)
+
username = request.data['username']
password = request.data['password']
template = request.data['template']
+ # report format defaults to XML if not provided
+ report_format = request.data.get('report_format', 'XML')
+
pm = PortfolioManagerImport(username, password)
try:
try:
@@ -162,14 +168,34 @@ def report(self, request):
status=status.HTTP_400_BAD_REQUEST
)
if template['z_seed_child_row']:
- content = pm.generate_and_download_child_data_request_report(template)
+ content = pm.generate_and_download_child_data_request_report(template, report_format)
else:
- content = pm.generate_and_download_template_report(template)
+ content = pm.generate_and_download_template_report(template, report_format)
except PMExcept as pme:
_log.debug("%s: %s" % (str(pme), str(template)))
return JsonResponse({'status': 'error', 'message': str(pme)}, status=status.HTTP_400_BAD_REQUEST)
+
+ if report_format == 'EXCEL':
+ try:
+ # return the excel file
+ filename = 'pm_report_export.xlsx'
+ response = HttpResponse(
+ content_type='application/vnd.openxmlformats-officedocument.spreadsheetml.sheet')
+ response['Content-Disposition'] = 'attachment; filename="{}"'.format(filename)
+ response.write(content)
+ return response
+
+ except Exception as e:
+ _log.debug("ERROR downloading EXCEL report: %s" % str(e))
+ return JsonResponse(
+ {'status': 'error', 'message': 'Error downloading EXCEL report from template'},
+ status=status.HTTP_400_BAD_REQUEST
+ )
+
+ # rest is for XML reports
try:
content_object = xmltodict.parse(content, dict_constructor=dict)
+
except Exception: # catch all because xmltodict doesn't specify a class of Exceptions
_log.debug("Malformed XML from template download: %s" % str(content))
return JsonResponse(
@@ -205,15 +231,67 @@ def report(self, request):
_log.debug("%s: %s" % (e, str(request.data)))
return JsonResponse({'status': 'error', 'message': e}, status=status.HTTP_400_BAD_REQUEST)
+ @swagger_auto_schema(
+ manual_parameters=[
+ AutoSchemaHelper.query_integer_field('id', True, 'ID of the ESPM Property to download')
+ ],
+ request_body=AutoSchemaHelper.schema_factory(
+ {
+ 'username': 'string',
+ 'password': 'string',
+ 'filename': 'string',
+ },
+ description='ESPM account credentials.',
+ required=['username', 'password']
+ ),
+ )
+ @action(detail=True, methods=['POST'])
+ def download(self, request, pk):
+ """Download a single property report from Portfolio Manager. The PK is the
+ PM property ID that is on ESPM"""
+ if 'username' not in request.data:
+ _log.debug("Invalid call to PM worker: missing username for PM account: %s" % str(request.data))
+ return JsonResponse(
+ {'status': 'error', 'message': 'Invalid call to PM worker: missing username for PM account'},
+ status=status.HTTP_400_BAD_REQUEST
+ )
+ if 'password' not in request.data:
+ _log.debug("Invalid call to PM worker: missing password for PM account: %s" % str(request.data))
+ return JsonResponse(
+ {'status': 'error', 'message': 'Invalid call to PM worker: missing password for PM account'},
+ status=status.HTTP_400_BAD_REQUEST
+ )
+
+ username = request.data['username']
+ password = request.data['password']
+ if 'filename' not in request.data:
+ filename = f'pm_{pk}_{datetime.strftime(datetime.now(), "%Y%m%d_%H%M%S")}.xlsx'
+ else:
+ filename = request.data['filename']
+
+ pm = PortfolioManagerImport(username, password)
+ try:
+ content = pm.return_single_property_report(pk)
+
+ # return the excel file as the HTTP response
+ response = HttpResponse(
+ content_type='application/vnd.openxmlformats-officedocument.spreadsheetml.sheet')
+ response['Content-Disposition'] = 'attachment; filename="{}"'.format(filename)
+ response.write(content)
+ return response
+ except PMExcept as pme:
+ _log.debug(f"{str(pme)}: PM Property ID {pk}")
+ return JsonResponse({'status': 'error', 'message': str(pme)}, status=status.HTTP_400_BAD_REQUEST)
+
+
+# TODO: Move this object to /seed/utils/portfolio_manager.py
class PortfolioManagerImport(object):
- """
- This class is essentially a wrapper around the ESPM login/template/report operations
+ """This class is essentially a wrapper around the ESPM login/template/report operations
"""
def __init__(self, m_username, m_password):
- """
- To instantiate this class, provide ESPM username and password. Currently, this constructor doesn't do anything
+ """To instantiate this class, provide ESPM username and password. Currently, this constructor doesn't do anything
except store the credentials.
:param m_username: The ESPM username
@@ -231,9 +309,10 @@ def __init__(self, m_username, m_password):
# The root URL for downloading the report, code will add the template ID and the XML
self.DOWNLOAD_REPORT_URL = "https://portfoliomanager.energystar.gov/pm/reports/download"
+ self.DOWNLOAD_SINGLE_PROPERTY_REPORT_URL = "https://portfoliomanager.energystar.gov/pm/property"
+
def login_and_set_cookie_header(self):
- """
- This method calls out to ESPM to perform a login operation and get a session authentication token. This token
+ """This method calls out to ESPM to perform a login operation and get a session authentication token. This token
is then stored in the proper form to allow authenticated calls into ESPM.
:return: None
@@ -266,8 +345,7 @@ def login_and_set_cookie_header(self):
}
def get_list_of_report_templates(self):
- """
- New method to support update to ESPM
+ """New method to support update to ESPM
:return: Returns a list of template objects. All rows will have a z_seed_child_row key that is False for main
rows and True for child rows
@@ -315,7 +393,7 @@ def get_list_of_report_templates(self):
raise PMExcept('Unsuccessful response from child row template lookup; aborting.')
try:
# the data are now in the string of the data key of the returned dictionary with an excessive amount of
- # escaped doublequotes.
+ # escaped double quotes.
# e.g., response = {"data": "{"customReportsData":"..."}"}
decoded = json.loads(children_response.text) # .encode('utf-8').decode('unicode_escape')
@@ -351,11 +429,45 @@ def get_template_by_name(templates, template_name):
_log.debug("Desired report name found, template info: " + json.dumps(matched_template, indent=2))
return matched_template
- def generate_and_download_template_report(self, matched_template):
+ # TODO: Is there a need to call just this instead of generate_and_download...?
+ def update_template_report(self, template, start_month, start_year, end_month, end_year, property_ids):
+ """This method calls out to ESPM to (re)generate a specific template
+
+ :param template: A specific template object
+ :param start_month: reporting period start month
+ :param start_year: reporting period start year
+ :param end_month: reporting period end month
+ :param end_year: reporting period end year
+ :param property_ids: list of property IDs to include in the report
+ :return: response content from the report generation request
+ """
+ # login if needed
+ if not self.authenticated_headers:
+ self.login_and_set_cookie_header()
+
+ template_report_id = template['id']
+ update_report_url = 'https://portfoliomanager.energystar.gov/pm/reports/generateData/' + str(template_report_id)
+
+ new_authenticated_headers = self.authenticated_headers.copy()
+ new_authenticated_headers['Content-Type'] = 'application/x-www-form-urlencoded'
+
+ try:
+ response = requests.post(update_report_url, headers=new_authenticated_headers)
+ except requests.exceptions.SSLError:
+ raise PMExcept('SSL Error in Portfolio Manager Query; check VPN/Network/Proxy.')
+ if not response.status_code == status.HTTP_200_OK:
+ raise PMExcept('Unsuccessful response from POST to update report; aborting.')
+ _log.debug('Triggered report update,\n status code=' + str(
+ response.status_code) + '\n response headers=' + str(
+ response.headers))
+
+ return response.content
+
+ def generate_and_download_template_report(self, matched_template, report_format='XML'):
"""
This method calls out to ESPM to trigger generation of a report for the supplied template. The process requires
calling out to the generateData/ endpoint on ESPM, followed by a waiting period for the template status to be
- updated to complete. Once complete, a download URL allows download of the report in XML format.
+ updated to complete. Once complete, a download URL allows download of the report in XML or EXCEL format.
This response content can be enormous, so ...
TODO: Evaluate whether we should just download this XML to file here. It would require re-reading the file
@@ -402,7 +514,7 @@ def generate_and_download_template_report(self, matched_template):
template_objects = response.json()['reportTabData']
for t in template_objects:
- if 'id' in t and t['id'] == matched_template['id']:
+ if 'id' in t and t['id'] == template_report_id:
this_matched_template = t
break
else:
@@ -423,18 +535,18 @@ def generate_and_download_template_report(self, matched_template):
# Finally we can download the generated report
try:
- response = requests.get(self.download_url(matched_template['id']), headers=self.authenticated_headers)
+ response = requests.get(self.download_url(template_report_id, report_format), headers=self.authenticated_headers)
except requests.exceptions.SSLError:
raise PMExcept('SSL Error in Portfolio Manager Query; check VPN/Network/Proxy.')
if not response.status_code == status.HTTP_200_OK:
error_message = 'Unsuccessful response from GET trying to download generated report;'
error_message += ' Generated report name: ' + matched_template['name'] + ';'
- error_message += ' Tried to download report from URL: ' + self.download_url(matched_template['id']), + ';'
+ error_message += ' Tried to download report from URL: ' + self.download_url(template_report_id, report_format) + ';'
error_message += ' Returned with a status code = ' + response.status_code + ';'
raise PMExcept(error_message)
return response.content
- def generate_and_download_child_data_request_report(self, matched_data_request):
+ def generate_and_download_child_data_request_report(self, matched_data_request, report_format='XML'):
"""
Updated for recent update of ESPM
@@ -454,7 +566,7 @@ def generate_and_download_child_data_request_report(self, matched_data_request):
# Generate the url to download this file
try:
- response = requests.get(self.download_url(matched_data_request["id"]), headers=self.authenticated_headers, allow_redirects=True)
+ response = requests.get(self.download_url(matched_data_request["id"], report_format), headers=self.authenticated_headers, allow_redirects=True)
except requests.exceptions.SSLError:
raise PMExcept('SSL Error in Portfolio Manager Query; check VPN/Network/Proxy.')
@@ -463,6 +575,41 @@ def generate_and_download_child_data_request_report(self, matched_data_request):
return response.content
+ def return_single_property_report(self, pm_property_id: int):
+ """Return (in memory) a single property report from ESPM based on the passed
+ ESPM Property ID (SEED calls this the pm_property_id). This method returns
+ the XLSX file in memory with all the tabs for the single property.
+
+ This method differs from the others in that it does not need to know the
+ report template; it simply returns the entire ESPM record (meters and all).
+
+ Args:
+ pm_property_id (int): The ESPM Property ID to download.
+
+ Returns:
+ bytes: content of the XLSX file, which the caller will need to persist
+ """
+ # login if needed
+ if not self.authenticated_headers:
+ self.login_and_set_cookie_header()
+
+ # Generate the url to download this file
+ try:
+ response = requests.get(
+ self.download_url_single_report(pm_property_id),
+ headers=self.authenticated_headers,
+ allow_redirects=True
+ )
+
+ if response.status_code == status.HTTP_200_OK:
+
+ return response.content
+ else:
+ raise PMExcept('Unsuccessful response from GET trying to download single report; aborting.')
+
+ except requests.exceptions.SSLError:
+ raise PMExcept('SSL Error in Portfolio Manager Query; check VPN/Network/Proxy.')
+
def _parse_properties_v1(self, xml):
"""Parse the XML (in dict format) response from the ESPM API and return a list of
properties. This version was implemented prior to 02/13/2023
@@ -532,6 +679,19 @@ def _flatten_property_metrics(pm):
return True, return_data
- def download_url(self, template_id):
- """helper method to assemble the download url for a given template id"""
- return f'{self.DOWNLOAD_REPORT_URL}/{template_id}/XML?testEnv=false&filterResponses=false'
+ def download_url(self, template_id, report_format='XML'):
+ """helper method to assemble the download url for a given template id. Default format is XML"""
+ return f"{self.DOWNLOAD_REPORT_URL}/{template_id}/{report_format}?testEnv=false&filterResponses=false"
+
+ def download_url_single_report(self, pm_property_id: int) -> str:
+ """helper method to assemble the download url for a single property report.
+
+ Args:
+ pm_property_id (int): PM Property ID to download
+
+ Returns:
+ str: URL
+ """
+ url = f"{self.DOWNLOAD_SINGLE_PROPERTY_REPORT_URL}/{pm_property_id}/download/{pm_property_id}.xlsx"
+ _log.debug(f"ESPM single property download URL is {url}")
+ return url
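The new single-property download path can also be exercised directly against `PortfolioManagerImport`, as the tests above do. A minimal sketch, assuming valid ESPM credentials are available in the environment (the environment variable names here are placeholders; the tests resolve the real names through their `PM_UN`/`PM_PW` constants):

```python
import os

from seed.views.v3.portfolio_manager import PortfolioManagerImport

# Placeholder environment variable names for ESPM credentials.
username = os.environ['ESPM_USERNAME']
password = os.environ['ESPM_PASSWORD']

pm = PortfolioManagerImport(username, password)

# Download the full XLSX export for one ESPM property and persist it to disk,
# mirroring what the new `download` action does for the HTTP response.
pm_property_id = 22178850  # sample ID used in the tests above
content = pm.return_single_property_report(pm_property_id)
with open(f'pm_{pm_property_id}.xlsx', 'wb') as f:
    f.write(content)
```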
diff --git a/seed/views/v3/properties.py b/seed/views/v3/properties.py
index 3bf53d1437..34d56cfe04 100644
--- a/seed/views/v3/properties.py
+++ b/seed/views/v3/properties.py
@@ -6,6 +6,8 @@
import os
from collections import namedtuple
+from django.conf import settings
+from django.core.files.storage import FileSystemStorage
from django.db.models import Q, Subquery
from django.http import HttpResponse, JsonResponse
from django_filters import CharFilter, DateFilter
@@ -17,16 +19,26 @@
from rest_framework.renderers import JSONRenderer
from seed.building_sync.building_sync import BuildingSync
+from seed.data_importer import tasks
+from seed.data_importer.match import save_state_match
+from seed.data_importer.meters_parser import MetersParser
+from seed.data_importer.models import ImportFile, ImportRecord
+from seed.data_importer.tasks import _save_pm_meter_usage_data_task
from seed.data_importer.utils import kbtu_thermal_conversion_factors
from seed.decorators import ajax_request_class
from seed.hpxml.hpxml import HPXML
+from seed.lib.progress_data.progress_data import ProgressData
from seed.lib.superperms.orgs.decorators import has_perm_class
from seed.models import (
+ AUDIT_USER_CREATE,
AUDIT_USER_EDIT,
+ DATA_STATE_MAPPING,
DATA_STATE_MATCHING,
MERGE_STATE_DELETE,
MERGE_STATE_MERGED,
MERGE_STATE_NEW,
+ PORTFOLIO_RAW,
+ SEED_DATA_SOURCES,
Analysis,
BuildingFile,
Column,
@@ -36,6 +48,7 @@
InventoryDocument,
Meter,
Note,
+ Organization,
Property,
PropertyAuditLog,
PropertyMeasure,
@@ -50,6 +63,7 @@
from seed.serializers.pint import PintJSONEncoder
from seed.serializers.properties import (
PropertySerializer,
+ PropertyStatePromoteWritableSerializer,
PropertyStateSerializer,
PropertyViewAsStateSerializer,
PropertyViewSerializer,
@@ -1012,6 +1026,139 @@ def retrieve(self, request, pk=None):
else:
return JsonResponse(result, status=status.HTTP_404_NOT_FOUND)
+ @swagger_auto_schema(
+ manual_parameters=[
+ AutoSchemaHelper.query_org_id_field(),
+ ],
+ request_body=AutoSchemaHelper.schema_factory(
+ {
+ 'cycle_id': 'integer',
+ 'state': 'object',
+ },
+ required=['cycle_id', 'state']
+ ),
+ )
+ @api_endpoint_class
+ @ajax_request_class
+ @has_perm_class('can_modify_data')
+ def create(self, request):
+ """
+ Create a PropertyState and PropertyView (via promote) for the given cycle
+ """
+ org_id = self.get_organization(self.request)
+ data = request.data
+ # get state data
+ property_state_data = data.get('state', None)
+ cycle_pk = data.get('cycle_id', None)
+
+ if cycle_pk is None:
+ return JsonResponse({
+ 'status': 'error',
+ 'message': 'Missing required parameter cycle_id',
+ }, status=status.HTTP_400_BAD_REQUEST)
+
+ if property_state_data is None:
+ return JsonResponse({
+ 'status': 'error',
+ 'message': 'Missing required parameter state',
+ }, status=status.HTTP_400_BAD_REQUEST)
+
+ # ensure that state organization_id is set to org in the request
+ state_org_id = property_state_data.get('organization_id', org_id)
+ if state_org_id != org_id:
+ return JsonResponse({
+ 'status': 'error',
+ 'message': 'State organization_id does not match request organization_id',
+ }, status=status.HTTP_400_BAD_REQUEST)
+ property_state_data['organization_id'] = state_org_id
+
+ # get cycle
+ try:
+ cycle = Cycle.objects.get(pk=cycle_pk, organization_id=org_id)
+ except Cycle.DoesNotExist:
+ return JsonResponse({
+ 'status': 'error',
+ 'message': 'Invalid cycle_id',
+ }, status=status.HTTP_400_BAD_REQUEST)
+
+ # set empty strings to None
+ try:
+ for key, val in property_state_data.items():
+ if val == '':
+ property_state_data[key] = None
+ except AttributeError:
+ return JsonResponse({
+ 'status': 'error',
+ 'message': 'Invalid state',
+ }, status=status.HTTP_400_BAD_REQUEST)
+
+ # extra data fields that do not match existing columns will not be imported
+ extra_data_columns = list(Column.objects.filter(
+ organization_id=org_id,
+ table_name='PropertyState',
+ is_extra_data=True,
+ derived_column_id=None
+ ).values_list('column_name', flat=True))
+
+ extra_data = property_state_data.get('extra_data', {})
+ new_data = {}
+
+ for k, v in extra_data.items():
+ # keep only those that match a column
+ if k in extra_data_columns:
+ new_data[k] = v
+
+ property_state_data['extra_data'] = new_data
+
+ # this serializer is meant to be used by a `create` action
+ property_state_serializer = PropertyStatePromoteWritableSerializer(
+ data=property_state_data
+ )
+
+ try:
+ valid = property_state_serializer.is_valid()
+ except ValueError as e:
+ return JsonResponse({
+ 'status': 'error',
+ 'message': 'Invalid state: {}'.format(str(e))
+ }, status=status.HTTP_400_BAD_REQUEST)
+
+ if valid:
+ # create the new property state, and perform an initial save
+ new_state = property_state_serializer.save()
+ # set `merge_state` to new, rather than unknown
+ new_state.merge_state = MERGE_STATE_NEW
+
+ # Log this as an "Import Creation" audit record
+ PropertyAuditLog.objects.create(organization_id=org_id,
+ parent1=None,
+ parent2=None,
+ parent_state1=None,
+ parent_state2=None,
+ state=new_state,
+ name='Import Creation',
+ description='Creation from API',
+ import_filename=None,
+ record_type=AUDIT_USER_CREATE)
+
+ # promote to view
+ view = new_state.promote(cycle)
+
+ return JsonResponse({
+ 'status': 'success',
+ 'property_view_id': view.id,
+ 'property_state_id': new_state.id,
+ 'property_id': view.property.id,
+ 'view': PropertyViewSerializer(view).data
+ }, encoder=PintJSONEncoder, status=status.HTTP_201_CREATED)
+
+ else:
+ # invalid request
+ return JsonResponse({
+ 'status': 'error',
+ 'message': 'Invalid state: {}'.format(property_state_serializer.errors)
+ }, status=status.HTTP_400_BAD_REQUEST)
+
@swagger_auto_schema_org_query_param
@api_endpoint_class
@ajax_request_class
@@ -1412,6 +1559,200 @@ def update_with_building_sync(self, request, pk):
'message': "Could not process building file with messages {}".format(messages)
}, status=status.HTTP_400_BAD_REQUEST)
+ @swagger_auto_schema(
+ manual_parameters=[
+ AutoSchemaHelper.path_id_field(
+ description='ID of the property view to update'
+ ),
+ AutoSchemaHelper.query_org_id_field(),
+ AutoSchemaHelper.query_integer_field(
+ 'cycle_id',
+ required=True,
+ description='ID of the cycle of the property view'
+ ),
+ AutoSchemaHelper.query_integer_field(
+ 'mapping_profile_id',
+ required=True,
+ description='ID of the column mapping profile to use'
+ ),
+ AutoSchemaHelper.upload_file_field(
+ 'file',
+ required=True,
+ description='ESPM property report to use (in XLSX format)',
+ ),
+ ],
+ request_body=no_body,
+ )
+ @action(detail=True, methods=['PUT'], parser_classes=(MultiPartParser,))
+ @has_perm_class('can_modify_data')
+ def update_with_espm(self, request, pk):
+ """Update an existing PropertyView with an exported singular ESPM file.
+ """
+ if len(request.FILES) == 0:
+ return JsonResponse({
+ 'success': False,
+ 'message': 'Must pass file in as a multipart/form-data request'
+ }, status=status.HTTP_400_BAD_REQUEST)
+
+ the_file = request.data['file']
+ cycle_pk = request.query_params.get('cycle_id', None)
+ org_id = self.get_organization(self.request)
+ org_inst = Organization.objects.get(pk=org_id)
+
+ # get mapping profile (ensure it is part of the org)
+ mapping_profile_id = request.query_params.get('mapping_profile_id', None)
+ if not mapping_profile_id:
+ return JsonResponse({
+ 'success': False,
+ 'message': 'Must provide a column mapping profile'
+ }, status=status.HTTP_400_BAD_REQUEST)
+
+ column_mapping_profile = org_inst.columnmappingprofile_set.filter(
+ pk=mapping_profile_id
+ )
+ if len(column_mapping_profile) == 0:
+ return JsonResponse({
+ 'success': False,
+ 'message': 'Could not find ESPM column mapping profile'
+ }, status=status.HTTP_400_BAD_REQUEST)
+ elif len(column_mapping_profile) > 1:
+ return JsonResponse({
+ 'success': False,
+ 'message': f"Found multiple ESPM column mapping profiles, found {len(column_mapping_profile)}"
+ }, status=status.HTTP_400_BAD_REQUEST)
+ column_mapping_profile = column_mapping_profile[0]
+
+ try:
+ Cycle.objects.get(pk=cycle_pk, organization_id=org_id)
+ except Cycle.DoesNotExist:
+ return JsonResponse({
+ 'success': False,
+ 'message': 'Cycle ID is missing or Cycle does not exist'
+ }, status=status.HTTP_404_NOT_FOUND)
+
+ try:
+ # note that this is a "safe" query b/c we should have already returned
+ # if the cycle was not within the user's organization
+ property_view = PropertyView.objects.select_related(
+ 'property', 'cycle', 'state'
+ ).get(pk=pk, cycle_id=cycle_pk)
+ except PropertyView.DoesNotExist:
+ return JsonResponse({
+ 'status': 'error',
+ 'message': 'property view does not exist'
+ }, status=status.HTTP_404_NOT_FOUND)
+
+ # create a new "datafile" object to store the file
+ import_record, _ = ImportRecord.objects.get_or_create(
+ name='Manual ESPM Records',
+ owner=request.user,
+ last_modified_by=request.user,
+ super_organization_id=org_id
+ )
+
+ filename = the_file.name
+ path = os.path.join(settings.MEDIA_ROOT, "uploads", filename)
+
+ # Get a unique filename using the get_available_name method in FileSystemStorage
+ s = FileSystemStorage()
+ path = s.get_available_name(path)
+
+ # verify the directory exists
+ if not os.path.exists(os.path.dirname(path)):
+ os.makedirs(os.path.dirname(path))
+
+ # save the file
+ with open(path, 'wb+') as temp_file:
+ for chunk in the_file.chunks():
+ temp_file.write(chunk)
+
+ import_file = ImportFile.objects.create(
+ cycle_id=cycle_pk,
+ import_record=import_record,
+ uploaded_filename=filename,
+ file=path,
+ source_type=SEED_DATA_SOURCES[PORTFOLIO_RAW][1],
+ source_program='PortfolioManager',
+ source_program_version='1.0',
+ )
+
+ # save the raw data, but do it synchronously in the foreground
+ tasks.save_raw_espm_data_synchronous(import_file.pk)
+
+ # verify that there is only one property in the file
+ import_file.refresh_from_db()
+ if import_file.num_rows != 1:
+ return JsonResponse({
+ 'success': False,
+ 'message': f"File must contain exactly one property, found {import_file.num_rows or 0} properties"
+ }, status=status.HTTP_400_BAD_REQUEST)
+
+ # create the column mappings
+ Column.retrieve_mapping_columns(import_file.pk)
+
+ # assign the mappings to the import file id
+ Column.create_mappings(column_mapping_profile.mappings, org_inst, request.user, import_file.pk)
+
+ # call the mapping process - but do this in the foreground, not asynchronously.
+ tasks.map_data_synchronous(import_file.pk)
+
+ # The data should now be mapped. Since the mapping task ran synchronously, the
+ # import file ID can be used to query for the newly mapped property state.
+ new_property_state = PropertyState.objects.filter(
+ organization_id=org_id,
+ import_file_id=import_file.pk,
+ data_state=DATA_STATE_MAPPING,
+ )
+ if len(new_property_state) == 0:
+ return JsonResponse({
+ 'success': False,
+ 'message': "Could not find newly mapped property state"
+ }, status=status.HTTP_400_BAD_REQUEST)
+ elif len(new_property_state) > 1:
+ return JsonResponse({
+ 'success': False,
+ 'message': f"Found multiple newly mapped property states, found {len(new_property_state)}"
+ }, status=status.HTTP_400_BAD_REQUEST)
+ new_property_state = new_property_state[0]
+
+ # retrieve the column merge priorities and then save the updated (merged) property state.
+ # This is called merge protection on the front end.
+ priorities = Column.retrieve_priorities(org_id)
+ merged_state = save_state_match(property_view.state, new_property_state, priorities)
+
+ # save the merged state to the latest property view
+ property_view.state = merged_state
+ property_view.save()
+
+ # now save the meters; a progress_data object is needed to pass to the tasks,
+ # although it is not actually used here.
+ progress_data = ProgressData(func_name='meter_import', unique_id=import_file.pk)
+ # -- Start --
+ # For now, we are duplicating the methods that are called in the tasks in order
+ # to circumvent the celery background task management (i.e., run in foreground)
+ meters_parser = MetersParser.factory(import_file.local_file, org_id)
+ meters_and_readings = meters_parser.meter_and_reading_objs
+ for meter_readings in meters_and_readings:
+ _save_pm_meter_usage_data_task(meter_readings, import_file.id, progress_data.key)
+ # -- End -- of duplicate (and simplified) meter import methods
+ progress_data.delete()
+
+ if merged_state:
+ return JsonResponse({
+ 'success': True,
+ 'status': 'success',
+ 'message': 'successfully updated property with ESPM file',
+ 'data': {
+ 'status': 'success',
+ 'property_view': PropertyViewAsStateSerializer(property_view).data,
+ },
+ }, status=status.HTTP_200_OK)
+ else:
+ return JsonResponse({
+ 'status': 'error',
+ 'message': "Could not process ESPM file"
+ }, status=status.HTTP_400_BAD_REQUEST)
+
@action(detail=True, methods=['PUT'], parser_classes=(MultiPartParser,))
@has_perm_class('can_modify_data')
def upload_inventory_document(self, request, pk):
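For reference, the new `update_with_espm` endpoint is driven the same way the `PropertyViewUpdateWithESPMTests` test drives it: a multipart PUT carrying the XLSX export plus `organization_id`, `cycle_id`, and `mapping_profile_id` query parameters. A minimal client-side sketch using `requests`, assuming the default DRF router path for the action and placeholder host, IDs, and credentials:

```python
import requests

# Placeholder host, credentials, and IDs; the URL assumes the default DRF
# router path for the `update_with_espm` action shown above.
host = 'http://localhost:8000'
view_id, org_id, cycle_id, profile_id = 1, 1, 1, 1

url = (
    f'{host}/api/v3/properties/{view_id}/update_with_espm/'
    f'?organization_id={org_id}&cycle_id={cycle_id}&mapping_profile_id={profile_id}'
)
with open('portfolio-manager-single-22482007.xlsx', 'rb') as f:
    resp = requests.put(
        url,
        files={'file': ('portfolio-manager-single-22482007.xlsx', f)},
        data={'file_type': 'XLSX'},
        auth=('user@example.com', 'api_key'),  # placeholder credentials/auth scheme
    )
resp.raise_for_status()
print(resp.json()['message'])
```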