diff --git a/.cspell/custom-dictionary-workspace.txt b/.cspell/custom-dictionary-workspace.txt index 058b0ee7c4..c03f67d8f4 100644 --- a/.cspell/custom-dictionary-workspace.txt +++ b/.cspell/custom-dictionary-workspace.txt @@ -7,6 +7,7 @@ afterwards ajax Aleck Amongst +analysispropertyview Andriy angularjs api @@ -26,8 +27,10 @@ AWS backend backends badpass +basestring bashrc bedes +biffh Bool boolean BrowserDefinition @@ -36,7 +39,7 @@ bsyncr buildingsnapshot BuildingSnapshot BuildingSnapshots -BUILDINGSYNC +buildingsync bytestring calendarize canonicalbuilding @@ -55,6 +58,8 @@ cli cmp codebase collectstatic +columnlistprofile +columnmappingprofile comparators concat cond @@ -66,8 +71,11 @@ coparents crlf css csv +csvfile +csvreader Ctrl customizable +datacoercions dataset datasets datasource @@ -81,6 +89,7 @@ dest dev dict dicts +diffupdate django Django docker0 @@ -91,6 +100,7 @@ edgecase energystar enums env +ESPM eui eula fieldname @@ -111,6 +121,7 @@ geocode geocoded geocoding geocodings +geomodels getattr getenv getitem @@ -118,12 +129,14 @@ gis Github Google graphviz +greenbutton gte Gunter Gzip hardcoded Homebrew hotfix +hpxml href html iand @@ -140,11 +153,13 @@ iterable Iterable iteritems js +JSESSIONID jshint json JsonField JSONField kBtu +klass kubectl Kubectl Kubernetes @@ -170,9 +185,12 @@ mappable mapquest mcm metadata +meterdata +meterreading middleware MIDDLEWARE mixin +mmbtu ModelSerializer multipart Multipart @@ -186,12 +204,16 @@ nginx nlong nodejs noncanonicalprojectbuildings +nondefault nones noqa npm +nrows num Octant +officedocument onload +openxmlformats OperationalError OrderedDict OrderedDicts @@ -221,12 +243,14 @@ PrimaryKeyRelatedField projectbuilding propertystate propertyview +prprty py Pyright pytype pytz qqfile qs +quantityfield queryset querysets readthedocs @@ -247,26 +271,33 @@ scalable seeddb seedorg seeduser +selfvars sendmail serializable serializer serializers +servlet setUp setUpClass sha signup +spreadsheetml +springframework sqft Starke statuslabel str +strcmp strftime subclasses subdirectory Submodules +submonthly suborg Subpackages subtask sudo +superperms superset TableRows tastypie @@ -276,6 +307,7 @@ taxlots TaxLots taxlotstate taxlotview +taxview td tearDown templatetags @@ -305,13 +337,18 @@ Uncomment ungeocoded unicode unittest +unlinkable unmatch unmatching +unmerge unmerges +unpair unresolvable untracked uom uploader +uploaderfunc +ureg uri url urllist @@ -338,9 +375,14 @@ wildcards workflow wsgi xlarge +xlrd +xlsxwriter xml xmltodict xpath XPath XPATH +xpaths +XSLX +yasg yml diff --git a/.gitignore b/.gitignore index 6c49aab7ea..166547d8dc 100644 --- a/.gitignore +++ b/.gitignore @@ -63,6 +63,7 @@ seed/data_importer/tests/data/tmp_* seed/data_importer/tests/data/~* seed/tests/api/api_test_user.json seed/building_sync/tests/data/test_file.xml +seed/tests/output test.sqlite # Ignore all protractor coverage diff --git a/docs/source/translation.rst b/docs/source/translation.rst index e9236b3d94..b40ae3886b 100644 --- a/docs/source/translation.rst +++ b/docs/source/translation.rst @@ -3,7 +3,14 @@ Translating SEED 1. Update translations on `lokalise`_. -2. Copy lokalise.cfg.example to lokalise.cfg. Update API token. +2. Copy lokalise.yml.example to lokalise.yml. Update API token. + +3. Install lokalise locally + + .. code:: bash + + brew tap lokalise/cli-2 + brew install lokalise2 3. Run scripts if you have Lokalise CLI installed. If not, see scripts for manual steps. @@ -76,6 +83,6 @@ Compare:

   {$:: inventory_type == 'taxlots' ? translations['INCLUDE_SHARED_TAXLOTS'] :
-  translations['INCLUDE_SHARED
+  translations['INCLUDE_SHARED']

.. _lokalise: https://lokalise.com/project/3537487659ca9b1dce98a7.36378626/?view=multi
diff --git a/locale/en_US/LC_MESSAGES/django.mo b/locale/en_US/LC_MESSAGES/django.mo
index adfb1f8cd6..c91468bfcc 100644
Binary files a/locale/en_US/LC_MESSAGES/django.mo and b/locale/en_US/LC_MESSAGES/django.mo differ
diff --git a/locale/en_US/LC_MESSAGES/django.po b/locale/en_US/LC_MESSAGES/django.po
index 260b658ad4..2ca5de7e11 100644
--- a/locale/en_US/LC_MESSAGES/django.po
+++ b/locale/en_US/LC_MESSAGES/django.po
@@ -282,6 +282,9 @@ msgstr "Audit Template Organization Token"
 msgid "Audit Template Password"
 msgstr "Audit Template Password"
 
+msgid "Audit Template Upload Results"
+msgstr "Audit Template Upload Results"
+
 #: seed/models/models.py:136
 msgid "Auditing"
 msgstr "Auditing"
@@ -570,6 +573,9 @@ msgstr "Configuration"
 msgid "Confirm"
 msgstr "Confirm"
 
+msgid "Confirm Audit Template Building Import?"
+msgstr "Confirm Audit Template Building Import?"
+
 msgid "Confirm Save Mappings?"
 msgstr "Confirm Save Mappings?"
 
@@ -991,6 +997,15 @@ msgstr "----Choose energy type----"
 msgid "ENERGY_UNIT_DISPLAY_CHOICE_PLACEHOLDER"
 msgstr "----Change display unit----"
 
+msgid "ESPM Password"
+msgstr "ESPM Password"
+
+msgid "ESPM Property ID"
+msgstr "ESPM Property ID"
+
+msgid "ESPM Username"
+msgstr "ESPM Username"
+
 msgid "EUI"
 msgstr "EUI"
 
@@ -1144,6 +1159,9 @@ msgstr "File types supported: .csv, .xls, […]"
[the rest of this hunk and the header of the next file's diff were lost in extraction]
diff --git a/seed/data_importer/tasks.py b/seed/data_importer/tasks.py
@@ […] @@
+def map_data_only(import_file_id: int) -> dict:
+    """This method is used to map data synchronously and is intended for mapping only a
+    small set of data. It is used in the Update with ESPM workflow, which runs on a
+    single property. Further, this method is a copy of the `map_data` method, simplified
+    to run only in the foreground.
+
+    Args:
+        import_file_id (int): Database ID of the import file record
+
+    Returns:
+        dict: Result of the progress data.
+    """
+    import_file = ImportFile.objects.get(pk=import_file_id)
+
+    # create a key, but this is just used to communicate the result
+    progress_data = ProgressData(func_name='map_data_only', unique_id=import_file_id)
+    progress_data.delete()
+
+    # Check for duplicate column headers
+    column_headers = import_file.first_row_columns or []
+    duplicate_tracker: dict = collections.defaultdict(lambda: 0)
+    for header in column_headers:
+        duplicate_tracker[header] += 1
+        if duplicate_tracker[header] > 1:
+            raise Exception("Duplicate column found in file: %s" % (header))
+
+    source_type = SEED_DATA_SOURCES_MAPPING.get(import_file.source_type, ASSESSED_RAW)
+
+    qs = PropertyState.objects.filter(
+        import_file=import_file,
+        source_type=source_type,
+        data_state=DATA_STATE_IMPORT,
+    ).only('id').iterator()
+
+    # This version of `map_data` should only be run when the data set is
+    # reasonably small, because it will block operations and prevent
+    # reporting status updates.
+    id_chunks = [[obj.id for obj in chunk] for chunk in batch(qs, 100)]
+    for ids in id_chunks:
+        map_row_chunk(ids, import_file_id, source_type, progress_data.key)
+
+    finish_mapping(import_file_id, True, progress_data.key)
+
+    return progress_data.result()
+
+
 def map_data(import_file_id, remap=False, mark_as_done=True):
     """
     Map data task. By default this method will run through the mapping and mark it as complete.
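For orientation, the synchronous helpers added in this file are meant to be chained for the single-property ESPM update. Below is a minimal sketch of that flow, assuming `save_raw_espm_data_synchronous` (added further down in this diff) and assuming the `ProgressData` result payload carries a `'status'` key:

```python
# Hypothetical driver, not part of this diff: import one ESPM spreadsheet
# in the foreground and return the mapping result.
from seed.data_importer.tasks import map_data_only, save_raw_espm_data_synchronous


def import_single_espm_file(import_file_id: int) -> dict:
    # Both helpers block the calling thread, so this is only sensible for a
    # one-property file such as an ESPM download.
    save_result = save_raw_espm_data_synchronous(import_file_id)
    if save_result.get('status') == 'error':  # assumed shape of the progress payload
        return save_result
    return map_data_only(import_file_id)
```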
@@ -603,7 +649,7 @@ def map_data(import_file_id, remap=False, mark_as_done=True):
     :param remap: bool, if remapping, then delete previous objects from the database
     :param mark_as_done: bool, if skip review then the mapping_done flag will be set to true at the end.
-    :return: JSON
+    :return: dict
     """
     import_file = ImportFile.objects.get(pk=import_file_id)
 
@@ -1256,6 +1302,67 @@ def _save_raw_data_create_tasks(file_pk, progress_key):
     return chord(tasks, interval=15)(finish_raw_save.s(file_pk, progress_data.key))
 
 
+def save_raw_espm_data_synchronous(file_pk: int) -> dict:
+    """This method is a one-off method for saving the raw ESPM data synchronously. This
+    is needed for the ESPM update method that runs on a single property. The `save_raw_data`
+    method is not used because it is asynchronous, so the pieces of that method were
+    copied here. Technically, this method will work with a CSV or XLSX spreadsheet too,
+    but it was only intended for ESPM.
+
+    Args:
+        file_pk (int): Import file ID to import
+
+    Returns:
+        dict: returns the result of the progress data
+    """
+    progress_data = ProgressData(func_name='save_raw_data_synchronous', unique_id=file_pk)
+    try:
+        # Go get the tasks that need to be created, then call them in the chord here.
+        import_file = ImportFile.objects.get(pk=file_pk)
+        if import_file.raw_save_done:
+            return progress_data.finish_with_warning('Raw data already saved')
+
+        try:
+            parser = reader.MCMParser(import_file.local_file)
+        except Exception as e:
+            _log.debug(f'Error reading XLSX file: {str(e)}')
+            return progress_data.finish_with_error('Failed to parse XLSX file. Please review your import file - all headers should be present and non-numeric.')
+
+        import_file.has_generated_headers = False
+        if hasattr(parser, 'has_generated_headers'):
+            import_file.has_generated_headers = parser.has_generated_headers
+
+        cache_first_rows(import_file, parser)
+        import_file.num_rows = 0
+        import_file.num_columns = parser.num_columns()
+
+        chunks = []
+        for batch_chunk in batch(parser.data, 100):
+            import_file.num_rows += len(batch_chunk)
+            chunks.append(batch_chunk)
+        import_file.save()
+
+        progress_data.total = len(chunks)
+        progress_data.save()
+
+        # Save the raw data chunks. This should only happen
+        # on a small amount of data since it is running in the foreground
+        for chunk in chunks:
+            _save_raw_data_chunk(chunk, file_pk, progress_data.key)
+
+        finish_raw_save(file_pk, progress_data.key)
+    except Error as e:
+        progress_data.finish_with_error('File Content Error: ' + str(e), traceback.format_exc())
+    except KeyError as e:
+        progress_data.finish_with_error('Invalid Column Name: "' + str(e) + '"', traceback.format_exc())
+    except TypeError:
+        progress_data.finish_with_error('TypeError Exception', traceback.format_exc())
+    except Exception as e:
+        progress_data.finish_with_error('Unhandled Error: ' + str(e), traceback.format_exc())
+
+    return progress_data.result()
+
+
 def save_raw_data(file_pk):
     """
     Simply report to the user that we have queued up the save_run_data to run. This is the entry
diff --git a/seed/lib/xml_mapping/mapper.py b/seed/lib/xml_mapping/mapper.py
index c50db4b23c..a07bad8413 100644
--- a/seed/lib/xml_mapping/mapper.py
+++ b/seed/lib/xml_mapping/mapper.py
@@ -56,20 +56,24 @@ def get_bae_mappings():
     # units field name is the same with " Units" appended.
bsync_assets = BAE.get_default_asset_defs() - for item in bsync_assets: + for asset in bsync_assets: + if isinstance(asset, dict): + asset_type, export_name, export_units = asset['type'], asset['export_name'], asset['export_units'] + else: + asset_type, export_name, export_units = asset.type, asset.export_name, asset.export_units - if item['type'] == 'sqft': + if asset_type == 'sqft': # these types need 2 different entries: 1 for "primary" and 1 for "secondary" for i in ['Primary', 'Secondary']: - results.append(make_bae_hash(i + ' ' + item['export_name'])) - if 'export_units' in item and item['export_units'] is True: + results.append(make_bae_hash(i + ' ' + export_name)) + if export_units is True: # also export units field - results.append(make_bae_hash(i + ' ' + item['export_name'] + " Units")) + results.append(make_bae_hash(i + ' ' + export_name + " Units")) else: - results.append(make_bae_hash(item['export_name'])) - if 'export_units' in item and item['export_units'] is True: - results.append(make_bae_hash(item['export_name'] + " Units")) + results.append(make_bae_hash(export_name)) + if export_units is True: + results.append(make_bae_hash(export_name + " Units")) return results diff --git a/seed/lib/xml_mapping/reader.py b/seed/lib/xml_mapping/reader.py index 89407ef5c4..e16f18ced2 100644 --- a/seed/lib/xml_mapping/reader.py +++ b/seed/lib/xml_mapping/reader.py @@ -64,10 +64,10 @@ def _add_property_to_data(self, bsync_file, file_name): # add to data and column headers for item in assets: - property_[item['name']] = item['value'] + property_[item.name] = item.value # only append if not already there (when processing a zip of xmls) - if item['name'] not in self.headers: - self.headers.append(item['name']) + if item.name not in self.headers: + self.headers.append(item.name) # When importing zip files, we need to be able to determine which .xml file # a certain PropertyState came from (because of the linked BuildingFile model). diff --git a/seed/models/column_mapping_profiles.py b/seed/models/column_mapping_profiles.py index f83e2343c9..95b8fe4d46 100644 --- a/seed/models/column_mapping_profiles.py +++ b/seed/models/column_mapping_profiles.py @@ -4,6 +4,9 @@ SEED Platform (TM), Copyright (c) Alliance for Sustainable Energy, LLC, and other contributors. See also https://github.com/seed-platform/seed/main/LICENSE.md """ +import csv +import os + from django.db import models from seed.lib.superperms.orgs.models import Organization @@ -23,6 +26,9 @@ class ColumnMappingProfile(models.Model): name = models.CharField(max_length=255, blank=False) mappings = models.JSONField(default=dict, blank=True) + # TODO: Need to verify that we want ManyToMany here. This might be needed for + # the BuildingSync related profiles, but the dev database appears to just + # have one org per profile. organizations = models.ManyToManyField(Organization) created = models.DateTimeField(auto_now_add=True) @@ -44,3 +50,56 @@ def get_profile_type(cls, profile_type): if profile_type in types_dict: return types_dict[profile_type] raise Exception(f'Invalid profile type "{profile_type}"') + + @classmethod + def create_from_file(cls, filename: str, org: Organization, profile_name: str, profile_type: int = NORMAL, overwrite_if_exists: bool = False): + """Generate a ColumnMappingProfile from a set of mappings in a file. The format of the file + is slightly different from the Column.create_mappings_from_file, but is the same format as + the file that you download from the column mappings page within SEED. 
+ + Args: + filename (str): path to the file to create the mappings from. + org (Organization): Instance object of the organization + profile_name (str): Name of the new profile to create + profile_type (int, optional): Type of profile, will be NORMAL for most cases. Defaults to NORMAL. + overwrite_if_exists (bool, optional): If the mapping exists, then overwrite. Defaults to False. + + Raises: + Exception: If the file does not exist, mappings are empty, or the profile already exists and overwrite_if_exists is False. + """ + mappings = [] + if os.path.isfile(filename): + with open(filename, 'r', newline=None) as csvfile: + csvreader = csv.reader(csvfile) + next(csvreader) # skip header + for row in csvreader: + data = { + "from_field": row[0], + "from_units": row[1], + "to_table_name": row[2], + "to_field": row[3], + } + mappings.append(data) + else: + raise Exception(f"Mapping file does not exist: {filename}") + + if len(mappings) == 0: + raise Exception(f"No mappings in file: {filename}") + + # Because this object has a many to many on orgs (which I argue shouldn't), then + # first, get all the org's mapping profiles + profiles = org.columnmappingprofile_set.all() + + # second, get or create the profile now that we are only seeing my 'orgs' profiles + profile, created = profiles.get_or_create(name=profile_name, profile_type=profile_type) + if not created and not overwrite_if_exists: + raise Exception(f"ColumnMappingProfile already exists, not overwriting: {profile_name}") + + # Do I need to confirm that the mappings are defined in the Columns world? + profile.mappings = mappings + profile.save() + + # make sure that it is added to the org + org.columnmappingprofile_set.add(profile) + + return profile diff --git a/seed/serializers/properties.py b/seed/serializers/properties.py index 9dfec58c7c..985388f275 100644 --- a/seed/serializers/properties.py +++ b/seed/serializers/properties.py @@ -212,6 +212,83 @@ def to_representation(self, data): return result +class PropertyStatePromoteWritableSerializer(serializers.ModelSerializer): + """ + Used by Property create which takes in a state and promotes it to a PropertyView + Organization_id is set in view (not passed in directly by user) + """ + extra_data = serializers.JSONField(required=False) + measures = PropertyMeasureSerializer(source='propertymeasure_set', many=True, read_only=True) + scenarios = ScenarioSerializer(many=True, read_only=True) + files = BuildingFileSerializer(source='building_files', many=True, read_only=True) + + # to support the old state serializer method with the PROPERTY_STATE_FIELDS variables + import_file_id = serializers.IntegerField(allow_null=True, read_only=True) + organization_id = serializers.IntegerField() + + # read-only core fields + id = serializers.IntegerField(read_only=True) + data_state = serializers.IntegerField(read_only=True) + merge_state = serializers.IntegerField(allow_null=True, read_only=True) + source_type = serializers.IntegerField(allow_null=True, read_only=True) + hash_object = serializers.CharField(allow_null=True, read_only=True) + lot_number = serializers.CharField(allow_null=True, read_only=True) + normalized_address = serializers.CharField(allow_null=True, read_only=True) + created = serializers.DateTimeField(read_only=True) + updated = serializers.DateTimeField(read_only=True) + # read-only geo fields + bounding_box = serializers.CharField(allow_null=True, read_only=True) + centroid = serializers.CharField(allow_null=True, read_only=True) + geocoded_address = 
serializers.CharField(allow_null=True, read_only=True) + geocoding_confidence = serializers.CharField(allow_null=True, read_only=True) + geocoded_city = serializers.CharField(allow_null=True, read_only=True) + geocoded_county = serializers.CharField(allow_null=True, read_only=True) + geocoded_country = serializers.CharField(allow_null=True, read_only=True) + geocoded_neighborhood = serializers.CharField(allow_null=True, read_only=True) + geocoded_state = serializers.CharField(allow_null=True, read_only=True) + geocoded_postal_code = serializers.CharField(allow_null=True, read_only=True) + geocoded_side_of_street = serializers.CharField(allow_null=True, read_only=True) + long_lat = serializers.CharField(allow_null=True, read_only=True) + + # support naive datetime objects + generation_date = serializers.DateTimeField('%Y-%m-%dT%H:%M:%S', allow_null=True, required=False) + recent_sale_date = serializers.DateTimeField('%Y-%m-%dT%H:%M:%S', allow_null=True, required=False) + release_date = serializers.DateTimeField('%Y-%m-%dT%H:%M:%S', allow_null=True, required=False) + + # support the pint objects + conditioned_floor_area = PintQuantitySerializerField(allow_null=True, required=False) + gross_floor_area = PintQuantitySerializerField(allow_null=True, required=False) + occupied_floor_area = PintQuantitySerializerField(allow_null=True, required=False) + site_eui = PintQuantitySerializerField(allow_null=True, required=False) + site_eui_modeled = PintQuantitySerializerField(allow_null=True, required=False) + source_eui_weather_normalized = PintQuantitySerializerField(allow_null=True, required=False) + source_eui = PintQuantitySerializerField(allow_null=True, required=False) + source_eui_modeled = PintQuantitySerializerField(allow_null=True, required=False) + site_eui_weather_normalized = PintQuantitySerializerField(allow_null=True, required=False) + total_ghg_emissions = PintQuantitySerializerField(allow_null=True, required=False) + total_marginal_ghg_emissions = PintQuantitySerializerField(allow_null=True, required=False) + total_ghg_emissions_intensity = PintQuantitySerializerField(allow_null=True, required=False) + total_marginal_ghg_emissions_intensity = PintQuantitySerializerField(allow_null=True, required=False) + + # old fields that are no longer used and should not be updated + conditioned_floor_area_orig = serializers.FloatField(allow_null=True, read_only=True) + gross_floor_area_orig = serializers.FloatField(allow_null=True, read_only=True) + occupied_floor_area_orig = serializers.FloatField(allow_null=True, read_only=True) + site_eui_orig = serializers.FloatField(allow_null=True, read_only=True) + site_eui_modeled_orig = serializers.FloatField(allow_null=True, read_only=True) + site_eui_weather_normalized_orig = serializers.FloatField(allow_null=True, read_only=True) + source_eui_orig = serializers.FloatField(allow_null=True, read_only=True) + source_eui_weather_normalized_orig = serializers.FloatField(allow_null=True, read_only=True) + source_eui_modeled_orig = serializers.FloatField(allow_null=True, read_only=True) + + class Meta: + fields = '__all__' + model = PropertyState + extra_kwargs = { + 'organization': {'read_only': True} + } + + class PropertyStateWritableSerializer(serializers.ModelSerializer): """ Used by PropertyViewAsState as a nested serializer @@ -229,7 +306,6 @@ class PropertyStateWritableSerializer(serializers.ModelSerializer): import_file_id = serializers.IntegerField(allow_null=True, read_only=True) organization_id = serializers.IntegerField(read_only=True) - # 
support naive datetime objects # support naive datetime objects generation_date = serializers.DateTimeField('%Y-%m-%dT%H:%M:%S', allow_null=True, required=False) recent_sale_date = serializers.DateTimeField('%Y-%m-%dT%H:%M:%S', allow_null=True, required=False) diff --git a/seed/static/seed/js/controllers/data_upload_espm_modal_controller.js b/seed/static/seed/js/controllers/data_upload_espm_modal_controller.js new file mode 100644 index 0000000000..f0159ae7b5 --- /dev/null +++ b/seed/static/seed/js/controllers/data_upload_espm_modal_controller.js @@ -0,0 +1,91 @@ +/** + * SEED Platform (TM), Copyright (c) Alliance for Sustainable Energy, LLC, and other contributors. + * See also https://github.com/seed-platform/seed/main/LICENSE.md + */ +angular.module('BE.seed.controller.data_upload_espm_modal', []) + .controller('data_upload_espm_modal_controller', [ + '$scope', + '$uibModalInstance', + 'spinner_utility', + 'organization', + 'cycle_id', + 'upload_from_file', + 'espm_service', + 'view_id', + 'pm_property_id', + 'column_mapping_profiles', + function ( + $scope, + $uibModalInstance, + spinner_utility, + organization, + cycle_id, + upload_from_file, + espm_service, + view_id, + pm_property_id, + column_mapping_profiles + ) { + $scope.organization = organization; + $scope.view_id = view_id; + $scope.cycle_id = cycle_id; + $scope.upload_from_file = upload_from_file; + $scope.error = ''; + $scope.busy = false; + $scope.mapping_profiles = column_mapping_profiles; + let profile = $scope.mapping_profiles.length ? $scope.mapping_profiles[0].id : null; + + $scope.fields = { + pm_property_id: pm_property_id, + espm_username: '', + espm_password: '', + mapping_profile: profile + }; + + // password field + $scope.secret = 'password'; + $scope.toggle_secret = function () { + $scope.secret = ($scope.secret == 'password') ? 
'text' : 'password'; + }; + + $scope.upload_from_file_and_close = function (event_message, file, progress) { + $scope.close(); + $scope.upload_from_file(event_message, file, progress); + }; + + $scope.confirm_import = function () { + if (!$scope.fields.pm_property_id) { + $scope.error = "An ESPM Property ID is required."; + } else { + $scope.submit_request(); + } + }; + + $scope.submit_request = function () { + $scope.error = ''; + $scope.busy = true; + spinner_utility.show(); + return espm_service.get_espm_building_xlsx($scope.organization.id, $scope.fields.pm_property_id, $scope.fields.espm_username, $scope.fields.espm_password).then(file_result => { + spinner_utility.hide(); + if (typeof (result) == 'object' && !result.success) { + $scope.error = 'Error: ' + result.message; + $scope.busy = false; + } else { + return espm_service.update_building_with_espm_xlsx($scope.organization.id, $scope.cycle_id, $scope.view_id, $scope.fields.mapping_profile, file_result).then(result => { + if (typeof (result) == 'object' && !result.success) { + $scope.error = 'Error: ' + result.message; + $scope.busy = false; + } else { + $scope.close(); + $scope.upload_from_file('upload_complete', null, null) + $scope.busy = false; + } + }); + } + }); + }; + + $scope.close = function () { + $uibModalInstance.dismiss(); + }; + }]); diff --git a/seed/static/seed/js/controllers/inventory_detail_controller.js b/seed/static/seed/js/controllers/inventory_detail_controller.js index 70176820fb..64bb8072e0 100644 --- a/seed/static/seed/js/controllers/inventory_detail_controller.js +++ b/seed/static/seed/js/controllers/inventory_detail_controller.js @@ -36,7 +36,6 @@ angular.module('BE.seed.controller.inventory_detail', []) 'current_profile', 'labels_payload', 'organization_payload', - 'audit_template_service', 'cycle_service', 'simple_modal_service', 'property_measure_service', @@ -74,7 +73,6 @@ angular.module('BE.seed.controller.inventory_detail', []) current_profile, labels_payload, organization_payload, - audit_template_service, cycle_service, simple_modal_service, property_measure_service, @@ -82,6 +80,7 @@ angular.module('BE.seed.controller.inventory_detail', []) ) { $scope.inventory_type = $stateParams.inventory_type; $scope.organization = organization_payload.organization; + // WARNING: $scope.org is used by "child" controller - analysis_details_controller $scope.org = {id: organization_payload.organization.id}; $scope.static_url = urls.static_url; @@ -118,6 +117,7 @@ angular.module('BE.seed.controller.inventory_detail', []) return !_.isEmpty(label.is_applied); }); $scope.audit_template_building_id = inventory_payload.state.audit_template_building_id; + $scope.pm_property_id = inventory_payload.state.pm_property_id; /** See service for structure of returned payload */ $scope.historical_items = inventory_payload.history; @@ -655,6 +655,33 @@ angular.module('BE.seed.controller.inventory_detail', []) }); }; + $scope.open_data_upload_espm_modal = function () { + var modalInstance = $uibModal.open({ + templateUrl: urls.static_url + 'seed/partials/data_upload_espm_modal.html', + controller: 'data_upload_espm_modal_controller', + resolve: { + pm_property_id: () => $scope.pm_property_id, + organization: () => $scope.organization, + cycle_id: () => $scope.cycle.id, + upload_from_file: () => $scope.uploaderfunc, + view_id: () => $stateParams.view_id, + column_mapping_profiles: [ + 'column_mappings_service', + function ( + column_mappings_service + ) { + return column_mappings_service.get_column_mapping_profiles_for_org( + 
$scope.organization.id, [] + ).then(function (response) { + return response.data; + }); + }] + } + }); + modalInstance.result.then(function () { + }); + }; + $scope.export_building_sync = function () { var modalInstance = $uibModal.open({ templateUrl: urls.static_url + 'seed/partials/export_buildingsync_modal.html', @@ -792,6 +819,7 @@ angular.module('BE.seed.controller.inventory_detail', []) $scope.uploader = { invalid_xml_extension_alert: false, + invalid_xlsx_extension_alert: false, in_progress: false, progress: 0, complete: false, @@ -804,9 +832,14 @@ angular.module('BE.seed.controller.inventory_detail', []) $scope.uploader.invalid_xml_extension_alert = true; break; + case 'invalid_extension': + $scope.uploader.invalid_xlsx_extension_alert = true; + break; + case 'upload_submitted': $scope.uploader.filename = file.filename; $scope.uploader.invalid_xml_extension_alert = false; + $scope.uploader.invalid_xlsx_extension_alert = false; $scope.uploader.in_progress = true; $scope.uploader.status_message = 'uploading file'; break; diff --git a/seed/static/seed/js/directives/sdUploader.js b/seed/static/seed/js/directives/sdUploader.js index dac67ed207..23b25af373 100644 --- a/seed/static/seed/js/directives/sdUploader.js +++ b/seed/static/seed/js/directives/sdUploader.js @@ -303,6 +303,148 @@ var makeBuildingSyncUpdater = function (scope, element, allowed_extensions) { return uploader; }; +var makeESPMUpdater = function (scope, element, allowed_extensions) { + var uploader = new qq.FineUploaderBasic({ + button: element[0], + request: { + method: 'PUT', + endpoint: '/api/v3/properties/' + scope.importrecord + '/update_with_espm/?cycle_id=' + scope.cycleId + '&organization_id=' + scope.organizationId + '&mapping_profile_id=' + scope.mappingProfileId, + inputName: 'file', + paramsInBody: true, + forceMultipart: true, + customHeaders: { + 'X-CSRFToken': BE.csrftoken + }, + params: { + } + }, + validation: { + allowedExtensions: allowed_extensions + }, + text: { + fileInputTitle: '', + uploadButton: scope.buttontext + }, + retry: { + enableAuto: false + }, + iframeSupport: { + localBlankPathPage: '/success.html' + }, + /** + * multiple: only allow one file to be uploaded at a time + */ + multiple: false, + maxConnections: 20, + callbacks: { + /** + * onSubmitted: overloaded callback that calls the callback defined + * in the element attribute. Passes as arguments to the callback + * a message indicating upload has started, "upload_submitted", and + * the filename. + */ + onSubmitted: function (id, fileName) { + scope.eventfunc({ + message: 'upload_submitted', + file: { + filename: fileName, + source_type: scope.sourcetype + } + }); + var params = { + csrf_token: BE.csrftoken, + csrf_name: 'csrfmiddlewaretoken', + csrf_xname: 'X-CSRFToken', + file_type: 1, + organization_id: scope.organizationId, + cycle_id: scope.cycleId + }; + + uploader.setParams(params); + }, + /** + * onComplete: overloaded callback that calls the callback defined + * in the element attribute unless the upload failed, which will + * fire a window alert. Passes as arguments to the callback + * a message indicating upload has completed, "upload_complete", and + * the filename. 
+ */ + onComplete: function (id, fileName, responseJSON) { + + // Only handle success because error transition is in onError event handler + if (responseJSON.status === 'success') { + scope.eventfunc({ + message: 'upload_complete', + file: { + filename: fileName, + view_id: _.get(responseJSON, 'data.property_view.id'), + source_type: scope.sourcetype + } + }); + } + }, + /** + * onProgress: overloaded callback that calls the callback defined + * in the element attribute. Passes as arguments to the callback + * a message indicating upload is in progress, "upload_in_progress", + * the filename, and a progress object with two keys: loaded - the + * bytes of the file loaded, and total - the total number of bytes + * for the file. + */ + onProgress: function (id, fileName, loaded, total) { + scope.eventfunc({ + message: 'upload_in_progress', + file: { + filename: fileName, + source_type: scope.sourcetype + }, + progress: { + loaded: loaded, + total: total + } + }); + }, + /** + * onError: overloaded callback that calls the callback defined + * in the element attribute. Primarily for non-conforming files + * that return 400 from the backend and invalid file extensions. + */ + onError: function (id, fileName, errorReason, xhr) { + if (_.includes(errorReason, ' has an invalid extension.')) { + scope.eventfunc({message: 'invalid_extension'}); + return; + } + + // Ignore this error handler if the network request hasn't taken place yet (e.g., invalid file extension) + if (!xhr) { + alert(errorReason); + return; + } + + var error = errorReason; + try { + var json = JSON.parse(xhr.responseText); + if (_.has(json, 'message')) { + error = json.message; + } + } catch (e) { + // no-op + } + + scope.eventfunc({ + message: 'upload_error', + file: { + filename: fileName, + source_type: scope.sourcetype, + error: error + } + }); + } + } + }); + return uploader; +}; + /* Inventory Document Uploader for files to attach to a property */ var makeDocumentUploader = function (scope, element, allowed_extensions) { @@ -451,6 +593,8 @@ var sdUploaderFineUploader = function (scope, element/*, attrs, filename*/) { var uploader; if (scope.sourcetype === 'BuildingSyncUpdate') { uploader = makeBuildingSyncUpdater(scope, element, ['xml']); + } else if (scope.sourcetype === 'ESPMUpdate') { + uploader = makeESPMUpdater(scope, element, ['xlsx']); } else if (scope.sourcetype === 'GreenButton') { uploader = makeFileSystemUploader(scope, element, ['xml']); } else if (scope.sourcetype === 'SensorMetaData') { @@ -474,6 +618,7 @@ angular.module('sdUploader', []).directive('sdUploader', function () { eventfunc: '&', importrecord: '=', organizationId: '=', + mappingProfileId: '=?', sourceprog: '@', sourcetype: '@', sourcever: '=' diff --git a/seed/static/seed/js/seed.js b/seed/static/seed/js/seed.js index dce6dd9e85..dbbf4cd0e9 100644 --- a/seed/static/seed/js/seed.js +++ b/seed/static/seed/js/seed.js @@ -55,6 +55,7 @@ angular.module('BE.seed.controllers', [ 'BE.seed.controller.data_quality_labels_modal', 'BE.seed.controller.data_quality_modal', 'BE.seed.controller.data_upload_audit_template_modal', + 'BE.seed.controller.data_upload_espm_modal', 'BE.seed.controller.data_upload_modal', 'BE.seed.controller.data_view', 'BE.seed.controller.dataset', @@ -170,6 +171,7 @@ angular.module('BE.seed.services', [ 'BE.seed.service.data_view', 'BE.seed.service.dataset', 'BE.seed.service.derived_columns', + 'BE.seed.service.espm', 'BE.seed.service.event', 'BE.seed.service.filter_groups', 'BE.seed.service.flippers', diff --git 
a/seed/static/seed/js/services/espm_service.js b/seed/static/seed/js/services/espm_service.js
new file mode 100644
index 0000000000..ebde12b025
--- /dev/null
+++ b/seed/static/seed/js/services/espm_service.js
@@ -0,0 +1,51 @@
+/**
+ * SEED Platform (TM), Copyright (c) Alliance for Sustainable Energy, LLC, and other contributors.
+ * See also https://github.com/seed-platform/seed/main/LICENSE.md
+ */
+angular.module('BE.seed.service.espm', []).factory('espm_service', [
+  '$http',
+  '$log',
+  function (
+    $http,
+    $log
+  ) {
+
+    const get_espm_building_xlsx = function (org_id, pm_property_id, espm_username, espm_password) {
+      return $http.post(['/api/v3/portfolio_manager/', pm_property_id, '/download/?organization_id=', org_id].join(''), {
+        username: espm_username,
+        password: espm_password
+      }, {
+        responseType: 'arraybuffer'
+      }).then(function (response) {
+        return response.data;
+      }).catch(function (response) {
+        $log.error('Could not get ESPM building from service with status: ' + response.status);
+        return response.data;
+      });
+    };
+
+    const update_building_with_espm_xlsx = function (org_id, cycle_id, property_view_id, mapping_profile, file_data) {
+      let body = new FormData();
+      let blob = new Blob([file_data], {type: 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'});
+      body.append('file', blob, ['espm_', new Date().getTime(), '.xlsx'].join(''));
+      let headers = {'Content-Type': undefined};
+
+      return $http.put([
+        '/api/v3/properties/', property_view_id, '/update_with_espm/?',
+        'cycle_id=', cycle_id, '&',
+        'organization_id=', org_id, '&',
+        'mapping_profile_id=', mapping_profile
+      ].join(''), body, {headers: headers}).then(function (response) {
+        return response.data;
+      }).catch(function (response) {
+        return response.data;
+      });
+    };
+
+    const espm_factory = {
+      get_espm_building_xlsx: get_espm_building_xlsx,
+      update_building_with_espm_xlsx: update_building_with_espm_xlsx
+    };
+
+    return espm_factory;
+  }]);
diff --git a/seed/static/seed/locales/en_US.json b/seed/static/seed/locales/en_US.json
index 24b04c47fd..3851bda4f6 100644
--- a/seed/static/seed/locales/en_US.json
+++ b/seed/static/seed/locales/en_US.json
@@ -88,6 +88,7 @@
   "Audit Template Email": "Audit Template Email",
   "Audit Template Organization Token": "Audit Template Organization Token",
   "Audit Template Password": "Audit Template Password",
+  "Audit Template Upload Results": "Audit Template Upload Results",
   "Auditing": "Auditing",
   "Auto Matching": "Auto Matching",
   "BEFORE_GEOCODING": "Before geocoding",
@@ -180,6 +181,7 @@
   "Conditioned Floor Area": "Conditioned Floor Area",
   "Configuration": "Configuration",
   "Confirm": "Confirm",
+  "Confirm Audit Template Building Import?": "Confirm Audit Template Building Import?",
   "Confirm Save Mappings?": "Confirm Save Mappings?",
   "Confirm delete": "Confirm delete",
   "Confirm new password": "Confirm new password",
@@ -319,6 +321,11 @@
   "ENERGY STAR Score": "ENERGY STAR Score",
   "ENERGY_TYPE_DISPLAY_CHOICE_PLACEHOLDER": "----Choose energy type----",
   "ENERGY_UNIT_DISPLAY_CHOICE_PLACEHOLDER": "----Change display unit----",
+  "ESPM_FILE_UPLOAD": "ESPM Spreadsheet Upload (xlsx). 
The spreadsheet should contain a single property.", + "ESPM_IMPORT_TEXT": "Choose an EnergyStar Portfolio Manager (ESPM) data importing method below: you can either upload a property spreadsheet previously downloaded from ESPM, or you can connect to ESPM directly to access the data.", + "ESPM Password": "ESPM Password", + "ESPM Property ID": "ESPM Property ID", + "ESPM Username": "ESPM Username", "EUI": "EUI", "EXCLUDE": "EXCLUDE", "EXTRA_DATA_COL_TYPE_CHANGE": "For “extra data” fields, this allows the user to set the type, such as Text, Number, Date, etc.", @@ -369,6 +376,7 @@ "FIELD_NAMES_FOR_MATCHING": "Field names for matching", "FILE_TYPES_SUPPORTED": "File types supported: .csv<\/strong>, .xls<\/strong>, .xlsx<\/strong>, .xml<\/strong>, .zip<\/strong>, .geojson<\/strong>, and .json<\/strong>.", "Failed to delete inventory": "Failed to delete inventory", + "Fetching your buildings from Audit Template...": "Fetching your buildings from Audit Template...", "Field": "Field", "Field Name": "Field Name", "Fields that have a \"Must Contain\" or \"Must Not Contain\" Condition Check rule cannot have a \"Range\" Condition Check rule.": "Fields that have a \"Must Contain\" or \"Must Not Contain\" Condition Check rule cannot have a \"Range\" Condition Check rule.", @@ -449,6 +457,7 @@ "INVALID_DOC_FILE_EXTENSION_ALERT": "Invalid document type selected. Accepted file types are .dxf, .pdf, .idf, and .osm", "INVALID_EXTENSION_ALERT": "Sorry!<\/strong> SEED doesn't currently support that file format. Only .csv<\/strong>, .xls<\/strong>, .xlsx<\/strong>, and .xml<\/strong> files are supported.", "INVALID_GEOJSON_EXTENSION_ALERT": "Sorry!<\/strong> SEED doesn't currently support that file format. Only .geojson<\/strong> and .json<\/strong> files are supported.", + "INVALID_XLSX_EXTENSION_ALERT": "Sorry!<\/strong> SEED doesn't currently support that file format. Only .xlsx<\/strong> files are supported.", "INVALID_XML_EXTENSION_ALERT": "Sorry!<\/strong> SEED doesn't currently support that file format. Only .xml<\/strong> files are supported.", "INVALID_XML_ZIP_EXTENSION_ALERT": "Sorry!<\/strong> SEED doesn't currently support that file format. Only .xml<\/strong> and .zip<\/strong> files are supported.", "IRREVERSIBLE_OPERATION_WARNING": "This operation is irreversible.", @@ -458,7 +467,9 @@ "Ignored property duplicates within the import file": "Ignored property duplicates within the import file", "Ignored tax lot duplicates within the import file": "Ignored tax lot duplicates within the import file", "Import Access Level Instances": "Import Access Level Instances", + "Import Audit Template Buildings": "Import Audit Template Buildings", "Import Portfolio Manager Data": "Import Portfolio Manager Data", + "Import directly from ESPM": "Import directly from ESPM", "Import from Audit Template": "Import from Audit Template", "In addition, you need to specify where the field should be associated with Tax Lot data or Property data. This will affect how the data is matched and merged, as well as how it is displayed in the Inventory view.": "In addition, you need to specify where the field should be associated with Tax Lot data or Property data. 
This will affect how the data is matched and merged, as well as how it is displayed in the Inventory view.", "Inactive": "Inactive", @@ -894,6 +905,7 @@ "Select All": "Select All", "Select All Columns": "Select All Columns", "Select None": "Select None", + "Select Properties to Update": "Select Properties to Update", "Select a Custom Report to get started!": "Select a Custom Report to get started!", "Select a Program to get started!": "Select a Program to get started!", "Select a column to show data on this axis. Only columns with one of these data types will be listed:": "Select a column to show data on this axis. Only columns with one of these data types will be listed:", @@ -1000,6 +1012,7 @@ "There is also a link to the SEED-Platform Users forum, where you can connect with other users.": "There is also a link to the SEED-Platform Users forum, where you can connect with other users.", "There was an error loading the page": "There was an error loading the page", "This action replaces any of your current columns with the comma-delmited columns you provided. Would you like to continue?": "This action replaces any of your current columns with the comma-delmited columns you provided. Would you like to continue?", + "This action updates properties within the selected cycle with data from the Audit Template account associated with this organization. Only Properties with Audit Template Building IDs corresponding to those saved in Audit Template will be updated.": "This action updates properties within the selected cycle with data from the Audit Template account associated with this organization. Only Properties with Audit Template Building IDs corresponding to those saved in Audit Template will be updated.", "This cycle name is already taken.": "This cycle name is already taken.", "This email link is invalid.": "This email link is invalid.", "This label name is already taken.": "This label name is already taken.", @@ -1056,13 +1069,17 @@ "Update Filters": "Update Filters", "Update Salesforce": "Update Salesforce", "Update UBID": "Update UBID", + "Update with Audit Template": "Update with Audit Template", "Update with BuildingSync": "Update with BuildingSync", + "Update with ESPM": "Update with ESPM", "Updated": "Updated", + "Updating selected properties...": "Updating selected properties...", "Upload Access Level Instances": "Upload Access Level Instances", "Upload Audit Template XML": "Upload Audit Template XML", "Upload BuildingSync Data": "Upload BuildingSync Data", "Upload Green Button Data": "Upload Green Button Data", "Upload Portfolio Manager Data": "Upload Portfolio Manager Data", + "Upload ESPM Property": "Upload ESPM Property", "Upload a Spreadsheet": "Upload a Spreadsheet", "Upload another energy data file": "Upload another energy data file", "Upload your buildings list": "Upload your buildings list", @@ -1235,4 +1252,4 @@ "users": "users", "white": "white", "your data set name": "your data set name" -} \ No newline at end of file +} diff --git a/seed/static/seed/locales/fr_CA.json b/seed/static/seed/locales/fr_CA.json index 3a35776c69..3b0613a3e9 100644 --- a/seed/static/seed/locales/fr_CA.json +++ b/seed/static/seed/locales/fr_CA.json @@ -88,6 +88,7 @@ "Audit Template Email": "Audit Template Email", "Audit Template Organization Token": "Audit Template jeton d'organisation", "Audit Template Password": "Audit Template le mot de passe", + "Audit Template Upload Results": "Résultats du téléchargement du modèle d'audit", "Auditing": "Audit", "Auto Matching": "Correspondance 
automatique", "BEFORE_GEOCODING": "Avant le géocodage", @@ -180,6 +181,7 @@ "Conditioned Floor Area": "Surface climatisé", "Configuration": "Configuration", "Confirm": "Confirmer", + "Confirm Audit Template Building Import?": "Confirmer l'importation de la création du modèle d'audit ?", "Confirm Save Mappings?": "Confirmer enregistrer les mappages?", "Confirm delete": "Confirmer la supprimation", "Confirm new password": "Confirmer le nouveau mot de passe", @@ -319,6 +321,11 @@ "ENERGY STAR Score": "Compte ENERGY STAR", "ENERGY_TYPE_DISPLAY_CHOICE_PLACEHOLDER": "---- Choisissez le type d'énergie ----", "ENERGY_UNIT_DISPLAY_CHOICE_PLACEHOLDER": "---- Changer l'unité d'affichage ----", + "ESPM_FILE_UPLOAD": "Téléchargement de la feuille de calcul ESPM (.xlsx)", + "ESPM_IMPORT_TEXT": "Choisissez une méthode d'importation de données EnergyStar Portfolio Manager (ESPM) ci-dessous: vous pouvez soit télécharger une feuille de calcul de propriétés précédemment téléchargée depuis ESPM, soit vous connecter directement à ESPM pour accéder aux données.", + "ESPM Password": "Mot de passe ESPM", + "ESPM Property ID": "ID de propriété ESPM", + "ESPM Username": "Nom d'utilisateur ESPM", "EUI": "IUE", "EXCLUDE": "EXCLURE", "EXTRA_DATA_COL_TYPE_CHANGE": "Pour les champs «données supplémentaires», cela permet à l'utilisateur de définir le type, tel que Texte, Numéro, Date, etc.", @@ -369,6 +376,7 @@ "FIELD_NAMES_FOR_MATCHING": "Noms de zone pour l'appariement", "FILE_TYPES_SUPPORTED": "Types de fichiers pris en charge: .csv<\/strong>, .xls<\/strong>, .xlsx<\/strong>, .xml<\/strong>, .zip<\/strong>, .geojson<\/strong>, et .json<\/strong>.", "Failed to delete inventory": "Échec de la suppression de l'inventaire", + "Fetching your buildings from Audit Template...": "Récupération de vos bâtiments à partir du modèle d'audit...", "Field": "Champ", "Field Name": "Nom de champ", "Fields that have a \"Must Contain\" or \"Must Not Contain\" Condition Check rule cannot have a \"Range\" Condition Check rule.": "Les champs qui ont une règle de vérification de condition «Doit contenir» ou «Ne doit pas contenir» ne peuvent pas avoir de règle de vérification de condition «Plage».", @@ -447,10 +455,11 @@ "INTERNAL": "INTERNE", "INVALID_CSV_EXTENSION_ALERT": "Désolé!<\/strong> SEED ne prend actuellement pas en charge ce format de fichier. Seuls les fichiers .csv<\/strong> sont pris en charge.", "INVALID_DOC_FILE_EXTENSION_ALERT": "Type de document sélectionné non valide. Les types de fichiers acceptés sont .dxf, .pdf, .idf et .osm", - "INVALID_EXTENSION_ALERT": "Désolée!<\/strong> SEED ne supporte actuellement pas ce format de fichier. Seuls les fichiers .csv<\/strong>, .xls<\/strong>, .xlsx<\/strong>, et .xml<\/strong> sont pris en charge.", - "INVALID_GEOJSON_EXTENSION_ALERT": "Désolée!<\/strong> SEED ne supporte actuellement pas ce format de fichier. Seuls les fichiers .geojson<\/strong>, et .json<\/strong> sont pris en charge.", - "INVALID_XML_EXTENSION_ALERT": "Désolée!<\/strong> SEED ne supporte actuellement pas ce format de fichier. Seuls les fichiers .xml<\/strong> sont pris en charge.", - "INVALID_XML_ZIP_EXTENSION_ALERT": "Désolée!<\/strong> SEED ne supporte actuellement pas ce format de fichier. Seuls les fichiers .xml<\/strong> et .zip<\/strong> sont pris en charge.", + "INVALID_EXTENSION_ALERT": "Désolé!<\/strong> SEED ne supporte actuellement pas ce format de fichier. 
Seuls les fichiers .csv<\/strong>, .xls<\/strong>, .xlsx<\/strong>, et .xml<\/strong> sont pris en charge.", + "INVALID_GEOJSON_EXTENSION_ALERT": "Désolé!<\/strong> SEED ne supporte actuellement pas ce format de fichier. Seuls les fichiers .geojson<\/strong>, et .json<\/strong> sont pris en charge.", + "INVALID_XLSX_EXTENSION_ALERT": "Désolé!< \/strong > SEED ne prend actuellement pas en charge ce format de fichier. Seuls les fichiers .xlsx < \/strong > sont pris en charge.", + "INVALID_XML_EXTENSION_ALERT": "Désolé!<\/strong> SEED ne supporte actuellement pas ce format de fichier. Seuls les fichiers .xml<\/strong> sont pris en charge.", + "INVALID_XML_ZIP_EXTENSION_ALERT": "Désolé!<\/strong> SEED ne supporte actuellement pas ce format de fichier. Seuls les fichiers .xml<\/strong> et .zip<\/strong> sont pris en charge.", "IRREVERSIBLE_OPERATION_WARNING": "Cette opération est irréversible.", "ITEMS_WILL_NOT_CHANGE": "ceux-ci ne changeront pas", "Ignored duplicates of existing properties": "Copies ignorées des propriétés existantes", @@ -458,7 +467,9 @@ "Ignored property duplicates within the import file": "Copies de propriété ignorées dans le fichier d'importation", "Ignored tax lot duplicates within the import file": "Copies des lots d'impôt ignorées dans the fichier d'importation", "Import Access Level Instances": "Importer des instances de niveau d'accès", + "Import Audit Template Buildings": "Importer des bâtiments de modèle d'audit", "Import Portfolio Manager Data": "Importer les données de Portfolio Manager", + "Import directly from ESPM": "Importer directement depuis ESPM", "Import from Audit Template": "Importer à partir du Audit Template", "In addition, you need to specify where the field should be associated with Tax Lot data or Property data. This will affect how the data is matched and merged, as well as how it is displayed in the Inventory view.": "En outre, vous devez spécifier où le champ doit être associé aux données du lot d'impôt ou aux données de la propriété. Cela affectera la manière dont les données sont mises en correspondance et fusionnées, ainsi que la manière dont elles sont affichées dans la vue Inventaire.", "Inactive": "Inactif", @@ -894,6 +905,7 @@ "Select All": "Tout sélectionner", "Select All Columns": "Sélectionnez toutes les colonnes", "Select None": "Sélectionnez Aucun", + "Select Properties to Update": "Sélectionnez les propriétés à mettre à jour", "Select a Custom Report to get started!": "Sélectionnez un rapport personnalisé pour commencer !", "Select a Program to get started!": "Sélectionnez un programme pour commencer !", "Select a column to show data on this axis. Only columns with one of these data types will be listed:": "Sélectionnez une colonne pour afficher les données sur cet axe. Seules les colonnes contenant l'un de ces types de données seront répertoriées :", @@ -1000,6 +1012,7 @@ "There is also a link to the SEED-Platform Users forum, where you can connect with other users.": "Il y a aussi un lien vers le forum SEED-Platform Users, où vous pouvez vous connecter avec d'autres utilisateurs.", "There was an error loading the page": "Une erreur s'est produite lors du chargement de la page", "This action replaces any of your current columns with the comma-delmited columns you provided. Would you like to continue?": "Cette action remplace n'importe laquelle de vos colonnes actuelles par les colonnes délimitées par des virgules que vous avez fournies. 
Voulez-vous continuer?", + "This action updates properties within the selected cycle with data from the Audit Template account associated with this organization. Only Properties with Audit Template Building IDs corresponding to those saved in Audit Template will be updated.": "Cette action met à jour les propriétés du cycle sélectionné avec les données du compte de modèle d'audit associé à cette organisation. Seules les propriétés avec des ID de bâtiment de modèle d'audit correspondant à ceux enregistrés dans le modèle d'audit seront mises à jour.", "This cycle name is already taken.": "Ce nom de cycle est déjà pris.", "This email link is invalid.": "Cette opération est irréversible.", "This label name is already taken.": "Ce nom d'étiquette est déjà pris.", @@ -1056,13 +1069,17 @@ "Update Filters": "Mise à jour les filtres", "Update Salesforce": "Mettre à jour Salesforce", "Update UBID": "Mettre à jour UBID", + "Update with Audit Template": "Mise à jour avec Audit Template", "Update with BuildingSync": "Mettre à jour avec BuildingSync", + "Update with ESPM": "Mise à jour avec ESPM", "Updated": "Mise à jour", + "Updating selected properties...": "Mise à jour des propriétés sélectionnées...", "Upload Access Level Instances": "Importer des instances de niveau d'accès", "Upload Audit Template XML": "Téléchargement Audit Template XML", "Upload BuildingSync Data": "Télécharger les données du BuildingSync", "Upload Green Button Data": "Télécharger les données du Green Button", "Upload Portfolio Manager Data": "Télécharger les données du Portfolio Manager", + "Upload Single ESPM Property": "Télécharger une seule propriété ESPM", "Upload a Spreadsheet": "Télécharger une feuille de calcul", "Upload another energy data file": "Télécharger un autre fichier de données d'énergie", "Upload your buildings list": "Télécharger votre liste de bâtiments", @@ -1235,4 +1252,4 @@ "users": "utilisateurs", "white": "blanc", "your data set name": "votre nom du jeu de données" -} \ No newline at end of file +} diff --git a/seed/static/seed/partials/data_upload_espm_modal.html b/seed/static/seed/partials/data_upload_espm_modal.html new file mode 100644 index 0000000000..4d59d18d7e --- /dev/null +++ b/seed/static/seed/partials/data_upload_espm_modal.html @@ -0,0 +1,80 @@ + + +
[the 80 added lines of Angular markup for the ESPM upload modal (a spreadsheet-upload section and a direct import-from-ESPM section) did not survive extraction]
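Because the modal's markup was lost, the API flow it drives is easiest to see directly. Here is a hedged sketch of the two calls using `requests`; the endpoints and query parameters come from the service and uploader code in this diff, while the host, credentials, and the organization/cycle/view/profile IDs are placeholders:

```python
# Hypothetical walkthrough, not part of this diff: download one property from
# ESPM through SEED, then update the property view with the spreadsheet.
import time

import requests

HOST = 'http://localhost:8000'  # placeholder SEED host
AUTH = ('user@example.com', '<api-key>')  # placeholder credentials; auth scheme varies by deployment

# Step 1: SEED proxies the XLSX download for a single ESPM property.
xlsx = requests.post(
    f'{HOST}/api/v3/portfolio_manager/22482007/download/?organization_id=1',
    json={'username': '<espm-user>', 'password': '<espm-pass>'},
    auth=AUTH,
).content

# Step 2: PUT the spreadsheet to the new endpoint to update the property view.
resp = requests.put(
    f'{HOST}/api/v3/properties/5/update_with_espm/'
    '?cycle_id=1&organization_id=1&mapping_profile_id=2',
    files={'file': (f'espm_{int(time.time())}.xlsx', xlsx)},
    auth=AUTH,
)
print(resp.json())
```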
diff --git a/seed/static/seed/partials/inventory_detail.html b/seed/static/seed/partials/inventory_detail.html
index 5ea688def3..03acbfa694 100644
--- a/seed/static/seed/partials/inventory_detail.html
+++ b/seed/static/seed/partials/inventory_detail.html
@@ -64,6 +64,10 @@
[markup stripped in this extract; the four added lines sit in the action menu next to the "Update with Audit Template" and "Export BuildingSync" entries, presumably the "Update with ESPM" item that opens the new modal]
@@ -104,6 +108,7 @@
[markup stripped in this extract; one line is added alongside the existing uploader elements]
    diff --git a/seed/static/seed/scss/style.scss b/seed/static/seed/scss/style.scss index 86c63ba834..51724da3f7 100755 --- a/seed/static/seed/scss/style.scss +++ b/seed/static/seed/scss/style.scss @@ -4872,3 +4872,16 @@ ul.r-list { border-color: #dd2c00; } } + +.or-text { + font-size: 1.3em; + text-align: center; +} + +.modal-content-section { + padding: 1em; + + &:not(:last-child) { + border-bottom: 1px solid #ddd; + } +} diff --git a/seed/templates/seed/_scripts.html b/seed/templates/seed/_scripts.html index ccd52971d7..4a7b5edd3c 100644 --- a/seed/templates/seed/_scripts.html +++ b/seed/templates/seed/_scripts.html @@ -51,6 +51,7 @@ + @@ -141,6 +142,7 @@ + diff --git a/seed/tests/data/mappings/espm-single-mapping.csv b/seed/tests/data/mappings/espm-single-mapping.csv new file mode 100644 index 0000000000..101b3df8bb --- /dev/null +++ b/seed/tests/data/mappings/espm-single-mapping.csv @@ -0,0 +1,25 @@ +Raw Columns,units,SEED Table,SEED Columns +How Many Buildings?,,PropertyState,building_count +City/Municipality,,PropertyState,city +Construction Status,,PropertyState,Construction Status +Country,,PropertyState,Country +Federal Agency/Department,,PropertyState,Federal Agency/Department +GFA Units,,PropertyState,GFA Units +Gross Floor Area,ft**2,PropertyState,gross_floor_area +Irrigated Area,,PropertyState,Irrigated Area +Irrigated Area Units,,PropertyState,Irrigated Area Units +Is this an Institutional Property? (Applicable only for Canadian properties),,PropertyState,Is this an Institutional Property? (Applicable only for Canadian properties) +Is this Property Owned or Operated by the US or Canadian Federal Government?,,PropertyState,Is this Property Owned or Operated by the US or Canadian Federal Government? +Number of Buildings,,PropertyState,Number of Buildings +Occupancy (%),,PropertyState,Occupancy +Other State/Province,,PropertyState,Other State +Parent Property ID,,PropertyState,Parent Property ID +Parent Property Name (if Applicable),,PropertyState,Parent Property Name (if Applicable) +Portfolio Manager ID,,PropertyState,pm_property_id +Postal Code,,PropertyState,postal_code +Property Name,,PropertyState,property_name +Property Type - Self-Selected,,PropertyState,property_type +State/Province,,PropertyState,state +Street Address,,PropertyState,address_line_1 +Street Address 2,,PropertyState,address_line_2 +Year Built,,PropertyState,year_built diff --git a/seed/tests/data/portfolio-manager-single-22482007.xlsx b/seed/tests/data/portfolio-manager-single-22482007.xlsx new file mode 100644 index 0000000000..0317d2c7f3 Binary files /dev/null and b/seed/tests/data/portfolio-manager-single-22482007.xlsx differ diff --git a/seed/tests/test_account_views.py b/seed/tests/test_account_views.py index 8fb6c31b4a..497da3cff7 100644 --- a/seed/tests/test_account_views.py +++ b/seed/tests/test_account_views.py @@ -85,7 +85,6 @@ def test_dict_org(self): 'mapquest_api_key': '', 'geocoding_enabled': True, 'better_analysis_api_key': '', - 'better_host_url': 'https://better-lbnl-staging.herokuapp.com', 'property_display_field': 'address_line_1', 'taxlot_display_field': 'address_line_1', 'display_meter_units': Organization._default_display_meter_units, @@ -98,13 +97,18 @@ def test_dict_org(self): 'at_organization_token': '', 'audit_template_user': '', 'audit_template_password': '', - 'at_host_url': 'https://api.labworks.org', 'salesforce_enabled': False, 'ubid_threshold': 1 } org_payload = _dict_org(self.fake_request, [self.org]) + self.assertEqual(len(org_payload), 1) + # pull out and test the URLs 
that can be configured differently based on the test environment. + better_url = org_payload[0].pop('better_host_url') + self.assertRegexpMatches(better_url, r'^https://.*better.*$') + at_url = org_payload[0].pop('at_host_url') + self.assertRegexpMatches(at_url, r'^https://.*labworks.*$|https://buildingenergyscore.energy.gov$') self.assertDictEqual(org_payload[0], expected_single_org_payload) # Now let's make sure that we pick up related buildings correctly. @@ -124,10 +128,11 @@ def test_dict_org(self): 'name': self.cal_year_name, 'cycle_id': self.cycle.pk }] - self.assertDictEqual( - _dict_org(self.fake_request, [self.org])[0], - expected_single_org_payload - ) + org_payload_2 = _dict_org(self.fake_request, [self.org])[0] + # pop the urls again + org_payload_2.pop('better_host_url') + org_payload_2.pop('at_host_url') + self.assertDictEqual(org_payload_2, expected_single_org_payload) def test_dict_org_w_member_in_parent_and_child(self): """What happens when a user has a role in parent and child.""" @@ -179,7 +184,6 @@ def test_dict_org_w_member_in_parent_and_child(self): 'mapquest_api_key': '', 'geocoding_enabled': True, 'better_analysis_api_key': '', - 'better_host_url': 'https://better-lbnl-staging.herokuapp.com', 'property_display_field': 'address_line_1', 'taxlot_display_field': 'address_line_1', 'display_meter_units': Organization._default_display_meter_units, @@ -192,7 +196,6 @@ def test_dict_org_w_member_in_parent_and_child(self): 'at_organization_token': '', 'audit_template_user': '', 'audit_template_password': '', - 'at_host_url': 'https://api.labworks.org', 'salesforce_enabled': False, 'ubid_threshold': 1 }], @@ -211,7 +214,6 @@ def test_dict_org_w_member_in_parent_and_child(self): 'mapquest_api_key': '', 'geocoding_enabled': True, 'better_analysis_api_key': '', - 'better_host_url': 'https://better-lbnl-staging.herokuapp.com', 'property_display_field': 'address_line_1', 'taxlot_display_field': 'address_line_1', 'display_meter_units': Organization._default_display_meter_units, @@ -224,13 +226,18 @@ def test_dict_org_w_member_in_parent_and_child(self): 'at_organization_token': '', 'audit_template_user': '', 'audit_template_password': '', - 'at_host_url': 'https://api.labworks.org', 'salesforce_enabled': False, 'ubid_threshold': 1 } org_payload = _dict_org(self.fake_request, Organization.objects.all()) + # pop the better and at urls + org_payload[0].pop('better_host_url') + org_payload[0].pop('at_host_url') + org_payload[0]['sub_orgs'][0].pop('better_host_url') + org_payload[0]['sub_orgs'][0].pop('at_host_url') + self.assertEqual(len(org_payload), 2) self.assertDictEqual(org_payload[0], expected_multiple_org_payload) diff --git a/seed/tests/test_api_mixins.py b/seed/tests/test_api_mixins.py index 4e4fdd0a08..9badca358e 100644 --- a/seed/tests/test_api_mixins.py +++ b/seed/tests/test_api_mixins.py @@ -374,18 +374,18 @@ def test_get_profile_id(self): self.assertListEqual(columns['extra_data'], ['field_1']) # no extra data - columnlistprofile = self.column_list_factory.get_columnlistprofile( + column_list_profile = self.column_list_factory.get_columnlistprofile( columns=['address_line_1', 'site_eui'] ) - columns = self.mixin_class.get_show_columns(self.org.id, columnlistprofile.id) + columns = self.mixin_class.get_show_columns(self.org.id, column_list_profile.id) self.assertListEqual(columns['fields'], ['extra_data', 'id', 'address_line_1', 'site_eui']) self.assertListEqual(columns['extra_data'], []) # with extra data - columnlistprofile = 
self.column_list_factory.get_columnlistprofile( + column_list_profile = self.column_list_factory.get_columnlistprofile( columns=['address_line_1', 'site_eui', 'field_1'] ) - columns = self.mixin_class.get_show_columns(self.org.id, columnlistprofile.id) + columns = self.mixin_class.get_show_columns(self.org.id, column_list_profile.id) self.assertListEqual(columns['fields'], ['extra_data', 'id', 'address_line_1', 'site_eui']) self.assertListEqual(columns['extra_data'], ['field_1']) diff --git a/seed/tests/test_portfoliomanager.py b/seed/tests/test_portfoliomanager.py index 9a25ad1560..891d2dc9c4 100644 --- a/seed/tests/test_portfoliomanager.py +++ b/seed/tests/test_portfoliomanager.py @@ -15,6 +15,7 @@ import xmltodict from django.test import TestCase from django.urls import reverse_lazy +from xlrd import open_workbook from seed.landing.models import SEEDUser as User from seed.utils.organizations import create_organization @@ -87,7 +88,7 @@ def test_template_list_interface_no_username(self): # status should be error # message should have "missing username" self.assertEqual(400, resp.status_code) - data = json.loads(resp.content) + data = resp.json() self.assertIn('status', data) self.assertIn('message', data) self.assertEqual('error', data['status']) @@ -103,7 +104,7 @@ def test_template_list_interface_no_password(self): # status should be error # message should have "missing password" self.assertEqual(400, resp.status_code) - data = json.loads(resp.content) + data = resp.json() self.assertIn('status', data) self.assertIn('message', data) self.assertEqual('error', data['status']) @@ -120,7 +121,7 @@ def test_template_list_invalid_credentials(self): # status should be error # message should have "missing template" self.assertEqual(400, resp.status_code) - data = json.loads(resp.content) + data = resp.json() self.assertIn('status', data) self.assertIn('message', data) self.assertEqual('error', data['status']) @@ -209,7 +210,7 @@ def test_report_interface_no_username(self): # status should be error # message should have "missing username" self.assertEqual(400, resp.status_code) - data = json.loads(resp.content) + data = resp.json() self.assertIn('status', data) self.assertIn('message', data) self.assertEqual('error', data['status']) @@ -225,7 +226,7 @@ def test_report_interface_no_password(self): # status should be error # message should have "missing password" self.assertEqual(400, resp.status_code) - data = json.loads(resp.content) + data = resp.json() self.assertIn('status', data) self.assertIn('message', data) self.assertEqual('error', data['status']) @@ -241,7 +242,7 @@ def test_report_interface_no_template(self): # status should be error # message should have "missing template" self.assertEqual(400, resp.status_code) - data = json.loads(resp.content) + data = resp.json() self.assertIn('status', data) self.assertIn('message', data) self.assertEqual('error', data['status']) @@ -266,7 +267,7 @@ def test_report_invalid_credentials(self): # status should be error # message should have "missing template" self.assertEqual(400, resp.status_code) - data = json.loads(resp.content) + data = resp.json() self.assertIn('status', data) self.assertIn('message', data) self.assertEqual('error', data['status']) @@ -368,6 +369,8 @@ def test_report_generation_empty_child_template(self): class PortfolioManagerReportSinglePropertyUploadTest(TestCase): + """Test case for downloading a report with a single building and saving + it to SEED's Dataset upload api.""" def setUp(self): user_details = { @@ -398,7 +401,7 
@@ def setUp(self):
 
     @pm_skip_test_check
     def test_single_property_template_for_upload(self):
-        # create a single property report with template
+        # create a single ESPM property report with template
         template = {
             "children": [],
             "display_name": "SEED_Test - Single Property",
@@ -415,11 +418,11 @@ def test_single_property_template_for_upload(self):
         )
         self.assertEqual(200, report_response.status_code)
 
-        property_info = json.loads(report_response.content)
+        property_info = report_response.json()
         self.assertEqual(1, len(property_info['properties']))
         self.assertIsInstance(property_info['properties'], list)
 
-        # add report to dataset
+        # add report to SEED's dataset
         response = self.client.post(
             reverse_lazy('api:v3:upload-create-from-pm-import'),
             json.dumps({
@@ -431,6 +434,70 @@ def test_single_property_template_for_upload(self):
         self.assertEqual(200, response.status_code)
 
+
+class PortfolioManagerSingleReportXLSX(TestCase):
+    """Test downloading a single ESPM report in XLSX format."""
+
+    def setUp(self):
+        user_details = {
+            'username': 'test_user@demo.com',
+            'password': 'test_pass',
+        }
+        self.user = User.objects.create_superuser(
+            email='test_user@demo.com', **user_details
+        )
+        self.org, _, _ = create_organization(self.user)
+        self.client.login(**user_details)
+
+        self.pm_un = os.environ.get(PM_UN, False)
+        self.pm_pw = os.environ.get(PM_PW, False)
+        if not self.pm_un or not self.pm_pw:
+            self.fail('Somehow PM test was initiated without %s or %s in the environment' % (PM_UN, PM_PW))
+
+        self.output_dir = Path(__file__).parent.absolute() / 'output'
+        if not self.output_dir.exists():
+            os.mkdir(self.output_dir)
+
+    @pm_skip_test_check
+    def test_single_report_download(self):
+        # PM ID 22178850 is a more complete test case with meter data
+        pm_id = 22178850
+
+        # remove the file if it exists
+        new_file = self.output_dir / f"single_property_{pm_id}.xlsx"
+        if new_file.exists():
+            new_file.unlink()
+        self.assertFalse(new_file.exists())
+
+        pm = PortfolioManagerImport(self.pm_un, self.pm_pw)
+
+        content = pm.return_single_property_report(pm_id)
+        self.assertIsNotNone(content)
+        with open(new_file, 'wb') as file:
+            file.write(content)
+
+        self.assertTrue(new_file.exists())
+
+        # load the XLSX file and ensure that it has the right tabs
+        workbook = open_workbook(new_file)
+        self.assertIn('Property', workbook.sheet_names())
+        self.assertIn('Meters', workbook.sheet_names())
+        self.assertIn('Meter Entries', workbook.sheet_names())
+
+        # verify that the Property worksheet has the PM id in it
+        sheet = workbook.sheet_by_name('Property')
+        self.assertTrue(str(pm_id) in str(sheet._cell_values))
+
+    @pm_skip_test_check
+    def test_single_report_view(self):
+        pm_id = 22178850
+        response = self.client.post(
+            reverse_lazy('api:v3:portfolio_manager-download', args=[pm_id]),
+            json.dumps({"username": self.pm_un, "password": self.pm_pw}),
+            content_type='application/json',
+        )
+        self.assertEqual(200, response.status_code)
+
+
 class PortfolioManagerReportParsingTest(TestCase):
     """Test the parsing of the resulting PM XML file.
This is only for the version 2 parsing""" diff --git a/seed/tests/test_property_views.py b/seed/tests/test_property_views.py index 010cb5ae28..b14d6572a6 100644 --- a/seed/tests/test_property_views.py +++ b/seed/tests/test_property_views.py @@ -29,8 +29,10 @@ from seed.lib.xml_mapping.mapper import default_buildingsync_profile_mappings from seed.models import ( DATA_STATE_MAPPING, + DATA_STATE_MATCHING, GREEN_BUTTON, PORTFOLIO_METER_USAGE, + PORTFOLIO_RAW, SEED_DATA_SOURCES, BuildingFile, Column, @@ -47,6 +49,10 @@ TaxLotView ) from seed.models.sensors import DataLogger, Sensor, SensorReading +from seed.serializers.properties import ( + PropertyStatePromoteWritableSerializer, + PropertyStateSerializer +) from seed.test_helpers.fake import ( FakeColumnFactory, FakeColumnListProfileFactory, @@ -93,6 +99,76 @@ def setUp(self): self.column_list_factory = FakeColumnListProfileFactory(organization=self.org) self.client.login(**user_details) + def test_create_property(self): + state = self.property_state_factory.get_property_state() + cycle_id = self.cycle.id + + params = json.dumps({ + "cycle_id": cycle_id, + "state": PropertyStateSerializer(state).data + }) + + url = reverse('api:v3:properties-list') + '?organization_id={}'.format(self.org.pk) + response = self.client.post(url, params, content_type='application/json') + self.assertEqual(response.status_code, 201) + self.assertEqual(response.json()['status'], 'success') + + def test_create_property_in_diff_org(self): + state = self.property_state_factory.get_property_state() + cycle_id = self.cycle.id + user_2 = User.objects.create_superuser( + **{'username': 'test_user2@demo.com', 'password': 'test_pass', 'email': 'test_user2@demo.com'}) + org_2, _, _ = create_organization(user_2) + + # verify that user (1) can't post to user_2's org + params = json.dumps({ + "cycle_id": cycle_id, + "state": PropertyStateSerializer(state).data + }) + url = reverse('api:v3:properties-list') + '?organization_id={}'.format(org_2.pk) + response = self.client.post(url, params, content_type='application/json') + self.assertEqual(response.status_code, 403) + self.assertEqual(response.json()['detail'], 'You do not have permission to perform this action.') + + def test_create_property_with_protected_fields(self): + state = self.property_state_factory.get_property_state() + state.normalized_address = '741 Evergreen Terrace' + state.data_state = 999 + cycle_id = self.cycle.id + + params = json.dumps({ + "cycle_id": cycle_id, + "state": PropertyStateSerializer(state).data + }) + + url = reverse('api:v3:properties-list') + '?organization_id={}'.format(self.org.pk) + response = self.client.post(url, params, content_type='application/json') + self.assertEqual(response.status_code, 201) + self.assertEqual(response.json()['status'], 'success') + + # verify that the protected fields were not overwritten + new_state_data = response.json()['view']['state'] + self.assertNotEqual(new_state_data['normalized_address'], state.normalized_address) + self.assertNotEqual(new_state_data['data_state'], state.data_state) + self.assertEqual(new_state_data['data_state'], DATA_STATE_MATCHING) + + # above was for spot checking, now look at the serializer and make sure that the + # protected column objects are read_only. 
+ serializer = PropertyStatePromoteWritableSerializer(new_state_data) + protected_columns = list(set(Column.EXCLUDED_MAPPING_FIELDS + Column.COLUMN_EXCLUDE_FIELDS)) + # go through each of the Column's class columns and ensure that the serializer is read only + # map the related object ids to the column names + protected_columns.pop(protected_columns.index('import_file')) + protected_columns.pop(protected_columns.index('extra_data')) # extra_data is allowed + protected_columns.append('import_file_id') + protected_columns.append('measures') + protected_columns.append('scenarios') + protected_columns.append('files') + + for column in protected_columns: + self.assertIsNotNone(serializer.fields.get(column), f"Column {column} is not in the serializer") + self.assertTrue(serializer.fields[column].read_only, f"Column {column} is not read_only in the write serializer") + def test_get_and_edit_properties(self): state = self.property_state_factory.get_property_state() prprty = self.property_factory.get_property() @@ -149,8 +225,9 @@ def test_upload_inventory_document_and_delete(self): view = PropertyView.objects.create( property=prprty, cycle=self.cycle, state=state ) - location = os.path.realpath(os.path.join(os.getcwd(), os.path.dirname(__file__))) - test_filepath = os.path.relpath(os.path.join(location, 'data', 'test-document.pdf')) + + test_filepath = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data', 'test-document.pdf') + url = reverse('api:v3:properties-detail', args=[view.id]) + f'upload_inventory_document/?organization_id={self.org.pk}' document = open(test_filepath, 'rb') @@ -2132,3 +2209,84 @@ def test_property_meter_usage_can_filter_when_usages_span_a_single_month(self): self.assertCountEqual(result_dict['readings'], expectation['readings']) self.assertCountEqual(result_dict['column_defs'], expectation['column_defs']) + + +class PropertyViewUpdateWithESPMTests(DataMappingBaseTestCase): + def setUp(self): + user_details = { + 'username': 'test_user@demo.com', + 'password': 'test_pass', + 'email': 'test_user@demo.com' + } + selfvars = self.set_up( + PORTFOLIO_RAW, user_details['username'], user_details['password'] + ) + self.user, self.org, self.import_file_1, self.import_record_1, self.cycle_1 = selfvars + + # create the test factories + self.column_factory = FakeColumnFactory(organization=self.org) + self.cycle_factory = FakeCycleFactory(organization=self.org, user=self.user) + self.property_factory = FakePropertyFactory(organization=self.org) + self.property_state_factory = FakePropertyStateFactory(organization=self.org) + self.property_view_factory = FakePropertyViewFactory(organization=self.org) + self.column_list_factory = FakeColumnListProfileFactory(organization=self.org) + + # log into the client + self.client.login(**user_details) + + def test_update_property_view_with_espm(self): + """Simple test to verify that the property state is merged with an updated + ESPM download XLSX file.""" + pm_property_id = '22482007' + pv = self.property_view_factory.get_property_view( + cycle=self.cycle_1, pm_property_id=pm_property_id + ) + self.assertTrue(pv.state.pm_property_id, pm_property_id) + + # save some of the pv state's data to verify merging + pv_city = pv.state.city + pv_address_line_1 = pv.state.address_line_1 + pv_site_eui = pv.state.site_eui + + mapping_filepath = os.path.join( + os.path.dirname(os.path.abspath(__file__)), 'data', 'mappings', 'espm-single-mapping.csv' + ) + + # need to upload the mappings for the ESPM data to a new profile + mapping_profile = 
ColumnMappingProfile.create_from_file( + mapping_filepath, self.org, 'ESPM', overwrite_if_exists=True + ) + + test_filepath = os.path.join( + os.path.dirname(os.path.abspath(__file__)), 'data', f'portfolio-manager-single-{pm_property_id}.xlsx' + ) + + url = reverse('api:v3:properties-update-with-espm', args=[pv.id]) + url += f"?organization_id={self.org.id}&cycle_id={self.cycle_1.id}&mapping_profile_id={mapping_profile.id}" + doc = open(test_filepath, 'rb') + # need to encode the data as multipart form data since this is a PUT. A + # POST in the client defaults to multipart, so in a PUT we have to construct it. + response = self.client.put( + path=url, + data=encode_multipart(data=dict( + file=doc, + file_type='XLSX', + name=doc.name), + boundary=BOUNDARY), + content_type=MULTIPART_CONTENT + ) + self.assertEqual(response.status_code, 200) + + # now spot check that some of fields were updated + pv.refresh_from_db() + self.assertNotEqual(pv.state.city, pv_city) + self.assertNotEqual(pv.state.address_line_1, pv_address_line_1) + # site_eui should not have changed + self.assertEqual(pv.state.site_eui.magnitude, pv_site_eui) + + # check that the values are what is in the XLSX file + self.assertEqual(pv.state.city, 'WASHINGTON') + self.assertEqual(pv.state.address_line_1, '2425 N STREET NW') + + # verify that the property has meters too, which came from the XLSX file + self.assertEqual(pv.property.meters.count(), 2) diff --git a/seed/tests/util.py b/seed/tests/util.py index 7f2624cd0b..f505821866 100644 --- a/seed/tests/util.py +++ b/seed/tests/util.py @@ -89,14 +89,14 @@ def tearDown(self): class DataMappingBaseTestCase(DeleteModelsTestCase): """Base Test Case Class to handle data import""" - def set_up(self, import_file_source_type): + def set_up(self, import_file_source_type, user_name='test_user@demo.com', user_password='test_pass'): # default_values import_file_data_state = getattr(self, 'import_file_data_state', DATA_STATE_IMPORT) - if not User.objects.filter(username='test_user@demo.com').exists(): - user = User.objects.create_user('test_user@demo.com', password='test_pass') + if not User.objects.filter(username=user_name).exists(): + user = User.objects.create_user(user_name, password=user_password) else: - user = User.objects.get(username='test_user@demo.com') + user = User.objects.get(username=user_name) org, _, _ = create_organization(user, "test-organization-a") diff --git a/seed/utils/api_schema.py b/seed/utils/api_schema.py index 44409894f3..9013990c41 100644 --- a/seed/utils/api_schema.py +++ b/seed/utils/api_schema.py @@ -16,6 +16,7 @@ class AutoSchemaHelper(SwaggerAutoSchema): 'string': openapi.TYPE_STRING, 'boolean': openapi.TYPE_BOOLEAN, 'integer': openapi.TYPE_INTEGER, + 'object': openapi.TYPE_OBJECT, } @classmethod diff --git a/seed/views/v3/analyses.py b/seed/views/v3/analyses.py index c6982eece0..a126ebf32c 100644 --- a/seed/views/v3/analyses.py +++ b/seed/views/v3/analyses.py @@ -369,7 +369,7 @@ def get_counts(field_name): property_types = get_counts('extra_data__Largest Property Use Type') year_built = get_counts('year_built') energy = get_counts('site_eui') - sqftage = get_counts('gross_floor_area') + square_footage = get_counts('gross_floor_area') from collections import defaultdict @@ -443,16 +443,16 @@ def get_counts(field_name): e[f['site_eui']] += f['count'] energy_list2 = [{'site_eui': site_eui, 'percentage': count / views.count() * 100} for site_eui, count in e.items()] - sqftage_list = [] - for i in sqftage: + square_footage_list = [] + for i in square_footage: 
dict = i.copy() for k, v in i.items(): if isinstance(v, Quantity): dict[k] = v.to(ureg.feet**2).magnitude - sqftage_list.append(dict) + square_footage_list.append(dict) - sqftage_agg = [] - for record in sqftage_list: + square_footage_agg = [] + for record in square_footage_list: dict = record.copy() if isinstance(record['gross_floor_area'], float): if 0 < record['gross_floor_area'] <= 1000: @@ -475,12 +475,12 @@ def get_counts(field_name): dict['gross_floor_area'] = "500,000-1,000,000" else: dict['gross_floor_area'] = "> 1,000,000" - sqftage_agg.append(dict) + square_footage_agg.append(dict) g = defaultdict(int) - for h in sqftage_agg: + for h in square_footage_agg: g[h['gross_floor_area']] += h['count'] - sqftage_list2 = [{'gross_floor_area': gross_floor_area, 'percentage': count / views.count() * 100} for gross_floor_area, count in g.items()] + square_footage_list2 = [{'gross_floor_area': gross_floor_area, 'percentage': count / views.count() * 100} for gross_floor_area, count in g.items()] extra_data_list = [] for data in states.values_list('extra_data', flat=True): @@ -509,7 +509,7 @@ def get_counts(field_name): 'property_types': property_types, 'year_built': year_built_list, 'energy': energy_list2, - 'square_footage': sqftage_list2 + 'square_footage': square_footage_list2 }) @swagger_auto_schema(manual_parameters=[ diff --git a/seed/views/v3/columns.py b/seed/views/v3/columns.py index c51e7701b0..2508fb903e 100644 --- a/seed/views/v3/columns.py +++ b/seed/views/v3/columns.py @@ -115,7 +115,7 @@ def list(self, request): @api_endpoint_class @ajax_request_class def create(self, request): - self.get_organization(self.request) + org_id = self.get_organization(self.request) table_name = self.request.data.get("table_name") if table_name != "PropertyState" and table_name != "TaxLotState": @@ -125,6 +125,9 @@ def create(self, request): }, status=status.HTTP_400_BAD_REQUEST) try: + # set request data organization_id to org_id just in case it is not set or incorrectly set + self.request.data['organization_id'] = org_id + new_column = Column.objects.create( is_extra_data=True, **self.request.data diff --git a/seed/views/v3/portfolio_manager.py b/seed/views/v3/portfolio_manager.py index 7832c2cbfb..c25e6032e2 100644 --- a/seed/views/v3/portfolio_manager.py +++ b/seed/views/v3/portfolio_manager.py @@ -7,10 +7,11 @@ import json import logging import time +from datetime import datetime import requests import xmltodict -from django.http import JsonResponse +from django.http import HttpResponse, JsonResponse from drf_yasg.utils import swagger_auto_schema from rest_framework import serializers, status from rest_framework.decorators import action @@ -31,7 +32,7 @@ class PortfolioManagerSerializer(serializers.Serializer): class PortfolioManagerViewSet(GenericViewSet): """ - This viewset contains two API views: /template_list/ and /report/ that are used to interface SEED with ESPM + This ViewSet contains two API views: /template_list/ and /report/ that are used to interface SEED with ESPM """ serializer_class = PortfolioManagerSerializer @@ -100,7 +101,8 @@ def template_list(self, request): 'password': 'string', 'template': { '[copy information from template_list]': 'string' - } + }, + 'report_format': 'string' }, description='ESPM account credentials.', required=['username', 'password'] @@ -146,9 +148,13 @@ def report(self, request): {'status': 'error', 'message': 'Invalid call to PM worker: missing template for PM account'}, status=status.HTTP_400_BAD_REQUEST ) + username = request.data['username'] 
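+        # NOTE: these ESPM credentials are used only to authenticate the
+        # PortfolioManagerImport session created below; they are not stored
+        # by this endpoint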
password = request.data['password'] template = request.data['template'] + # report format defaults to XML if not provided + report_format = request.data.get('report_format', 'XML') + pm = PortfolioManagerImport(username, password) try: try: @@ -162,14 +168,34 @@ def report(self, request): status=status.HTTP_400_BAD_REQUEST ) if template['z_seed_child_row']: - content = pm.generate_and_download_child_data_request_report(template) + content = pm.generate_and_download_child_data_request_report(template, report_format) else: - content = pm.generate_and_download_template_report(template) + content = pm.generate_and_download_template_report(template, report_format) except PMExcept as pme: _log.debug("%s: %s" % (str(pme), str(template))) return JsonResponse({'status': 'error', 'message': str(pme)}, status=status.HTTP_400_BAD_REQUEST) + + if report_format == 'EXCEL': + try: + # return the excel file + filename = 'pm_report_export.xlsx' + response = HttpResponse( + content_type='application/vnd.openxmlformats-officedocument.spreadsheetml.sheet') + response['Content-Disposition'] = 'attachment; filename="{}"'.format(filename) + response.write(content) + return response + + except Exception as e: + _log.debug("ERROR downloading EXCEL report: %s" % str(e)) + return JsonResponse( + {'status': 'error', 'message': 'Malformed XML from template download'}, + status=status.HTTP_400_BAD_REQUEST + ) + + # rest is for XML reports try: content_object = xmltodict.parse(content, dict_constructor=dict) + except Exception: # catch all because xmltodict doesn't specify a class of Exceptions _log.debug("Malformed XML from template download: %s" % str(content)) return JsonResponse( @@ -205,15 +231,67 @@ def report(self, request): _log.debug("%s: %s" % (e, str(request.data))) return JsonResponse({'status': 'error', 'message': e}, status=status.HTTP_400_BAD_REQUEST) + @swagger_auto_schema( + manual_parameters=[ + AutoSchemaHelper.query_integer_field('id', True, 'ID of the ESPM Property to download') + ], + request_body=AutoSchemaHelper.schema_factory( + { + 'username': 'string', + 'password': 'string', + 'filename': 'string', + }, + description='ESPM account credentials.', + required=['username', 'password'] + ), + ) + @action(detail=True, methods=['POST']) + def download(self, request, pk): + """Download a single property report from Portfolio Manager. 
The PK is the + PM property ID that is on ESPM""" + if 'username' not in request.data: + _log.debug("Invalid call to PM worker: missing username for PM account: %s" % str(request.data)) + return JsonResponse( + {'status': 'error', 'message': 'Invalid call to PM worker: missing username for PM account'}, + status=status.HTTP_400_BAD_REQUEST + ) + if 'password' not in request.data: + _log.debug("Invalid call to PM worker: missing password for PM account: %s" % str(request.data)) + return JsonResponse( + {'status': 'error', 'message': 'Invalid call to PM worker: missing password for PM account'}, + status=status.HTTP_400_BAD_REQUEST + ) + + username = request.data['username'] + password = request.data['password'] + if 'filename' not in request.data: + filename = f'pm_{pk}_{datetime.strftime(datetime.now(), "%Y%m%d_%H%M%S")}.xlsx' + else: + filename = request.data['filename'] + + pm = PortfolioManagerImport(username, password) + try: + content = pm.return_single_property_report(pk) + + # return the excel file as the HTTP response + response = HttpResponse( + content_type='application/vnd.openxmlformats-officedocument.spreadsheetml.sheet') + response['Content-Disposition'] = 'attachment; filename="{}"'.format(filename) + response.write(content) + return response + except PMExcept as pme: + _log.debug(f"{str(pme)}: PM Property ID {pk}") + return JsonResponse({'status': 'error', 'message': str(pme)}, status=status.HTTP_400_BAD_REQUEST) + + +# TODO: Move this object to /seed/utils/portfolio_manager.py class PortfolioManagerImport(object): - """ - This class is essentially a wrapper around the ESPM login/template/report operations + """This class is essentially a wrapper around the ESPM login/template/report operations """ def __init__(self, m_username, m_password): - """ - To instantiate this class, provide ESPM username and password. Currently, this constructor doesn't do anything + """To instantiate this class, provide ESPM username and password. Currently, this constructor doesn't do anything except store the credentials. :param m_username: The ESPM username @@ -231,9 +309,10 @@ def __init__(self, m_username, m_password): # The root URL for downloading the report, code will add the template ID and the XML self.DOWNLOAD_REPORT_URL = "https://portfoliomanager.energystar.gov/pm/reports/download" + self.DOWNLOAD_SINGLE_PROPERTY_REPORT_URL = "https://portfoliomanager.energystar.gov/pm/property" + def login_and_set_cookie_header(self): - """ - This method calls out to ESPM to perform a login operation and get a session authentication token. This token + """This method calls out to ESPM to perform a login operation and get a session authentication token. This token is then stored in the proper form to allow authenticated calls into ESPM. :return: None @@ -266,8 +345,7 @@ def login_and_set_cookie_header(self): } def get_list_of_report_templates(self): - """ - New method to support update to ESPM + """New method to support update to ESPM :return: Returns a list of template objects. All rows will have a z_seed_child_row key that is False for main rows and True for child rows @@ -315,7 +393,7 @@ def get_list_of_report_templates(self): raise PMExcept('Unsuccessful response from child row template lookup; aborting.') try: # the data are now in the string of the data key of the returned dictionary with an excessive amount of - # escaped doublequotes. + # escaped double quotes. 
# e.g., response = {"data": "{"customReportsData":"..."}"} decoded = json.loads(children_response.text) # .encode('utf-8').decode('unicode_escape') @@ -351,11 +429,45 @@ def get_template_by_name(templates, template_name): _log.debug("Desired report name found, template info: " + json.dumps(matched_template, indent=2)) return matched_template - def generate_and_download_template_report(self, matched_template): + # TODO: Is there a need to call just this instead of generate_and_download...? + def update_template_report(self, template, start_month, start_year, end_month, end_year, property_ids): + """This method calls out to ESPM to (re)generate a specific template + + :param template: A specific template object + :param start_month: reporting period start month + :param start_year: reporting period start year + :param end_month: reporting period end month + :param end_year: reporting period end year + :property_ids: list of property ids to include in report + :return: TODO + """ + # login if needed + if not self.authenticated_headers: + self.login_and_set_cookie_header() + + template_report_id = template['id'] + update_report_url = 'https://portfoliomanager.energystar.gov/pm/reports/generateData/' + str(template_report_id) + + new_authenticated_headers = self.authenticated_headers.copy() + new_authenticated_headers['Content-Type'] = 'application/x-www-form-urlencoded' + + try: + response = requests.post(update_report_url, headers=self.authenticated_headers) + except requests.exceptions.SSLError: + raise PMExcept('SSL Error in Portfolio Manager Query; check VPN/Network/Proxy.') + if not response.status_code == status.HTTP_200_OK: + raise PMExcept('Unsuccessful response from POST to update report; aborting.') + _log.debug('Triggered report update,\n status code=' + str( + response.status_code) + '\n response headers=' + str( + response.headers)) + + return response.content + + def generate_and_download_template_report(self, matched_template, report_format='XML'): """ This method calls out to ESPM to trigger generation of a report for the supplied template. The process requires calling out to the generateData/ endpoint on ESPM, followed by a waiting period for the template status to be - updated to complete. Once complete, a download URL allows download of the report in XML format. + updated to complete. Once complete, a download URL allows download of the report in XML or EXCEL format. This response content can be enormous, so ... TODO: Evaluate whether we should just download this XML to file here. 
It would require re-reading the file @@ -402,7 +514,7 @@ def generate_and_download_template_report(self, matched_template): template_objects = response.json()['reportTabData'] for t in template_objects: - if 'id' in t and t['id'] == matched_template['id']: + if 'id' in t and t['id'] == template_report_id: this_matched_template = t break else: @@ -423,18 +535,18 @@ def generate_and_download_template_report(self, matched_template): # Finally we can download the generated report try: - response = requests.get(self.download_url(matched_template['id']), headers=self.authenticated_headers) + response = requests.get(self.download_url(template_report_id, report_format), headers=self.authenticated_headers) except requests.exceptions.SSLError: raise PMExcept('SSL Error in Portfolio Manager Query; check VPN/Network/Proxy.') if not response.status_code == status.HTTP_200_OK: error_message = 'Unsuccessful response from GET trying to download generated report;' error_message += ' Generated report name: ' + matched_template['name'] + ';' - error_message += ' Tried to download report from URL: ' + self.download_url(matched_template['id']), + ';' + error_message += ' Tried to download report from URL: ' + self.download_url(template_report_id), + ';' error_message += ' Returned with a status code = ' + response.status_code + ';' raise PMExcept(error_message) return response.content - def generate_and_download_child_data_request_report(self, matched_data_request): + def generate_and_download_child_data_request_report(self, matched_data_request, report_format='XML'): """ Updated for recent update of ESPM @@ -454,7 +566,7 @@ def generate_and_download_child_data_request_report(self, matched_data_request): # Generate the url to download this file try: - response = requests.get(self.download_url(matched_data_request["id"]), headers=self.authenticated_headers, allow_redirects=True) + response = requests.get(self.download_url(matched_data_request["id"], report_format), headers=self.authenticated_headers, allow_redirects=True) except requests.exceptions.SSLError: raise PMExcept('SSL Error in Portfolio Manager Query; check VPN/Network/Proxy.') @@ -463,6 +575,41 @@ def generate_and_download_child_data_request_report(self, matched_data_request): return response.content + def return_single_property_report(self, pm_property_id: int): + """Return (in memory) a single property report from ESPM based on the passed + ESPM Property ID (SEED calls this the pm_property_id). This method returns + the XLSX file in memory with all the tabs for the single property. + + This method differs from the others in that this it does not need to know + the template of the report, it is simply the entire ESPM record (meters and all). + + Args: + pm_property_id (int): The ESPM Property ID to download. 
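+
+        Note:
+            The full report is held in memory; callers (the ``download``
+            endpoint or the tests) are responsible for persisting the
+            returned content if needed.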
+ + Returns: + str: Content of an XLSX file which will need to be persisted + """ + # login if needed + if not self.authenticated_headers: + self.login_and_set_cookie_header() + + # Generate the url to download this file + try: + response = requests.get( + self.download_url_single_report(pm_property_id), + headers=self.authenticated_headers, + allow_redirects=True + ) + + if response.status_code == status.HTTP_200_OK: + + return response.content + else: + raise PMExcept('Unsuccessful response from GET trying to download single report; aborting.') + + except requests.exceptions.SSLError: + raise PMExcept('SSL Error in Portfolio Manager Query; check VPN/Network/Proxy.') + def _parse_properties_v1(self, xml): """Parse the XML (in dict format) response from the ESPM API and return a list of properties. This version was implemented prior to 02/13/2023 @@ -532,6 +679,19 @@ def _flatten_property_metrics(pm): return True, return_data - def download_url(self, template_id): - """helper method to assemble the download url for a given template id""" - return f'{self.DOWNLOAD_REPORT_URL}/{template_id}/XML?testEnv=false&filterResponses=false' + def download_url(self, template_id, report_format='XML'): + """helper method to assemble the download url for a given template id. Default format is XML""" + return f"{self.DOWNLOAD_REPORT_URL}/{template_id}/{report_format}?testEnv=false&filterResponses=false" + + def download_url_single_report(self, pm_property_id: int) -> str: + """helper method to assemble the download url for a single property report. + + Args: + pm_property_id (int): PM Property ID to download + + Returns: + str: URL + """ + url = f"{self.DOWNLOAD_SINGLE_PROPERTY_REPORT_URL}/{pm_property_id}/download/{pm_property_id}.xlsx" + _log.debug(f"ESPM single property download URL is {url}") + return url diff --git a/seed/views/v3/properties.py b/seed/views/v3/properties.py index 3bf53d1437..34d56cfe04 100644 --- a/seed/views/v3/properties.py +++ b/seed/views/v3/properties.py @@ -6,6 +6,8 @@ import os from collections import namedtuple +from django.conf import settings +from django.core.files.storage import FileSystemStorage from django.db.models import Q, Subquery from django.http import HttpResponse, JsonResponse from django_filters import CharFilter, DateFilter @@ -17,16 +19,26 @@ from rest_framework.renderers import JSONRenderer from seed.building_sync.building_sync import BuildingSync +from seed.data_importer import tasks +from seed.data_importer.match import save_state_match +from seed.data_importer.meters_parser import MetersParser +from seed.data_importer.models import ImportFile, ImportRecord +from seed.data_importer.tasks import _save_pm_meter_usage_data_task from seed.data_importer.utils import kbtu_thermal_conversion_factors from seed.decorators import ajax_request_class from seed.hpxml.hpxml import HPXML +from seed.lib.progress_data.progress_data import ProgressData from seed.lib.superperms.orgs.decorators import has_perm_class from seed.models import ( + AUDIT_USER_CREATE, AUDIT_USER_EDIT, + DATA_STATE_MAPPING, DATA_STATE_MATCHING, MERGE_STATE_DELETE, MERGE_STATE_MERGED, MERGE_STATE_NEW, + PORTFOLIO_RAW, + SEED_DATA_SOURCES, Analysis, BuildingFile, Column, @@ -36,6 +48,7 @@ InventoryDocument, Meter, Note, + Organization, Property, PropertyAuditLog, PropertyMeasure, @@ -50,6 +63,7 @@ from seed.serializers.pint import PintJSONEncoder from seed.serializers.properties import ( PropertySerializer, + PropertyStatePromoteWritableSerializer, PropertyStateSerializer, 
PropertyViewAsStateSerializer, PropertyViewSerializer, @@ -1012,6 +1026,139 @@ def retrieve(self, request, pk=None): else: return JsonResponse(result, status=status.HTTP_404_NOT_FOUND) + @swagger_auto_schema( + manual_parameters=[ + AutoSchemaHelper.query_org_id_field(), + ], + request_body=AutoSchemaHelper.schema_factory( + { + 'cycle_id': 'integer', + 'state': 'object', + }, + required=['cycle_id', 'state'] + ), + ) + @api_endpoint_class + @ajax_request_class + @has_perm_class('can_modify_data') + def create(self, request): + """ + Create a propertyState and propertyView via promote for given cycle + """ + org_id = self.get_organization(self.request) + data = request.data + # get state data + property_state_data = data.get('state', None) + cycle_pk = data.get('cycle_id', None) + + if cycle_pk is None: + return JsonResponse({ + 'status': 'error', + 'message': 'Missing required parameter cycle_id', + }, status=status.HTTP_400_BAD_REQUEST) + + if property_state_data is None: + return JsonResponse({ + 'status': 'error', + 'message': 'Missing required parameter state', + }, status=status.HTTP_400_BAD_REQUEST) + + # ensure that state organization_id is set to org in the request + state_org_id = property_state_data.get('organization_id', org_id) + if state_org_id != org_id: + return JsonResponse({ + 'status': 'error', + 'message': 'State organization_id does not match request organization_id', + }, status=status.HTTP_400_BAD_REQUEST) + property_state_data['organization_id'] = state_org_id + + # get cycle + try: + cycle = Cycle.objects.get(pk=cycle_pk, organization_id=org_id) + except Cycle.DoesNotExist: + return JsonResponse({ + 'status': 'error', + 'message': 'Invalid cycle_id', + }, status=status.HTTP_400_BAD_REQUEST) + + # set empty strings to None + try: + for key, val in property_state_data.items(): + if val == '': + property_state_data[key] = None + except AttributeError: + return JsonResponse({ + 'status': 'error', + 'message': 'Invalid state', + }, status=status.HTTP_400_BAD_REQUEST) + + # extra data fields that do not match existing columns will not be imported + extra_data_columns = list(Column.objects.filter( + organization_id=org_id, + table_name='PropertyState', + is_extra_data=True, + derived_column_id=None + ).values_list('column_name', flat=True)) + + extra_data = property_state_data.get('extra_data', {}) + new_data = {} + + for k, v in extra_data.items(): + # keep only those that match a column + if k in extra_data_columns: + new_data[k] = v + + property_state_data['extra_data'] = new_data + + # this serializer is meant to be used by a `create` action + property_state_serializer = PropertyStatePromoteWritableSerializer( + data=property_state_data + ) + + try: + valid = property_state_serializer.is_valid() + except ValueError as e: + return JsonResponse({ + 'status': 'error', + 'message': 'Invalid state: {}'.format(str(e)) + }, status=status.HTTP_400_BAD_REQUEST) + + if valid: + # create the new property state, and perform an initial save + new_state = property_state_serializer.save() + # set `merge_state` to new, rather than unknown + new_state.merge_state = MERGE_STATE_NEW + + # Log this appropriately - "Import Creation" ? 
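+            # writing an audit log entry makes the new state's provenance
+            # (created directly via the API rather than through an import
+            # file) visible in the property's history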
+ PropertyAuditLog.objects.create(organization_id=org_id, + parent1=None, + parent2=None, + parent_state1=None, + parent_state2=None, + state=new_state, + name='Import Creation', + description='Creation from API', + import_filename=None, + record_type=AUDIT_USER_CREATE) + + # promote to view + view = new_state.promote(cycle) + + return JsonResponse({ + 'status': 'success', + 'property_view_id': view.id, + 'property_state_id': new_state.id, + 'property_id': view.property.id, + 'view': PropertyViewSerializer(view).data + }, encoder=PintJSONEncoder, status=status.HTTP_201_CREATED) + + else: + # invalid request + return JsonResponse({ + 'status': 'error', + 'message': 'Invalid state: {}'.format(property_state_serializer.errors) + }, status=status.HTTP_400_BAD_REQUEST) + @swagger_auto_schema_org_query_param @api_endpoint_class @ajax_request_class @@ -1412,6 +1559,200 @@ def update_with_building_sync(self, request, pk): 'message': "Could not process building file with messages {}".format(messages) }, status=status.HTTP_400_BAD_REQUEST) + @swagger_auto_schema( + manual_parameters=[ + AutoSchemaHelper.path_id_field( + description='ID of the property view to update' + ), + AutoSchemaHelper.query_org_id_field(), + AutoSchemaHelper.query_integer_field( + 'cycle_id', + required=True, + description='ID of the cycle of the property view' + ), + AutoSchemaHelper.query_integer_field( + 'mapping_profile_id', + required=True, + description='ID of the column mapping profile to use' + ), + AutoSchemaHelper.upload_file_field( + 'file', + required=True, + description='ESPM property report to use (in XLSX format)', + ), + ], + request_body=no_body, + ) + @action(detail=True, methods=['PUT'], parser_classes=(MultiPartParser,)) + @has_perm_class('can_modify_data') + def update_with_espm(self, request, pk): + """Update an existing PropertyView with an exported singular ESPM file. 
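+
+        All of the steps below run synchronously in the foreground: persist
+        the uploaded XLSX file, verify that it contains exactly one property,
+        map its columns with the supplied mapping profile, merge the mapped
+        state into this view's state (honoring column merge priorities), and
+        finally import any meters found in the file.
+
+        A minimal client-side sketch of calling this endpoint (host, IDs, and
+        credentials are hypothetical), mirroring the multipart PUT in the
+        tests::
+
+            import requests
+
+            url = ('https://seed.example.org/api/v3/properties/1/update_with_espm/'
+                   '?organization_id=1&cycle_id=1&mapping_profile_id=1')
+            with open('portfolio-manager-single-22482007.xlsx', 'rb') as f:
+                # requests builds the multipart/form-data body for the PUT
+                resp = requests.put(url, files={'file': f}, auth=('user', 'api_key'))
+            resp.raise_for_status()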
+ """ + if len(request.FILES) == 0: + return JsonResponse({ + 'success': False, + 'message': 'Must pass file in as a multipart/form-data request' + }, status=status.HTTP_400_BAD_REQUEST) + + the_file = request.data['file'] + cycle_pk = request.query_params.get('cycle_id', None) + org_id = self.get_organization(self.request) + org_inst = Organization.objects.get(pk=org_id) + + # get mapping profile (ensure it is part of the org) + mapping_profile_id = request.query_params.get('mapping_profile_id', None) + if not mapping_profile_id: + return JsonResponse({ + 'success': False, + 'message': 'Must provide a column mapping profile' + }, status=status.HTTP_400_BAD_REQUEST) + + column_mapping_profile = org_inst.columnmappingprofile_set.filter( + pk=mapping_profile_id + ) + if len(column_mapping_profile) == 0: + return JsonResponse({ + 'success': False, + 'message': 'Could not find ESPM column mapping profile' + }, status=status.HTTP_400_BAD_REQUEST) + elif len(column_mapping_profile) > 1: + return JsonResponse({ + 'success': False, + 'message': f"Found multiple ESPM column mapping profiles, found {len(column_mapping_profile)}" + }, status=status.HTTP_400_BAD_REQUEST) + column_mapping_profile = column_mapping_profile[0] + + try: + Cycle.objects.get(pk=cycle_pk, organization_id=org_id) + except Cycle.DoesNotExist: + return JsonResponse({ + 'success': False, + 'message': 'Cycle ID is missing or Cycle does not exist' + }, status=status.HTTP_404_NOT_FOUND) + + try: + # note that this is a "safe" query b/c we should have already returned + # if the cycle was not within the user's organization + property_view = PropertyView.objects.select_related( + 'property', 'cycle', 'state' + ).get(pk=pk, cycle_id=cycle_pk) + except PropertyView.DoesNotExist: + return JsonResponse({ + 'status': 'error', + 'message': 'property view does not exist' + }, status=status.HTTP_404_NOT_FOUND) + + # create a new "datafile" object to store the file + import_record, _ = ImportRecord.objects.get_or_create( + name='Manual ESPM Records', + owner=request.user, + last_modified_by=request.user, + super_organization_id=org_id + ) + + filename = the_file.name + path = os.path.join(settings.MEDIA_ROOT, "uploads", filename) + + # Get a unique filename using the get_available_name method in FileSystemStorage + s = FileSystemStorage() + path = s.get_available_name(path) + + # verify the directory exists + if not os.path.exists(os.path.dirname(path)): + os.makedirs(os.path.dirname(path)) + + # save the file + with open(path, 'wb+') as temp_file: + for chunk in the_file.chunks(): + temp_file.write(chunk) + + import_file = ImportFile.objects.create( + cycle_id=cycle_pk, + import_record=import_record, + uploaded_filename=filename, + file=path, + source_type=SEED_DATA_SOURCES[PORTFOLIO_RAW][1], + source_program='PortfolioManager', + source_program_version='1.0', + ) + + # save the raw data, but do it synchronously in the foreground + tasks.save_raw_espm_data_synchronous(import_file.pk) + + # verify that there is only one property in the file + import_file.refresh_from_db() + if import_file.num_rows != 1: + return JsonResponse({ + 'success': False, + 'message': f"File must contain exactly one property, found {import_file.num_rows or 0} properties" + }, status=status.HTTP_400_BAD_REQUEST) + + # create the column mappings + Column.retrieve_mapping_columns(import_file.pk) + + # assign the mappings to the import file id + Column.create_mappings(column_mapping_profile.mappings, org_inst, request.user, import_file.pk) + + # call the mapping process - 
but do this in the foreground, not asynchronously.
+        tasks.map_data_synchronous(import_file.pk)
+
+        # The data should now be mapped. The synchronous task does not return
+        # the IDs of the mapped states, so query for them by import file.
+        new_property_state = PropertyState.objects.filter(
+            organization_id=org_id,
+            import_file_id=import_file.pk,
+            data_state=DATA_STATE_MAPPING,
+        )
+        if len(new_property_state) == 0:
+            return JsonResponse({
+                'success': False,
+                'message': "Could not find newly mapped property state"
+            }, status=status.HTTP_400_BAD_REQUEST)
+        elif len(new_property_state) > 1:
+            return JsonResponse({
+                'success': False,
+                'message': f"Found multiple newly mapped property states, found {len(new_property_state)}"
+            }, status=status.HTTP_400_BAD_REQUEST)
+        new_property_state = new_property_state[0]
+
+        # retrieve the column merge priorities and then save the updated new property state.
+        # This is called merge protection on the front end.
+        priorities = Column.retrieve_priorities(org_id)
+        merged_state = save_state_match(property_view.state, new_property_state, priorities)
+
+        # save the merged state to the latest property view
+        property_view.state = merged_state
+        property_view.save()
+
+        # now save the meters; a progress_data object is needed to pass to the
+        # tasks, although it is not used here.
+        progress_data = ProgressData(func_name='meter_import', unique_id=import_file.pk)
+        # -- Start --
+        # For now, we are duplicating the methods that are called in the tasks in order
+        # to circumvent the celery background task management (i.e., run in the foreground)
+        meters_parser = MetersParser.factory(import_file.local_file, org_id)
+        meters_and_readings = meters_parser.meter_and_reading_objs
+        for meter_readings in meters_and_readings:
+            _save_pm_meter_usage_data_task(meter_readings, import_file.id, progress_data.key)
+        # -- End -- of duplicated (and simplified) meter import methods
+        progress_data.delete()
+
+        if merged_state:
+            return JsonResponse({
+                'success': True,
+                'status': 'success',
+                'message': 'successfully updated property with ESPM file',
+                'data': {
+                    'status': 'success',
+                    'property_view': PropertyViewAsStateSerializer(property_view).data,
+                },
+            }, status=status.HTTP_200_OK)
+        else:
+            return JsonResponse({
+                'status': 'error',
+                'message': "Could not process ESPM file"
+            }, status=status.HTTP_400_BAD_REQUEST)
+
     @action(detail=True, methods=['PUT'], parser_classes=(MultiPartParser,))
     @has_perm_class('can_modify_data')
     def upload_inventory_document(self, request, pk):