diff --git a/omf/models/hostingCapacity.py b/omf/models/hostingCapacity.py index 700b198da..0d3454baf 100644 --- a/omf/models/hostingCapacity.py +++ b/omf/models/hostingCapacity.py @@ -138,7 +138,7 @@ def run_traditional_algorithm(modelDir, inputDict, outData): tree = opendss.dssConvert.omdToTree(path_to_omd) opendss.dssConvert.treeToDss(tree, Path(modelDir, 'circuit.dss')) traditional_start_time = time.time() - traditionalHCResults = opendss.hosting_capacity_all( FNAME = Path(modelDir, 'circuit.dss'), max_test_kw=int(inputDict["traditionalHCMaxTestkw"]), multiprocess=False) + traditionalHCResults = opendss.hosting_capacity_all( FNAME = Path(modelDir, 'circuit.dss'), max_test_kw=int(inputDict["traditionalHCMaxTestkw"]), multiprocess=True) traditional_end_time = time.time() # - opendss.hosting_capacity_all() changes the cwd, so change it back so other code isn't affected tradHCDF = pd.DataFrame(traditionalHCResults) diff --git a/omf/models/resilientCommunity.py b/omf/models/resilientCommunity.py index a137cb3d8..7753bb85c 100644 --- a/omf/models/resilientCommunity.py +++ b/omf/models/resilientCommunity.py @@ -651,6 +651,26 @@ def getPercentile(loads, columnName): for i, (k,v) in enumerate(loads.items()): loads[k][new_str] = round(result[i],2) +def coordCheck(long, lat, latlonList): + point = Point(long, lat) + + for k,v in latlonList.items(): + coords, geoType = v[0], v[1] + + ## Need to figure out when we are dealing with a multipolygon or polygon list + + if geoType == 'Polygon': + poly = Polygon(coords) + + if poly.intersects(point): + return k + else: + for i in coords: + if Polygon(i).intersects(point): + return k + + return '' + def getDownLineLoadsEquipment(pathToOmd,nriGeoJson, equipmentList): ''' @@ -665,6 +685,7 @@ def getDownLineLoadsEquipment(pathToOmd,nriGeoJson, equipmentList): tracts = {} tractData = [] geos = [] + lon_lat = {} cols = 
['TRACT','BUILDVALUE','AGRIVALUE','EAL_VALT','EAL_VALB','EAL_VALP','EAL_VALA','SOVI_SCORE','SOVI_RATNG','RESL_RATNG','RESL_VALUE','AVLN_AFREQ','CFLD_AFREQ','CWAV_AFREQ','DRGT_AFREQ','ERQK_AFREQ','HAIL_AFREQ','HWAV_AFREQ','HRCN_AFREQ','ISTM_AFREQ','LNDS_AFREQ','LTNG_AFREQ','RFLD_AFREQ','SWND_AFREQ','TRND_AFREQ','TSUN_AFREQ','VLCN_AFREQ','WFIR_AFREQ','WNTW_AFREQ'] for ob in omd.get('tree', {}).values(): obType = ob['object'] @@ -685,11 +706,20 @@ def getDownLineLoadsEquipment(pathToOmd,nriGeoJson, equipmentList): loads[key]["base crit score"]= ((math.sqrt((kw * kw) + (kvar * kvar) ))/ (5)) * 4 - - lat = float(ob['latitude']) long = float(ob['longitude']) - - tract = findCensusTract(lat, long) + lat = float(ob['latitude']) + + if lon_lat: + check = coordCheck(long,lat, lon_lat) + if check: + svi_score = round(float(tracts.get(check)['SOVI_SCORE']),2) + loads[key]["community crit score"] = round((((math.sqrt((kw * kw) + (kvar * kvar) ))/ (5)) * 4) * svi_score,2) + loads[key]['SOVI_SCORE'] = svi_score + continue + else: + tract = findCensusTract(lat,long) + else: + tract = findCensusTract(lat, long) # if api call failed. 
repeat it while tract == None: @@ -725,13 +755,21 @@ def getDownLineLoadsEquipment(pathToOmd,nriGeoJson, equipmentList): tracts[tractID] = i['properties'] if (i['geometry']['type'] == 'MultiPolygon'): + lon_lat_list = [] for j in i['geometry']['coordinates']: + ## changes values so make copy + lon_lat_list.append(transform(j.copy())) geos.append(j) tractData.append(vals) + lon_lat[tract] = (lon_lat_list,'MultiPolygon') else: + # changes values so make copy + lon_lat[tract] = (transform(i['geometry']['coordinates'][0].copy()), 'Polygon') geos.append(i['geometry']['coordinates'][0]) tractData.append(vals) + + break diff --git a/omf/solvers/opendss/__init__.py b/omf/solvers/opendss/__init__.py index 63e5badbb..68101ee16 100644 --- a/omf/solvers/opendss/__init__.py +++ b/omf/solvers/opendss/__init__.py @@ -348,7 +348,7 @@ def get_hosting_capacity_of_single_bus_multiprocessing(FILE_PATH:str, BUS_NAME:s lower_kw_bound = 1 upper_kw_bound = 1 while True: - results = check_hosting_capacity_of_single_bus(FILE_PATH, BUS_NAME, upper_kw_bound, lock) + results = check_hosting_capacity_of_single_bus(FILE_PATH, BUS_NAME, upper_kw_bound) thermal_violation = results['thermal_violation'] voltage_violation = results['voltage_violation'] if thermal_violation or voltage_violation or upper_kw_bound == max_test_kw: @@ -484,14 +484,15 @@ def hosting_capacity_single_bus(FILE_PATH:str, kwSTEPS:int, kwValue:float, BUS_N return {'bus':BUS_NAME, 'max_kw':kwValue * step, 'reached_max':False, 'thermal_violation':therm_violation, 'voltage_violation':volt_violation} def multiprocessor_function( FILE_PATH, max_test_kw, lock, BUS_NAME): - print( "inside multiprocessor function" ) - try: - single_output = get_hosting_capacity_of_single_bus_multiprocessing( FILE_PATH, BUS_NAME, max_test_kw, lock) - return single_output - except: - print(f'Could not solve hosting capacity for BUS_NAME={BUS_NAME}') + with lock: + print( "inside multiprocessor function" ) + try: + single_output = 
get_hosting_capacity_of_single_bus_multiprocessing( FILE_PATH, BUS_NAME, max_test_kw, lock) + return single_output + except: + print(f'Could not solve hosting capacity for BUS_NAME={BUS_NAME}') - + #Jenny def hosting_capacity_all(FNAME:str, max_test_kw:float=50000, BUS_LIST:list = None, multiprocess=False, cores: int=8): ''' Generate hosting capacity results for all_buses. ''' @@ -504,12 +505,12 @@ def hosting_capacity_all(FNAME:str, max_test_kw:float=50000, BUS_LIST:list = Non all_output = [] # print('GEN_BUSES', gen_buses) if multiprocess == True: - lock = multiprocessing.Lock() - pool = multiprocessing.Pool( processes=cores ) - print(f'Running multiprocessor {len(gen_buses)} times with {cores} cores') - # Executes parallel_hc_func in parallel for each item in gen_buses - all_output.extend(pool.starmap(multiprocessor_function, [(fullpath, max_test_kw, lock, bus) for bus in gen_buses])) - print( "multiprocess all output: ", all_output) + with multiprocessing.Manager() as manager: + lock = manager.Lock() + pool = multiprocessing.Pool( processes=cores ) + print(f'Running multiprocessor {len(gen_buses)} times with {cores} cores') + all_output.extend(pool.starmap(multiprocessor_function, [(fullpath, max_test_kw, lock, bus) for bus in gen_buses])) + print( "multiprocess all output: ", all_output) elif multiprocess == False: for bus in gen_buses: try: