# -*- coding: utf-8 -*-
"""
Scenario includes:
(1) Changes in demand
(2) Demand-side investment in DG
(3) Supply-side investment in grid-connected generation
Forecast transmission revenue includes unapproved major capex
(1) Demand is differentiated by:
    (a) type {Mass,DirectConnect}
    (b) time of use {Peak,Shoulder,OffPeak,Low}
    (c) region {Backbone node...}
    (d) connection (DG, Grid)
"""
import numpy as np
import pandas as pd
import random
import matplotlib
import matplotlib.pyplot as plt

# A. DATA import and export
# File input/output structure should include a 'Data' file for input data and
# an 'Output' folder containing
scenario = "All_major_capex_gen_benefits/"
input_dir = "C:/Users/theot/Dropbox (Sense.)/2018-19 EA TPM CBA (Shared)/Step 3/AoB model/Data/"
output_dir = "C:/Users/theot/Dropbox (Sense.)/2018-19 EA TPM CBA (Shared)/Step 3/AoB model/Output/"
out_fn = output_dir + scenario  # File output folder

# Data import — one CSV per model input table.
data = pd.read_csv(input_dir + "model_data_bb.csv")
data_gen = pd.read_csv(input_dir + "model_data_bb_gen.csv")
coef_m = pd.read_csv(input_dir + "tou_coef_mass.csv")
coef_i = pd.read_csv(input_dir + "tou_coef_ind.csv")
reg_pop_params = pd.read_csv(input_dir + "reg_pop.csv")
reg_inc_params = pd.read_csv(input_dir + "reg_inc.csv")
tp_rev = pd.read_csv(input_dir + "forecast_revenue.csv")
exist_g = pd.read_csv(input_dir + "existing_gen.csv")
poss_g = pd.read_csv(input_dir + "possible_gen.csv")
major = pd.read_csv(input_dir + "major_capex.csv")
initial_aob = pd.read_csv(input_dir + "Initial_AoB.csv")
# B.
# PARAMETERS
# B.1 Number of trading periods per year, by time-of-use band
pk_tp = 1600
sh_tp = 3075
off_tp = 12845
# Half-period MWh-per-MW conversion factors: peak, shoulder, off-peak
mwh = np.array([pk_tp / 2, sh_tp / 2, off_tp / 2])

# B.2 Base period values - that do not change
yr_0 = 2017       # capacity measurement year ended August 2017
base_year = 2010  # Base period for elasticity calculation


def _coef_row(frame, kind):
    """Extract the 'coef' column for one coefficient type as a 1xN stack."""
    return np.column_stack(np.array(frame[frame['type'] == kind]['coef']))


# B.3 Mass market Time of Use (TOU) elasticity/LAAIDS parameters - from data
gamma = np.transpose(np.reshape(_coef_row(coef_m, 'gamma'), (4, 4)))
beta = np.transpose(np.matrix(_coef_row(coef_m, 'beta')))
# Battery demand displacement/charging parameters
dg_c = _coef_row(coef_m, 'dg')

# B.4 Large industrials TOU elasticity/LAAIDS parameters - from data
gamma_i = np.transpose(np.reshape(_coef_row(coef_i, 'gamma'), (4, 4)))
beta_i = np.transpose(np.matrix(_coef_row(coef_i, 'beta')))
dg_c_i = _coef_row(coef_i, 'dg')

# B.5 Market aggregate price and demand parameters (non TOU)
p_e = -0.11    # Aggregate mass market price (per ICP) elasticity
m_e = 0.11     # Aggregate mass market income (per ICP) elasticity
p_e_i = -0.02  # Aggregate industrial price (per ICP) elasticity

# B.6 Dimensions
share_n = 4                        # number of TOU expenditure shares
ones = np.matrix(np.ones((1, share_n)))
T = 33                             # Projections time period
yr_l = list(range(yr_0, yr_0 + T))  # Years for projections
# Backbone nodes in model
reg_l = ["MDN", "OTA", "HLY", "TRK", "WKM", "RDF", "SFD",
         "BPE", "HAY", "KIK", "ISL", "BEN", "ROX", "TWI"]
# Nodes with large industrial or direct connect demands
ind_l = ["OTA", "HLY", "TRK", "RDF", "SFD", "BPE", "ISL", "TWI"]
# Nodes with grid connected generation (excludes Otahuhu gen now closed)
gen_l = ["HLY", "TRK", "WKM", "RDF", "SFD",
         "BPE", "HAY", "KIK", "ISL", "BEN", "ROX", "TWI"]
si_nodes = ["KIK", "ISL", "BEN", "ROX", "TWI"]
ni_nodes = ["MDN", "OTA", "HLY", "TRK", "WKM", "RDF", "SFD", "BPE", "HAY"]
ctype_l = [1, 2]  # Consumer
# types, 1 = Mass market, 2 = Large industrial or direct connect demand
# B.7 Initial demand growth parameters
nz_m_g_mu = 1.01   # Mean income growth rate (1+g), labour market earnings per ICP
nz_m_g_sd = 0.005  # Std deviation of income growth rate (1+g), earnings per ICP
icp_g_mu = 1.01    # Mean ICP growth rate (1+g)
icp_g_sd = 0.005   # Std deviation of ICP growth rate
pot4_mw = 50       # Adding Tiwai fourth potline from 2019 to 2022
pot4_on = 2018     # year after which potline turns on
pot4_off = 2022    # year after which potline turns off
# Potline TOU volumes (MWh): peak, DG (none), shoulder, off-peak
pot4 = np.array([(pk_tp/2)*pot4_mw, 0, (sh_tp/2)*pot4_mw, (off_tp/2)*pot4_mw])

# B.8 Initial energy cost parameters
# (a) Parameters (not used in this version as generation output prices are endogenous)
pg_mu = [115.3, 115.3, 91.6, 72.8]  # 2018 dollars: Mean and standard deviation of generation prices
pg_sd = [49.6, 49.6, 34.1, 28.0]


# (b) Function for generating the expected value of generation costs,
# given a weighted average and std deviation
def ln_exp(mu=pg_mu, sd=pg_sd, n=1):
    """Return the expected value of log-normally distributed generation costs.

    mu, sd -- sequences of the arithmetic mean and standard deviation of each
        price series (defaults: the B.8(a) generation price parameters).
    n -- number of identical rows to return.

    Returns an (n, len(mu)) array whose columns hold E[X] for each series.
    """
    cases = len(mu)
    out = np.zeros((n, cases))
    for i in range(cases):
        # Moment-matched log-normal parameters for the given mean/sd
        loc = np.log(mu[i]**2/np.sqrt(sd[i]**2+mu[i]**2))
        scale = np.sqrt(np.log(1+(sd[i]**2/mu[i]**2)))
        # BUG FIX: the mean of a log-normal is exp(loc + scale**2/2);
        # the previous exp(loc + 0.5*scale) overstated the expected price
        # (~13% at the default parameters). With the fix, E[X] == mu[i].
        out[:, i] = np.exp(loc + 0.5*scale**2)
    return out


# (c) Battery parameters
# lrmc - capital and fixed O&M only, based on network batteries, with
# input/'fuel' cost calculated in the model
dg_lrmc_mu = 250
dg_capex = 733000
dg_fixed = 67000
hybrid_arbitrage_discount = 0.225  # To discount arbitrage outside peak demand months
# Maximum MW batteries capacity (not MWh) to total peak demand,
# 2 = 50% of peak met by batteries
max_dg = 2
# Expected MWh, based on energy arbitrage strategy (conservative assumption; peak
# avoidance strategies imply a higher number and earlier investment)
dg_mwh = 3225
dg_lrmc_g = 0.93  # Assume a 7% cost reduction
dg_ds = 0.5  # Supply elasticity - implies 1% cost advantage increases DG by 0.5%.
Investment is also decreasing function of dg share of peak demand pk_mw/(dg_mw+pk_mw) dg_horizon = 15 #Years factored into the DG investment decision dg_c_arbtrg = np.array([[-1.0,0.5,0.0,1.11]]) # Parameters for reallocating demand when batteries are used solely for arbitrage # B.9 AOB parameters r_s = 0.7273 #Share of DC + AC IC revenue not recovered via AoB [file aob_existing has the info on IC and DC allocations] aob_yr=2022 #Year implemented - for measurement purposes(i.e. m_yr) aob_tp=[pk_tp,pk_tp,sh_tp,off_tp] # B.10 Transmission revenue parameters roi=0.06 # A constant return on investment opex_r=0.06 # An opex allowance - as a share of RAB deprn=0.05 # Constant average depreciation gror = roi+opex_r+deprn #Gross rate of return [missing tax?] ic_s = [1.0,0.0,0.0,0.0] #Share of trading periods to which RCPD applies, by TOU categories - note assumption that average peak price is what matters ic_s_mwh = [1.0,0.0,1.0,1.0] #Values for application of RCPD to MWh - not applied to DG # Function for updating RCPD interconnection/transmission charges - # from expectations to "actuals" based on consumption (price component is forward looking) def rcpd_ic_update(data,year,tp_rev=tp_rev,rcpd_n=ic_s): ac_ic = np.asscalar(tp_rev.loc[(tp_rev['m_yr']==year+2),['ac_ic']].values) # Interconnection charge q_tou=data.loc[(data['m_yr']==year+1),['pk_q','dg_q','sh_q','off_q']] # Market TOU quantities Q_tou=np.array(q_tou.sum()) #Aggregate market quantities by TOU Q_ic_tou=np.multiply(Q_tou,rcpd_n) #Adjust for shares of charges by TOU - default peak only Q_ic = np.sum(Q_ic_tou) ic_p_update=np.multiply(np.divide(ac_ic,Q_ic),rcpd_n) #Get grid price - ex IC px1=np.array(data.loc[(data['m_yr']==year+1),['pk_px','dg_px','sh_px','off_px']]) #Add new IC price into the calculations p_update = np.add(px1,ic_p_update) #Add new prices back into dataframe data.loc[(data['m_yr']==year+1),['pk_i','dg_i','sh_i','off_i']]=ic_p_update 
data.loc[(data['m_yr']==year+1),['pk_p','dg_p','sh_p','off_p']]=p_update return data # Function for updating RCPD revenue per MWh collected by Transpower in each measurement year # this takes the amount of real revenue to be collected in the year, from forecast/actual revenue # gets peak demand from the prior year and calculates prices def rcpd_rev_update(data,year,tp_rev=tp_rev,ic_s=ic_s,rcpd_n=ic_s): ac_ic = np.asscalar(tp_rev.loc[(tp_rev['m_yr']==year+1),['ac_ic']].values) # Interconnection charge q_tou=data.loc[(data['m_yr']==year),['pk_q','dg_q','sh_q','off_q']] # Market TOU quantities Q_tou=np.array(q_tou.sum()) #Aggregate market quantities by TOU Q_ic_tou=np.multiply(Q_tou,rcpd_n) #Adjust for shares of charges by TOU - default peak only Q_ic = np.sum(Q_ic_tou) ic_pr_update=np.divide(ac_ic,Q_ic) #New price return ic_pr_update # D. PROPOSAL PARAMETERS and functions for calculating proposal parameters # D.1 Residual revenue, calculated from a given year of AOB implementation tp_rev["resid_rev"] = 0 # Initialise residual revenue series, then get intial value: rev0=np.asscalar(tp_rev.loc[(tp_rev['m_yr']==aob_yr),['totrev']].values)*r_s unallocated = 160000000 # Constant (minimum) value for overheads recovered through the residual # Revenue calculated as a function of gror and depreciation tp_rev.loc[(tp_rev['m_yr']>=aob_yr),['resid_rev']]=unallocated + np.array([gror*(((rev0-unallocated)/gror)*(1-deprn)**i) for i in range(tp_rev['m_yr'].max()-(aob_yr-1))]) # D.2 AOB revenue - pre approvals or major capex tp_rev["aob_rev"]=tp_rev["totrev"]-tp_rev['resid_rev'] tp_rev.loc[(tp_rev['m_yr']0),['lce']]=0 b = a.lce.sum()*(-1*gen_aob_adj) else: a.loc[(a['lce']<0),['lce']]=0 b = a.lce.sum() return b # (ii) Function for calculating shares of transport costs - for allocating benefit based charges def lceshr(data=data,data_gen=data_gen,reg="OTA",year=yr_0,ctype=1): #All load a = data.loc[data['m_yr']==year,['pk_q','sh_q','off_q','pk_s_pt_mu','sh_s_pt_mu','off_s_pt_mu', 
'pk_ns_pt_mu','sh_ns_pt_mu','off_ns_pt_mu','pk_pg','sh_pg','off_pg','pk_theta','sh_theta','off_theta']] a['lce'] = (a.pk_theta*(a.pk_q*(a.pk_s_pt_mu-1)*a.pk_pg)+(1-a.pk_theta)*(a.pk_q*(a.pk_ns_pt_mu-1)*a.pk_pg)+ a.sh_theta*(a.sh_q*(a.sh_s_pt_mu-1)*a.sh_pg)+(1-a.sh_theta)*(a.sh_q*(a.sh_ns_pt_mu-1)*a.sh_pg)+ a.off_theta*(a.off_q*(a.off_s_pt_mu-1)*a.off_pg)+(1-a.off_theta)*(a.off_q*(a.off_ns_pt_mu-1)*a.off_pg)) a.loc[(a['lce']<0),['lce']]=0 b = a.lce.sum() #All Generation c = data_gen.loc[data_gen['m_yr']==year,['pk_q','sh_q','off_q','pk_s_pt_mu','sh_s_pt_mu','off_s_pt_mu', 'pk_ns_pt_mu','sh_ns_pt_mu','off_ns_pt_mu','pk_pg','sh_pg','off_pg','pk_theta','sh_theta','off_theta']]#generation c['lce'] = (c.pk_theta*(c.pk_q*(c.pk_s_pt_mu-1)*c.pk_pg)+(1-c.pk_theta)*(c.pk_q*(c.pk_ns_pt_mu-1)*c.pk_pg)+ c.sh_theta*(c.sh_q*(c.sh_s_pt_mu-1)*c.sh_pg)+(1-c.sh_theta)*(c.sh_q*(c.sh_ns_pt_mu-1)*c.sh_pg)+ c.off_theta*(c.off_q*(c.off_s_pt_mu-1)*c.off_pg)+(1-c.off_theta)*(c.off_q*(c.off_ns_pt_mu-1)*c.off_pg)) c['lce'] = c['lce']*(-1*gen_aob_adj) #Reverse sign and adjust value to account for area under offer curves c.loc[(c['lce']<0),['lce']]=0 d=c.lce.sum() e = d+b #Numerator if ctype==0: f=lce(data_gen,reg=reg,year=year,ctype=ctype) else: f=lce(data,reg=reg,year=year,ctype=ctype) if f.size==0: g=0 else: g = np.asscalar(f/e) return g # D.5 Create an benefit based charge (AoB) revenue account - for convenience # Columns that indicate node + type cols=['p_yr','m_yr']+[s + str(1) for s in reg_l]+[s + str(2) for s in ind_l]+[s + str(0) for s in reg_l] # AOB account initial_aob['aob_rev0']=np.asscalar(tp_rev.loc[(tp_rev['m_yr']==aob_yr),['aob_rev']].values)*initial_aob.aob_s0 cols=['p_yr','m_yr']+[s + str(1) for s in reg_l]+[s + str(2) for s in ind_l]+[s + str(0) for s in reg_l] aob_accnt=pd.DataFrame(columns=cols) aob_accnt['m_yr']=tp_rev['m_yr'] aob_accnt['p_yr']=tp_rev['p_yr'] aob_accnt=aob_accnt.fillna(0) load_cols=[s + str(1) for s in reg_l]+[s + str(2) for s in ind_l] gen_cols = [s + 
str(0) for s in reg_l] for i in load_cols: aob_accnt.loc[aob_accnt['m_yr']==aob_yr,[i]]=initial_aob.loc[initial_aob['bbtype']==i,['aob_rev0']].values for i in gen_cols: aob_accnt.loc[aob_accnt['m_yr']==aob_yr,[i]]=initial_aob.loc[initial_aob['bbtype']==i,['aob_rev0']].values # D.6 AMD function def amdshr_f(data=data,window=5,gross=1): amd0=data.loc[:,['p_yr','m_yr','bb','type']] amd0['Load']=data.pk_q+data.dg_q amd0['Offtake']=data.loc[:,['pk_q']] if gross==1: amd0['AMDvol_t']=amd0['Load'] else: amd0['AMDvol_t']=amd0['Offtake'] amd0['AMDvol']=amd0.groupby(['bb','type']).AMDvol_t.apply(lambda x: x.rolling(window=window).sum()).values amd0['AMDvol_tot'] = amd0['AMDvol'].groupby(amd0['m_yr']).transform('sum') amd0['AMDshr']=amd0.AMDvol/amd0.AMDvol_tot return amd0 #Set initial value for AMD share amdshr=amdshr_f() # D.7 Share of investments for reliability vs economic value econ_share=0.5 load_reliab_shr = 0.624 # E. Additional functions for calculating generation transmission revenue # (i) South island mean injection def simi_f(data_gen=data_gen,window=5,ctype=0): simi0=data_gen.loc[:,['p_yr','m_yr','bb','type']] simi0['SIMIvol_t']=data_gen.pk_q+data_gen.sh_q+data_gen.off_q simi0['SIMIvol_pk_t']=data_gen.pk_q simi0['SIMIvol_dg_t']=data_gen.dg_q simi0['SIMIvol_sh_t']=data_gen.sh_q simi0['SIMIvol_off_t']=data_gen.off_q simi0.loc[simi0['bb'].isin(ni_nodes), ['SIMIvol_t','SIMIvol_pk_t','SIMIvol_dg_t','SIMIvol_sh_t','SIMIvol_off_t']] = 0 simi0['SIMIvol']=simi0.groupby(['bb','type']).SIMIvol_t.apply(lambda x: x.rolling(window=window).sum()).values simi0['SIMIvol_pk']=simi0.groupby(['bb','type']).SIMIvol_pk_t.apply(lambda x: x.rolling(window=window).sum()).values simi0['SIMIvol_dg']=simi0.groupby(['bb','type']).SIMIvol_dg_t.apply(lambda x: x.rolling(window=window).sum()).values simi0['SIMIvol_sh']=simi0.groupby(['bb','type']).SIMIvol_sh_t.apply(lambda x: x.rolling(window=window).sum()).values simi0['SIMIvol_off']=simi0.groupby(['bb','type']).SIMIvol_off_t.apply(lambda x: 
x.rolling(window=window).sum()).values simi0['SIMIvol_tot'] = simi0['SIMIvol'].groupby(simi0['m_yr']).transform('sum') simi0['SIMIshr']=simi0.SIMIvol/simi0.SIMIvol_tot return simi0 # (ii) Grid generation nodal shares of total output def genshr_f(data_gen=data_gen,window=5,year=yr_0): gen=data_gen.loc[:,['p_yr','m_yr','bb','pk_q','sh_q','off_q']] gen['pk_roll']=gen.groupby(['bb']).pk_q.apply(lambda x: x.rolling(window=window).sum()).values gen['sh_roll']=gen.groupby(['bb']).sh_q.apply(lambda x: x.rolling(window=window).sum()).values gen['off_roll']=gen.groupby(['bb']).off_q.apply(lambda x: x.rolling(window=window).sum()).values gen['pk_tot']=gen.groupby(['m_yr']).pk_roll.transform('sum') gen['sh_tot']=gen.groupby(['m_yr']).sh_roll.transform('sum') gen['off_tot']=gen.groupby(['m_yr']).off_roll.transform('sum') gen['pk_s']=gen.pk_roll/gen.pk_tot gen['sh_s']=gen.sh_roll/gen.sh_tot gen['off_s']=gen.off_roll/gen.off_tot shares = gen.loc[gen['m_yr']==year,['m_yr','bb','pk_s','sh_s','off_s']] return shares # (iii) Update function - SIMI charges def simi_rev_update(data_gen=data_gen,tp_rev=tp_rev,year=yr_0,window=5): dc_ic = np.asscalar(tp_rev.loc[(tp_rev['m_yr']==year+1),['dc_ic']].values) # DC charge simishr=simi_f(data_gen=data_gen,window=5) vol = simishr[simishr['m_yr']==year].SIMIvol_tot.mean()/window vol_q = simishr.loc[simishr['m_yr']==year,['SIMIvol_pk','SIMIvol_dg','SIMIvol_sh','SIMIvol_off']]/window price = dc_ic/vol rev_q = vol_q*price data_gen.loc[(data_gen['m_yr']==year+1),['pk_pr','dg_pr','sh_pr','off_pr']]=price data_gen.loc[data_gen['bb'].isin(ni_nodes), ['pk_pr','dg_pr','sh_pr','off_pr']] = 0 data_gen.loc[(data_gen['m_yr']==year+1),['pk_rev','dg_rev','sh_rev','off_rev']]= rev_q.values return data_gen # (iv) Update AOB revenue invoiced to generators def gen_aob_update(data=data,data_gen=data_gen,aob_accnt=aob_accnt,reg="BEN",year=yr_0,ctype=0): if (year+1) == aob_yr: aob1 = np.asscalar(aob_accnt.loc[aob_accnt['m_yr']==(year+1),[reg+str(ctype)]].values) else: 
reliab_shr=genshr_f(data_gen=data_gen,window=5,year=year) reliab_shr_bb=np.asscalar(reliab_shr.loc[(reliab_shr['m_yr']==year)& (reliab_shr['bb']==reg),['pk_s']].values) inv = (np.asscalar(tp_rev.loc[(tp_rev['m_yr']==(year+1)),['base_capex']].values)+ np.asscalar(tp_rev.loc[(tp_rev['m_yr']==(year+1)),['major_capex']].values)) inv_shr = (econ_share*(lceshr(data=data,data_gen=data_gen, reg=reg,year=year,ctype=ctype)) + (1-econ_share)*reliab_shr_bb*(1-load_reliab_shr)) d_aob = inv*inv_shr #New investment aob0 = np.asscalar(aob_accnt.loc[aob_accnt['m_yr']==(year),[reg+str(ctype)]].values) aob1 = aob0*(1-deprn)+d_aob #Update AOB account aob_accnt.loc[aob_accnt['m_yr']==(year+1),[reg+str(ctype)]]=aob1 #Calculate revenue q0=np.matrix(data_gen.loc[(data_gen['bb']==reg)&(data_gen['type']==ctype)& (data_gen['m_yr']==year),['pk_q','dg_q','sh_q','off_q']]) #TOU quantities if np.sum(q0)==0: #Adjustment for areas where plants close pr1 = np.matrix([[aob1/(pk_tp/2),0,aob1/(sh_tp/2),aob1/(off_tp/2)]])#Revenue paid per MWh rev_q = np.matrix([[aob1*(pk_tp/(pk_tp+sh_tp+off_tp)),0,aob1*(sh_tp/(pk_tp+sh_tp+off_tp)),aob1*(off_tp/(pk_tp+sh_tp+off_tp))]]) else: pr1 = np.matrix(np.repeat(aob1/np.sum(q0),share_n)) #Revenue paid per MWh rev_q = np.multiply(q0,pr1) data_gen.loc[(data_gen['bb']==reg)&(data_gen['type']==ctype)& (data_gen['m_yr']==year+1),['pk_pr','dg_pr','sh_pr','off_pr']]=pr1 data_gen.loc[(data_gen['bb']==reg)&(data_gen['type']==ctype)& (data_gen['m_yr']==year+1),['pk_rev','dg_rev','sh_rev','off_rev']]= rev_q return data_gen, aob_accnt # (F) Grid generation output and investment functions # (i) Merit order dispatch function for calculating avearge national annual wholesale prices def dispatch(data=data,exist_g=exist_g,year=yr_0): Q_tou0=np.array(data.loc[(data['m_yr']==year),['pk_q','sh_q','off_q']].sum()) MW_tou0=np.divide(Q_tou0,mwh) #Set a reserve overhead uplift = 1.15 #Accounting for observed prices at peak in excess of a simple SRMC calculation, # given e.g. 
opportunity costs of water and dispatch constraints - calibrated to cause prices similar to 10 year average (real $87/MWh) dg_share=0.90 #accounting for DG contribution to generation in the shoulder and off-peak # Collect volumes/offers - based on capacity and srmc offer = exist_g.loc[exist_g['m_yr']==year,].copy() offer=offer.sort_values('srmc') #Peak offer['pk_cum_mw'] = offer.pk_mw.cumsum() offer['pk_balance']=(MW_tou0[0]*uplift)-offer.pk_cum_mw offer['pk_dispatch']=offer.pk_balance+offer.pk_mw offer['pk_dispatch']=np.where(offer['pk_balance']<0,offer.pk_dispatch,offer.pk_mw) offer['pk_share']=offer.loc[offer.pk_dispatch>0,['pk_dispatch']]/(MW_tou0[0]*uplift) #Shoulder offer['sh_cum_mw'] = offer.sh_mw.cumsum() offer['sh_balance']=(MW_tou0[1]*(dg_share))-offer.sh_cum_mw offer['sh_dispatch']=offer.sh_balance+offer.sh_mw offer['sh_dispatch']=np.where(offer['sh_balance']<0,offer.sh_dispatch,offer.sh_mw) offer['sh_share']=offer.loc[offer.sh_dispatch>0,['sh_dispatch']]/(MW_tou0[1]*dg_share) #Off peak offer['off_cum_mw'] = offer.off_mw.cumsum() offer['off_balance']=(MW_tou0[2]*(dg_share))-offer.off_cum_mw offer['off_dispatch']=offer.off_balance+offer.off_mw offer['off_dispatch']=np.where(offer['off_balance']<0,offer.off_dispatch,offer.off_mw) offer['off_share']=offer.loc[offer.off_dispatch>0,['off_dispatch']]/(MW_tou0[2]*dg_share) #Get shares shares=offer.groupby(['bb'])['pk_share','sh_share','off_share'].apply(lambda x : x.sum()) shares.reset_index(level=0, inplace=True) #Get prices if offer.pk_balance.min() >= 0: pk_p = 246.0 #Set to maximum observed peak average generation price (2008) else: pk_p = offer.loc[offer.pk_balance < 0,['srmc']].iloc[0]['srmc'] if offer.sh_balance.min() >= 0: sh_p = 178.0 #set to maxiumum observed shoulder average generation price (2008) else: sh_p = offer.loc[offer.sh_balance < 0,['srmc']].iloc[0]['srmc'] if offer.off_balance.min() >= 0: off_p = 139.0 #set to maxiumum observed off-peak average generation price (2008) else: off_p = 
offer.loc[offer.off_balance < 0,['srmc']].iloc[0]['srmc'] if pk_p>246.0: #Limit to maximum observed peak average generation price (2008) pk_p=246.0 if sh_p>178.0: #Limit to maximum observed shoulder average generation pice (2008) sh_p=178.0 if off_p>79.0: #Limit to maximum observed off-peak average generation pice (2008) off_p=79.0 if pk_p<79.0: #Limit to minimum observed peak average generation pice (2015) pk_p=79.0 if sh_p<79.0: #Limit to minimum observed shoulder average generation pice (2009) sh_p=79.0 if off_p<40.0: off_p=40.0 #Limit to minimum observed off-peak average generation pice (2009) if sh_p>pk_p: sh_p=pk_p p = [pk_p,sh_p,off_p] return shares, p # (ii) Investment function - note this is an updating function,using this year as input and getting next year value # Current plant is in "exist_g", possible investments in "poss_g", when plant is commissioned it is removed from # the list of possible investments and added to the list of current def invest_gen(data=data,data_gen=data_gen,exist_g=exist_g,poss_g=poss_g,year=yr_0,max_inv=2): #Decommission plant if year==2024: dcom2023 = ['HuntC3','HuntC4','HuntlyG3','HuntlyG4'] exist_g = exist_g[~((exist_g.name.isin(dcom2023))&(exist_g.m_yr>=2024))] if year == 2028: dcom2027 = ['HlyUnit6'] exist_g = exist_g[~((exist_g.name.isin(dcom2027))&(exist_g.m_yr>=2028))] #Force new plant to join - in this case Todd peaker if year == 2019: ToddPkr = poss_g.loc[(poss_g['name']=='ToddPeak_npl')&(poss_g['m_yr']>=year+1),] #Append new plant to existing plant exist_g=exist_g.append(ToddPkr,ignore_index=True) #Remove the invested plant from the list of possible poss_g=poss_g[~((poss_g['name']=='ToddPeak_npl')&(poss_g.m_yr>=year+1))] # Check of their are any plans for the year - and not ones previously used plan = poss_g.loc[(poss_g['m_yr']==year+1)&(poss_g['available']==1),].copy() if plan.empty: pass else: _,dispatch_p=dispatch(data=data,exist_g=exist_g,year=year) #Add nodal price differentials 
plan=plan.merge(data_gen.loc[data_gen['m_yr']==year,['bb','pk_pt_mu','sh_pt_mu','off_pt_mu']],on='bb',how='left') #Revenue from average MW at dispatch prices plan['rev']=((plan.pk_mw*plan.pk_pt_mu*dispatch_p[0]+ plan.sh_mw*plan.sh_pt_mu*dispatch_p[1]+ plan.off_mw*plan.off_pt_mu*dispatch_p[2])/ (plan.pk_mw+plan.sh_mw+plan.off_mw)) #Add interconnection charges plan=plan.merge(data_gen.loc[data_gen['m_yr']==year,['bb','pk_pr','dg_pr','sh_pr','off_pr']],on='bb',how='left') plan['ic']=((plan.pk_mw*plan.pk_pr+plan.sh_mw*plan.sh_pr+plan.off_mw*plan.off_pr)/ (plan.pk_mw+plan.sh_mw+plan.off_mw)) # Add interconnection charge to lrmc plan['lrmc_ic']=plan.lrmc+plan.ic #Select one plant, with highest return plan['return']=plan.rev-(plan.lrmc+plan.ic) if plan['return'].max()>0: long_list=plan.loc[(plan['return']>0),] if len(long_list)>max_inv: short_list = long_list.nsmallest(max_inv,'lrmc_ic') else: short_list = long_list invest_names = short_list.loc[(short_list['return']>0),['name']] lookup_names = invest_names.name.unique().tolist() #Get all current and future invest = poss_g.loc[(poss_g['name'].isin(lookup_names))&(poss_g['m_yr']>=year+1),] #Append new plant to existing plant exist_g=exist_g.append(invest,ignore_index=True) #Remove the invested plant from the list of possible poss_g=poss_g[~((poss_g.name.isin(lookup_names))&(poss_g.m_yr>=year+1))] return exist_g, poss_g # (iii) Update generation output def gen_update(data=data,data_gen=data_gen,year=yr_0,exist_g=exist_g): qtou1=data.loc[(data['m_yr']==year+1),['pk_q','dg_q','sh_q','off_q']] # Market TOU quantities Q_tou1=np.array(qtou1.sum())#Aggregate market quantities by TOU dg_share=0.90 #accounting for DG contribution to generation in the shoulder and off-peak genshr,_= dispatch(data=data,exist_g=exist_g,year=year) genshr['pk_q']=genshr.pk_share*Q_tou1[0] genshr['sh_q']=genshr.sh_share*Q_tou1[2]*dg_share genshr['off_q']=genshr.off_share*Q_tou1[3]*dg_share for reg in gen_l: 
data_gen.loc[(data_gen['m_yr']==year+1)&(data_gen['bb']==reg),['pk_q','sh_q','off_q']]=genshr.loc[genshr['bb']==reg,['pk_q','sh_q','off_q']].values return data_gen # G. Main function for calculating demand, supply and prices in each year # Takes last year's values and model parameters and steps demand and prices forward by a year def dmnd(data=data,aob_accnt=aob_accnt,data_gen=data_gen,reg="MDN",ctype=1, year=yr_0,base_year=base_year,exist_g=exist_g, pg_mu=pg_mu,pg_sd=pg_sd,nz_m_g_mu=nz_m_g_mu,nz_m_g_sd=nz_m_g_sd, share_n=share_n,gamma=gamma,gamma_i=gamma_i, beta=beta,beta_i=beta_i,dg_g=1.0,ic_eg=1.0,ic_g=1.0,pop='med', pop_data=reg_pop_params,inc_data=reg_inc_params,tp_rev=tp_rev,aob=False, aob_yr=aob_yr,amd_yr=2017,amdshr=amdshr,econ_share=econ_share,rcpd_n=ic_s): # Data #Get prior variables from dataframes - 0 suffix indicates prior period p0=np.matrix(data.loc[(data['bb']==reg)&(data['type']==ctype)& (data['m_yr']==year),['pk_p','dg_p','sh_p','off_p']]) #Price s0=np.matrix(data.loc[(data['bb']==reg)&(data['type']==ctype)& (data['m_yr']==year),['pk_s','dg_s','sh_s','off_s']]) #TOU expenditure shares q0=np.matrix(data.loc[(data['bb']==reg)&(data['type']==ctype)& (data['m_yr']==year),['pk_q','dg_q','sh_q','off_q']]) #TOU quantities e0=np.matrix(data.loc[(data['bb']==reg)&(data['type']==ctype)& (data['m_yr']==year),['pk_e','dg_e','sh_e','off_e']]) #Expenditures sb=np.matrix(data.loc[(data['bb']==reg)&(data['type']==ctype)& (data['p_yr']==base_year),['pk_s','dg_s','sh_s','off_s']]) #TOU expenditure shares in base year (2010) dgmax0=np.matrix(data.loc[(data['bb']==reg)&(data['type']==ctype)& (data['m_yr']==year),['dg_max']]) # Maximum DG output, as a measure of capacity of access to grid alternatives dg0=np.matrix(data.loc[(data['bb']==reg)&(data['type']==ctype)& (data['m_yr']==year),['dg_icp']]) # Maximum DG output per ICP icp0=np.matrix(data.loc[(data['bb']==reg)&(data['type']==ctype)& (data['m_yr']==year),['icp']]) # Number of ICPs 
m0=np.matrix(data.loc[(data['bb']==reg)&(data['type']==ctype)& (data['m_yr']==year),['earn_icp']]) #Averaeg earnings per ICP pt0=np.matrix(data.loc[(data['bb']==reg)&(data['type']==ctype)& (data['m_yr']==year),['pk_pt','dg_pt','sh_pt','off_pt']]) # Transport price pi0=np.matrix(data.loc[(data['bb']==reg)&(data['type']==ctype)& #Interconnection price per MWh equivalent (data['m_yr']==year),['pk_i','dg_i','sh_i','off_i']]) ic0=np.matrix(data.loc[(data['bb']==reg)&(data['type']==ctype)& #Interconnection charge (IC) - total annual (data['m_yr']==year),['pk_ic','dg_ic','sh_ic','off_ic']]) pg0=np.matrix(data.loc[(data['bb']==reg)&(data['type']==ctype)& (data['m_yr']==year),['pk_pg','dg_pg','sh_pg','off_pg']]) # Generation price ptmu0=np.matrix(data.loc[(data['bb']==reg)&(data['type']==ctype)& (data['m_yr']==year),['pk_pt_mu','dg_pt_mu','sh_pt_mu','off_pt_mu']]) # Transport mean multiplier (parameter) px0=np.matrix(data.loc[(data['bb']==reg)&(data['type']==ctype)& (data['m_yr']==year),['pk_px','dg_px','sh_px','off_px']]) # Grid price - ex IC exp0=np.matrix(data.loc[(data['bb']==reg)&(data['type']==ctype)& (data['m_yr']==year),['exp_all']]) # Expenditure by node exp_icp0=np.matrix(data.loc[(data['bb']==reg)&(data['type']==ctype)& (data['m_yr']==year),['exp_per_icp']]) # Expenditure per ICP theta = np.matrix(data.loc[(data['bb']==reg)&(data['type']==ctype)& (data['m_yr']==year),['pk_theta','dg_theta','sh_theta','off_theta']]) pt_mu0 = np.matrix(data.loc[(data['bb']==reg)&(data['type']==ctype)& (data['m_yr']==year),['pk_pt_mu','dg_pt_mu','sh_pt_mu','off_pt_mu']]) #Transport price ratio, mean pt_s_mu0 = np.matrix(data.loc[(data['bb']==reg)&(data['type']==ctype)& (data['m_yr']==year),['pk_s_pt_mu','dg_s_pt_mu','sh_s_pt_mu','off_s_pt_mu']]) #Transport price ratio, mean pt_ns_mu0 =np.matrix(data.loc[(data['bb']==reg)&(data['type']==ctype)& (data['m_yr']==year),['pk_ns_pt_mu','dg_ns_pt_mu','sh_ns_pt_mu','off_ns_pt_mu']]) #Transport price ratio, mean 
params=np.matrix(data.loc[(data['bb']==reg)&(data['type']==ctype)& (data['m_yr']==year),list(data.iloc[:,58:74])]) #Other parameters - related to e.g. transport charges during scarcity # Income and population parameters icp_g_mu=np.asscalar(pop_data.loc[(pop_data['bb']==reg), [pop]].values) #Reg ICP growth m_g_reg_mu=np.asscalar(inc_data.loc[(inc_data['bb']==reg), ['reg_m_mu']].values)#Reg income growth relative to NZ m_g_mu=nz_m_g_mu*m_g_reg_mu #Income growth scenario #Expected price for generation in the next period #Expected price for generation in the next period, based on price in current period _, dispatch_p0=dispatch(data=data,exist_g=exist_g,year=year) _, dispatch_p1=dispatch(data=data,exist_g=exist_g,year=year-1) dispatch_p=np.divide(np.add(dispatch_p0,dispatch_p1),2) pg_mu=[dispatch_p[0],dispatch_p[0],dispatch_p[1],dispatch_p[2]] pg1 = ln_exp(mu=pg_mu,sd=pg_sd) pg_g = np.divide(pg1,pg0) dp_g = pg_g-1 #New price for transport costs ptmu1=ptmu0 #For the purposes of the demand model this value is unchanged from the average pt1 = np.multiply(pg1,ptmu1-1) #New grid price px1=pg1+pt1 dpx = np.divide(px1,px0) #New IC prices - interim ones - to be updated after demand calculated to ensure all required revenue is recovered if aob==True and (year+1) >= aob_yr: #Use initial shares amd_share=np.asscalar(amdshr.loc[(amdshr['m_yr']==amd_yr)&(amdshr['bb']==reg)& (amdshr['type']==ctype),["AMDshr"]].values) amd=amd_share*np.asscalar( tp_rev.loc[(tp_rev['m_yr']==(year+1)),['resid_rev']].values) if (year+1) == aob_yr: aob1 = np.asscalar(aob_accnt.loc[aob_accnt['m_yr']==(year+1),[reg+str(ctype)]].values) else: # Note capex values here are revenue years - assume assets commissioned at start of year too. 
reliab_shr=amdshr_f(data=data,window=3,gross=0) reliab_shr_bb=np.asscalar(reliab_shr.loc[(reliab_shr['m_yr']==year)& (reliab_shr['type']==ctype)& (reliab_shr['bb']==reg),['AMDshr']].values) inv = (np.asscalar(tp_rev.loc[(tp_rev['m_yr']==(year+1)),['base_capex']].values)+ np.asscalar(tp_rev.loc[(tp_rev['m_yr']==(year+1)),['major_capex']].values)) inv_shr = (econ_share*(lceshr(data=data,data_gen=data_gen, reg=reg,year=year,ctype=ctype)) + (1-econ_share)*reliab_shr_bb*load_reliab_shr) d_aob = inv*inv_shr #New investment aob0 = np.asscalar(aob_accnt.loc[aob_accnt['m_yr']==(year),[reg+str(ctype)]].values) aob1 = aob0*(1-deprn)+d_aob ic_tmp = amd+aob1 pi1 = np.matrix(np.repeat(ic_tmp/np.sum(q0),share_n)) aob_accnt.loc[aob_accnt['m_yr']==(year+1),[reg+str(ctype)]]=aob1 pr1 = pi1 #Interconnection revenue per MWh paid in year else: ic_g = np.asscalar(tp_rev.loc[(tp_rev['m_yr']==year+2),['ic_g']].values) pi1 = np.multiply(pi0,rcpd_n)*ic_g pr1=rcpd_rev_update(data=data,year=year,rcpd_n=rcpd_n) #New wholesale prices, faced by consumers p1 = np.add(px1,pi1) p_g = np.divide(p1,p0) dp = p_g-1 # Matrix of demand elasticities if ctype== 1: elas_tou=(-np.eye(share_n)+(gamma/(np.transpose(s0)*ones))- (((np.multiply(beta*ones,np.transpose(ones)*sb)))/ (np.transpose(s0)*ones))) #This adds the market elasticity to the TOU elasticities, to get a total effect elas = elas_tou*abs(p_e) else: elas_tou=(-np.eye(share_n)+(gamma_i/(np.transpose(s0)*ones))- (((np.multiply(beta_i*ones,np.transpose(ones)*sb)))/ (np.transpose(s0)*ones))) elas = elas_tou*abs(p_e_i) #Battery investment decision #(i) Expected interconnection charges (take the average of initial and end point for simplicity) exp_ic_g = tp_rev.loc[tp_rev['m_yr']>year,['totrev_g']].mean()-1 pv_pi=np.zeros(shape=(1,share_n)) for i in range(share_n): pv_pi[0,i] = (pi0[0,i]+(pi0[0,i]*(1+exp_ic_g)**dg_horizon))/2 #Values during high and low priced (locally scarce/abundant generation) p_s_g=np.multiply(pt_s_mu0,pg1) 
p_ns_g=np.multiply(pt_ns_mu0,pg1) #Expected grid price p_grid = np.add(np.multiply(theta,p_s_g),np.multiply((1-theta),p_ns_g)) #Expected gain from DG - based on peak and shoulder prices # Peak volumes as share of peak and shoulder pk_sh_shr = q0[0,0]/(q0[0,0]+q0[0,2]) # Divided by lrmc - capital cost - plus SRMC at off-peak price, with an energy loss uplift (1.1) dg_lrmc = (dg_capex*(dg_lrmc_g**(year-yr_0))+ dg_fixed)/dg_mwh if (aob==False or (year+1)0 and (q0[0,0]/(pk_tp/2))>(dgmax0*max_dg): dg_g=1+p_gain*dg_ds*((q0[0,0]/(pk_tp/2))/((q0[0,0]/(pk_tp/2))+dgmax0)) dgmax1=max(dgmax0*dg_g,dgmax0+1) #Updated dgmax else: dgmax1=dgmax0*dg_g #Updated dgmax # Get new demand - in response to price # First adjust quantity bases for change in DG capacity inlcusive of batteries d_dg_g = dgmax1/dgmax0 #Percent change in capacity if (aob==False or (year+1)= aob_yr: ic1=np.multiply(pi1,q0) rev1 = ic1 #Interconnection revenue paid in year +1 else: ic1=np.multiply(pi1,q1) revq=np.multiply(q0,rcpd_n) rev1=np.multiply(pr1,revq) pr1=np.reshape(np.multiply(rcpd_n,pr1),(1,4)) #Transport charge adjustment - for demand growth dq = np.divide(q1,q0) dpt_dq_s = 0.13 #elasticity of transport price to demand in areas with scarce generation dpt_dq_ns = 0.05 #elasticity of transport price to demand in areas without scarce generation #Limit the value of demand changes - affecting transport charges - to 20% in any period - due to large % changes from small bases (WKM only) dq[dq>1.2]=1.2 # Keep DG changes to 1 (increases do not affect) dq[0,1]=1 #Initialise next period values pt_mu1=pt_mu0 pt_s_mu1=pt_s_mu0 pt_ns_mu1=pt_ns_mu0 #Apply elasticity for i in range(share_n): pt_mu1[0,i]=(1+(dq[0,i]-1)*dpt_dq_ns)*pt_mu0[0,i] if pt_mu1[0,i]<1 else (1+(dq[0,i]-1)*dpt_dq_s)*pt_mu0[0,i] pt_s_mu1[0,i]=(1+(dq[0,i]-1)*dpt_dq_ns)*pt_s_mu0[0,i] if pt_s_mu1[0,i]<1 else (1+(dq[0,i]-1)*dpt_dq_s)*pt_s_mu0[0,i] pt_ns_mu1[0,i]=(1+(dq[0,i]-1)*dpt_dq_ns)*pt_ns_mu0[0,i] if pt_ns_mu1[0,i]<1 else 
(1+(dq[0,i]-1)*dpt_dq_s)*pt_ns_mu0[0,i] #Keep peak non-grid transport costs the same as peak grid pt_mu1[0,1]=pt_mu1[0,0] pt_s_mu1[0,1]=pt_s_mu1[0,0] pt_ns_mu1[0,1]=pt_ns_mu1[0,0] #Add extras that need to be accounted for in the output data ctype1 = ctype p_yr1 = year+3 m_yr1=year+1 # List of data/values to add to dataframe nums=pd.DataFrame(np.concatenate((np.array([[ctype1]],float), #1 np.array([[p_yr1]],float), #2 np.array([[m_yr1]],float), #3 s1, #7 p1, #11 q1, # 15 exp_icp1, #16 np.array([[exp1]]), #17 np.array(icp1), #18 dgmax1, m1, dg1, pt1, pi1, ic1, pg1, px1, e1, pt_mu1, pt_s_mu1, pt_ns_mu1, params, pr1, rev1), axis=1), columns=list(data.iloc[:,1:])) node=pd.Series(reg) vals=pd.concat([node,nums],axis=1) vals.columns=list(data) main = data.append(vals,ignore_index=True) #Generation data - add but only in steps with mass market demand so there are no double ups if ctype==1 and reg in gen_l: gen_params=np.matrix(data_gen.loc[(data_gen['bb']==reg)& (data_gen['m_yr']==year),list(data_gen.iloc[:,58:75])]) #Values for generation to add to dataframe - only price information added at this stage nums_gen=pd.DataFrame(np.concatenate((np.array([[0]],float), np.array([[p_yr1]],float), np.array([[m_yr1]],float), np.matrix([0.0,0.0,0.0,0.0]), #s1 px1, np.matrix([0.0,0.0,0.0,0.0]), #q1 np.array([[0]],float), #exp_icp1 np.array([[exp1]]), np.array(icp1), np.array([[0]],float), #dgmax1 np.array([[0]],float), #m1 np.array([[0]],float), #dg1 pt1, np.matrix([0.0,0.0,0.0,0.0]), #pi1 np.matrix([0.0,0.0,0.0,0.0]), #ic1 pg1, px1, np.matrix([0.0,0.0,0.0,0.0]), #e1 pt_mu1, pt_s_mu1, pt_ns_mu1, gen_params, np.matrix([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])), #pr1,rev1 axis=1), columns=list(data_gen.iloc[:,1:])) vals_gen=pd.concat([node,nums_gen],axis=1) vals_gen.columns=list(data_gen) gen_out=data_gen.append(vals_gen,ignore_index=True) else: gen_out=data_gen return main, aob_accnt, gen_out # H. 
Run models - with AOB version and an RCPD version # H.1 AOB/proposal version df_aob=data #Initialise dataframe for main results accnt_aob=aob_accnt # Initialise an AOB account dataframe gen_aob=data_gen # Initialise a generation data frame - used for calculating shares of AoB exist_g_aob=exist_g poss_g_aob=poss_g # Loop over each customer type and BB node for each year for t in range(T-1): year=yr_0+t aob=True aob_yr=aob_yr for node in reg_l: df_aob,accnt_aob,gen_aob = dmnd(data=df_aob,reg=node,ctype=1,year=year,dg_g=1.00,aob=aob,aob_yr=aob_yr, aob_accnt=accnt_aob,data_gen=gen_aob,exist_g=exist_g_aob) for node in ind_l: df_aob,accnt_aob, gen_aob = dmnd(data=df_aob,reg=node,ctype=2,year=year,dg_g=1.00,aob=aob,aob_yr=aob_yr, aob_accnt=accnt_aob,data_gen=gen_aob,exist_g=exist_g_aob) # Update RCPD interconnection charges if aob==False or (year+1)