Commit 1877c71

clean up tmy.py
1 parent fa0b280 commit 1877c71

File tree

1 file changed: +79 -74 lines changed

pvlib/tmy.py

Lines changed: 79 additions & 74 deletions
@@ -2,9 +2,6 @@
 Import functions for TMY2 and TMY3 data files.
 """
 
-import logging
-pvl_logger = logging.getLogger('pvlib')
-
 import re
 import datetime
 import dateutil
@@ -15,9 +12,6 @@
 from urllib.request import urlopen
 
 import pandas as pd
-import numpy as np
-
-from pvlib import tools
 
 
 def readtmy3(filename=None, coerce_year=None, recolumn=True):
@@ -164,7 +158,9 @@ def readtmy3(filename=None, coerce_year=None, recolumn=True):
         try:
             filename = _interactive_load()
         except:
-            raise Exception('Interactive load failed. Tkinter not supported on this system. Try installing X-Quartz and reloading')
+            raise Exception('Interactive load failed. Tkinter not supported ' +
+                            'on this system. Try installing X-Quartz and ' +
+                            'reloading')
 
     head = ['USAF', 'Name', 'State', 'TZ', 'latitude', 'longitude', 'altitude']
 
@@ -184,24 +180,24 @@ def readtmy3(filename=None, coerce_year=None, recolumn=True):
     meta['TZ'] = float(meta['TZ'])
     meta['USAF'] = int(meta['USAF'])
 
-    TMYData = pd.read_csv(
+    data = pd.read_csv(
         filename, header=1,
-        parse_dates={'datetime':['Date (MM/DD/YYYY)','Time (HH:MM)']},
+        parse_dates={'datetime': ['Date (MM/DD/YYYY)', 'Time (HH:MM)']},
         date_parser=lambda *x: _parsedate(*x, year=coerce_year),
         index_col='datetime')
 
     if recolumn:
-        _recolumn(TMYData) #rename to standard column names
+        _recolumn(data)  # rename to standard column names
 
-    TMYData = TMYData.tz_localize(int(meta['TZ']*3600))
+    data = data.tz_localize(int(meta['TZ']*3600))
 
-    return TMYData, meta
+    return data, meta
 
 
 def _interactive_load():
     import Tkinter
     from tkFileDialog import askopenfilename
-    Tkinter.Tk().withdraw() #Start interactive file input
+    Tkinter.Tk().withdraw()  # Start interactive file input
     return askopenfilename()
 
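For orientation, a minimal usage sketch of readtmy3 as it reads after this hunk; the file path and coerce_year value are hypothetical, not part of the commit:

# Hypothetical example: any TMY3 CSV path works in place of 'tmy3_file.csv'.
from pvlib import tmy

data, meta = tmy.readtmy3('tmy3_file.csv', coerce_year=2015, recolumn=True)
print(meta['Name'], meta['TZ'])   # station metadata parsed from the CSV header row
print(data['GHI'].head())         # standard column names, index tz-localized to the station offset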

@@ -233,20 +229,25 @@ def _recolumn(tmy3_dataframe, inplace=True):
     """
     raw_columns = 'ETR (W/m^2),ETRN (W/m^2),GHI (W/m^2),GHI source,GHI uncert (%),DNI (W/m^2),DNI source,DNI uncert (%),DHI (W/m^2),DHI source,DHI uncert (%),GH illum (lx),GH illum source,Global illum uncert (%),DN illum (lx),DN illum source,DN illum uncert (%),DH illum (lx),DH illum source,DH illum uncert (%),Zenith lum (cd/m^2),Zenith lum source,Zenith lum uncert (%),TotCld (tenths),TotCld source,TotCld uncert (code),OpqCld (tenths),OpqCld source,OpqCld uncert (code),Dry-bulb (C),Dry-bulb source,Dry-bulb uncert (code),Dew-point (C),Dew-point source,Dew-point uncert (code),RHum (%),RHum source,RHum uncert (code),Pressure (mbar),Pressure source,Pressure uncert (code),Wdir (degrees),Wdir source,Wdir uncert (code),Wspd (m/s),Wspd source,Wspd uncert (code),Hvis (m),Hvis source,Hvis uncert (code),CeilHgt (m),CeilHgt source,CeilHgt uncert (code),Pwat (cm),Pwat source,Pwat uncert (code),AOD (unitless),AOD source,AOD uncert (code),Alb (unitless),Alb source,Alb uncert (code),Lprecip depth (mm),Lprecip quantity (hr),Lprecip source,Lprecip uncert (code),PresWth (METAR code),PresWth source,PresWth uncert (code)'
 
-    new_columns = ['ETR','ETRN','GHI','GHISource','GHIUncertainty',
-                   'DNI','DNISource','DNIUncertainty','DHI','DHISource','DHIUncertainty',
-                   'GHillum','GHillumSource','GHillumUncertainty','DNillum','DNillumSource',
-                   'DNillumUncertainty','DHillum','DHillumSource','DHillumUncertainty',
-                   'Zenithlum','ZenithlumSource','ZenithlumUncertainty','TotCld','TotCldSource',
-                   'TotCldUnertainty','OpqCld','OpqCldSource','OpqCldUncertainty','DryBulb',
-                   'DryBulbSource','DryBulbUncertainty','DewPoint','DewPointSource',
-                   'DewPointUncertainty','RHum','RHumSource','RHumUncertainty','Pressure',
-                   'PressureSource','PressureUncertainty','Wdir','WdirSource','WdirUncertainty',
-                   'Wspd','WspdSource','WspdUncertainty','Hvis','HvisSource','HvisUncertainty',
-                   'CeilHgt','CeilHgtSource','CeilHgtUncertainty','Pwat','PwatSource',
-                   'PwatUncertainty','AOD','AODSource','AODUncertainty','Alb','AlbSource',
-                   'AlbUncertainty','Lprecipdepth','Lprecipquantity','LprecipSource',
-                   'LprecipUncertainty','PresWth','PresWthSource','PresWthUncertainty']
+    new_columns = [
+        'ETR', 'ETRN', 'GHI', 'GHISource', 'GHIUncertainty',
+        'DNI', 'DNISource', 'DNIUncertainty', 'DHI', 'DHISource',
+        'DHIUncertainty', 'GHillum', 'GHillumSource', 'GHillumUncertainty',
+        'DNillum', 'DNillumSource', 'DNillumUncertainty', 'DHillum',
+        'DHillumSource', 'DHillumUncertainty', 'Zenithlum',
+        'ZenithlumSource', 'ZenithlumUncertainty', 'TotCld', 'TotCldSource',
+        'TotCldUnertainty', 'OpqCld', 'OpqCldSource', 'OpqCldUncertainty',
+        'DryBulb', 'DryBulbSource', 'DryBulbUncertainty', 'DewPoint',
+        'DewPointSource', 'DewPointUncertainty', 'RHum', 'RHumSource',
+        'RHumUncertainty', 'Pressure', 'PressureSource',
+        'PressureUncertainty', 'Wdir', 'WdirSource', 'WdirUncertainty',
+        'Wspd', 'WspdSource', 'WspdUncertainty', 'Hvis', 'HvisSource',
+        'HvisUncertainty', 'CeilHgt', 'CeilHgtSource', 'CeilHgtUncertainty',
+        'Pwat', 'PwatSource', 'PwatUncertainty', 'AOD', 'AODSource',
+        'AODUncertainty', 'Alb', 'AlbSource', 'AlbUncertainty',
+        'Lprecipdepth', 'Lprecipquantity', 'LprecipSource',
+        'LprecipUncertainty', 'PresWth', 'PresWthSource',
+        'PresWthUncertainty']
 
     mapping = dict(zip(raw_columns.split(','), new_columns))
 
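The dict(zip(...)) above pairs each verbose raw TMY3 header with its short standard name. A standalone sketch of the same renaming idea, using a hypothetical three-column subset rather than the full lists:

import pandas as pd

raw = ['GHI (W/m^2)', 'DNI (W/m^2)', 'Dry-bulb (C)']     # subset of raw_columns
new = ['GHI', 'DNI', 'DryBulb']                          # matching entries of new_columns
mapping = dict(zip(raw, new))

df = pd.DataFrame([[487.0, 702.0, 21.5]], columns=raw)   # toy one-row frame
df = df.rename(columns=mapping)                          # columns become GHI, DNI, DryBulb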

@@ -395,7 +396,7 @@ def readtmy2(filename):
     columns = 'year,month,day,hour,ETR,ETRN,GHI,GHISource,GHIUncertainty,DNI,DNISource,DNIUncertainty,DHI,DHISource,DHIUncertainty,GHillum,GHillumSource,GHillumUncertainty,DNillum,DNillumSource,DNillumUncertainty,DHillum,DHillumSource,DHillumUncertainty,Zenithlum,ZenithlumSource,ZenithlumUncertainty,TotCld,TotCldSource,TotCldUnertainty,OpqCld,OpqCldSource,OpqCldUncertainty,DryBulb,DryBulbSource,DryBulbUncertainty,DewPoint,DewPointSource,DewPointUncertainty,RHum,RHumSource,RHumUncertainty,Pressure,PressureSource,PressureUncertainty,Wdir,WdirSource,WdirUncertainty,Wspd,WspdSource,WspdUncertainty,Hvis,HvisSource,HvisUncertainty,CeilHgt,CeilHgtSource,CeilHgtUncertainty,PresentWeather,Pwat,PwatSource,PwatUncertainty,AOD,AODSource,AODUncertainty,SnowDepth,SnowDepthSource,SnowDepthUncertainty,LastSnowfall,LastSnowfallSource,LastSnowfallUncertaint'
     hdr_columns = 'WBAN,City,State,TZ,latitude,longitude,altitude'
 
-    TMY2, TMY2_meta = _readTMY2(string, columns, hdr_columns, filename)
+    TMY2, TMY2_meta = _read_tmy2(string, columns, hdr_columns, filename)
 
     return TMY2, TMY2_meta
 
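A similarly hypothetical call sketch for the TMY2 reader whose helper is renamed here (the .tm2 path is made up):

tmy2_data, tmy2_meta = tmy.readtmy2('tmy2_file.tm2')   # returns (DataFrame, metadata dict)
print(tmy2_meta['City'], tmy2_meta['TZ'])              # header fields listed in hdr_columns above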

@@ -405,7 +406,6 @@ def _parsemeta_tmy2(columns, line):
 
     Parameters
     ----------
-
     columns : string
         String of column headings in the header
 
@@ -414,86 +414,91 @@ def _parsemeta_tmy2(columns, line):
 
     Returns
     -------
-
     meta : Dict of metadata contained in the header string
-
     """
-    rawmeta=" ".join(line.split()).split(" ") #Remove sduplicated spaces, and read in each element
-    meta=rawmeta[:3] #take the first string entries
+    # Remove duplicated spaces, and read in each element
+    rawmeta = " ".join(line.split()).split(" ")
+    meta = rawmeta[:3]  # take the first string entries
     meta.append(int(rawmeta[3]))
-    longitude=(float(rawmeta[5])+float(rawmeta[6])/60)*(2*(rawmeta[4]=='N')-1)#Convert to decimal notation with S negative
-    latitude=(float(rawmeta[8])+float(rawmeta[9])/60)*(2*(rawmeta[7]=='E')-1) #Convert to decimal notation with W negative
+    # Convert to decimal notation with S negative
+    longitude = (
+        float(rawmeta[5]) + float(rawmeta[6])/60) * (2*(rawmeta[4] == 'N') - 1)
+    # Convert to decimal notation with W negative
+    latitude = (
+        float(rawmeta[8]) + float(rawmeta[9])/60) * (2*(rawmeta[7] == 'E') - 1)
     meta.append(longitude)
     meta.append(latitude)
    meta.append(float(rawmeta[10]))
 
-    meta_dict = dict(zip(columns.split(','),meta)) #Creates a dictionary of metadata
-    pvl_logger.debug('meta: %s', meta_dict)
-
+    # Creates a dictionary of metadata
+    meta_dict = dict(zip(columns.split(','), meta))
     return meta_dict
 
 
-def _readTMY2(string, columns, hdr_columns, fname):
-    head=1
-    date=[]
+def _read_tmy2(string, columns, hdr_columns, fname):
+    head = 1
+    date = []
     with open(fname) as infile:
-        fline=0
+        fline = 0
         for line in infile:
-            #Skip the header
-            if head!=0:
-                meta = _parsemeta_tmy2(hdr_columns,line)
-                head-=1
+            # Skip the header
+            if head != 0:
+                meta = _parsemeta_tmy2(hdr_columns, line)
+                head -= 1
                 continue
-            #Reset the cursor and array for each line
-            cursor=1
-            part=[]
+            # Reset the cursor and array for each line
+            cursor = 1
+            part = []
             for marker in string.split('%'):
-                #Skip the first line of markers
-                if marker=='':
+                # Skip the first line of markers
+                if marker == '':
                     continue
 
-                #Read the next increment from the marker list
-                increment=int(re.findall('\d+',marker)[0])
-
-                #Extract the value from the line in the file
-                val=(line[cursor:cursor+increment])
-                #increment the cursor by the length of the read value
-                cursor=cursor+increment
+                # Read the next increment from the marker list
+                increment = int(re.findall('\d+', marker)[0])
 
+                # Extract the value from the line in the file
+                val = (line[cursor:cursor+increment])
+                # increment the cursor by the length of the read value
+                cursor = cursor+increment
 
                 # Determine the datatype from the marker string
-                if marker[-1]=='d':
+                if marker[-1] == 'd':
                     try:
-                        val=float(val)
+                        val = float(val)
                     except:
-                        raise Exception('WARNING: In'+__name__+' Read value is not an integer" '+val+' " ')
-                elif marker[-1]=='s':
+                        raise Exception('WARNING: In' + fname +
+                                        ' Read value is not an integer " ' +
+                                        val + ' " ')
+                elif marker[-1] == 's':
                     try:
-                        val=str(val)
+                        val = str(val)
                     except:
-                        raise Exception('WARNING: In'+__name__+' Read value is not a string" '+val+' " ')
+                        raise Exception('WARNING: In' + fname +
+                                        ' Read value is not a string" ' +
+                                        val + ' " ')
                 else:
-                    raise Exception('WARNING: In'+__name__+'Improper column DataFrameure " %'+marker+' " ')
+                    raise Exception('WARNING: In' + __name__ +
+                                    'Improper column DataFrame " %' +
+                                    marker + ' " ')
 
                 part.append(val)
 
-            if fline==0:
-                axes=[part]
-                year=part[0]+1900
-                fline=1
+            if fline == 0:
+                axes = [part]
+                year = part[0]+1900
+                fline = 1
             else:
                 axes.append(part)
 
-            #Create datetime objects from read data
+            # Create datetime objects from read data
            date.append(datetime.datetime(year=int(year),
                                           month=int(part[1]),
                                           day=int(part[2]),
                                           hour=int(part[3])-1))
 
-    TMYData = pd.DataFrame(
+    data = pd.DataFrame(
         axes, index=date,
         columns=columns.split(',')).tz_localize(int(meta['TZ']*3600))
 
-    return TMYData, meta
-
-
+    return data, meta
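To make the fixed-width parsing in _read_tmy2 above easier to follow: each %-marker in the format string carries a field width in its digits and a type code ('d' or 's') in its last character, and the loop slices the record with a running cursor. A minimal standalone sketch of that cursor-and-increment idea, with a made-up marker string and record line (not the actual TMY2 format spec):

import re

markers = '%2d%2d%4s'      # hypothetical spec: two 2-digit numeric fields, one 4-char string field
record = ' 9512ABCD'       # cursor starts at 1, so position 0 is skipped
cursor, part = 1, []
for marker in markers.split('%'):
    if marker == '':
        continue
    width = int(re.findall(r'\d+', marker)[0])   # field width from the marker digits
    val = record[cursor:cursor + width]
    cursor += width
    part.append(float(val) if marker[-1] == 'd' else str(val))
print(part)                # [95.0, 12.0, 'ABCD']

The metadata conversion in _parsemeta_tmy2 follows the same formula shown in the hunk: a header field of 25 degrees 48 minutes with an 'N' flag becomes (25 + 48/60) * (2*True - 1) = 25.8, while an 'S' flag flips the sign.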
