"""
Import functions for TMY2 and TMY3 data files.
"""
5
- import logging
6
- pvl_logger = logging .getLogger ('pvlib' )
7
-
8
5
import re
9
6
import datetime
10
7
import dateutil
15
12
from urllib .request import urlopen
16
13
17
14
import pandas as pd
18
- import numpy as np
19
-
20
- from pvlib import tools
21
15
22
16
23
17
def readtmy3 (filename = None , coerce_year = None , recolumn = True ):
@@ -164,7 +158,9 @@ def readtmy3(filename=None, coerce_year=None, recolumn=True):
164
158
try :
165
159
filename = _interactive_load ()
166
160
except :
167
- raise Exception ('Interactive load failed. Tkinter not supported on this system. Try installing X-Quartz and reloading' )
161
+ raise Exception ('Interactive load failed. Tkinter not supported ' +
162
+ 'on this system. Try installing X-Quartz and ' +
163
+ 'reloading' )
168
164
169
165
head = ['USAF' , 'Name' , 'State' , 'TZ' , 'latitude' , 'longitude' , 'altitude' ]
170
166
@@ -184,24 +180,24 @@ def readtmy3(filename=None, coerce_year=None, recolumn=True):
184
180
meta ['TZ' ] = float (meta ['TZ' ])
185
181
meta ['USAF' ] = int (meta ['USAF' ])
186
182
187
- TMYData = pd .read_csv (
183
+ data = pd .read_csv (
188
184
filename , header = 1 ,
189
- parse_dates = {'datetime' :['Date (MM/DD/YYYY)' ,'Time (HH:MM)' ]},
185
+ parse_dates = {'datetime' : ['Date (MM/DD/YYYY)' , 'Time (HH:MM)' ]},
190
186
date_parser = lambda * x : _parsedate (* x , year = coerce_year ),
191
187
index_col = 'datetime' )
192
188
193
189
if recolumn :
194
- _recolumn (TMYData ) # rename to standard column names
190
+ _recolumn (data ) # rename to standard column names
195
191
196
- TMYData = TMYData .tz_localize (int (meta ['TZ' ]* 3600 ))
192
+ data = data .tz_localize (int (meta ['TZ' ]* 3600 ))
197
193
198
- return TMYData , meta
194
+ return data , meta
199
195
200
196
201
197
def _interactive_load():
    """Open a Tk file-picker dialog and return the selected filename.

    Returns
    -------
    str
        Path chosen by the user (empty string if the dialog is
        cancelled, per ``askopenfilename`` semantics).

    Notes
    -----
    The rest of this module targets Python 3 (``urllib.request``), so
    the Python 3 ``tkinter`` names are tried first; the Python 2
    spellings are kept as a fallback.
    """
    try:
        import tkinter
        from tkinter.filedialog import askopenfilename
    except ImportError:
        # Python 2 fallback
        import Tkinter as tkinter
        from tkFileDialog import askopenfilename
    tkinter.Tk().withdraw()  # Start interactive file input
    return askopenfilename()
202
207
203
@@ -233,20 +229,25 @@ def _recolumn(tmy3_dataframe, inplace=True):
233
229
"""
234
230
raw_columns = 'ETR (W/m^2),ETRN (W/m^2),GHI (W/m^2),GHI source,GHI uncert (%),DNI (W/m^2),DNI source,DNI uncert (%),DHI (W/m^2),DHI source,DHI uncert (%),GH illum (lx),GH illum source,Global illum uncert (%),DN illum (lx),DN illum source,DN illum uncert (%),DH illum (lx),DH illum source,DH illum uncert (%),Zenith lum (cd/m^2),Zenith lum source,Zenith lum uncert (%),TotCld (tenths),TotCld source,TotCld uncert (code),OpqCld (tenths),OpqCld source,OpqCld uncert (code),Dry-bulb (C),Dry-bulb source,Dry-bulb uncert (code),Dew-point (C),Dew-point source,Dew-point uncert (code),RHum (%),RHum source,RHum uncert (code),Pressure (mbar),Pressure source,Pressure uncert (code),Wdir (degrees),Wdir source,Wdir uncert (code),Wspd (m/s),Wspd source,Wspd uncert (code),Hvis (m),Hvis source,Hvis uncert (code),CeilHgt (m),CeilHgt source,CeilHgt uncert (code),Pwat (cm),Pwat source,Pwat uncert (code),AOD (unitless),AOD source,AOD uncert (code),Alb (unitless),Alb source,Alb uncert (code),Lprecip depth (mm),Lprecip quantity (hr),Lprecip source,Lprecip uncert (code),PresWth (METAR code),PresWth source,PresWth uncert (code)'
235
231
236
- new_columns = ['ETR' ,'ETRN' ,'GHI' ,'GHISource' ,'GHIUncertainty' ,
237
- 'DNI' ,'DNISource' ,'DNIUncertainty' ,'DHI' ,'DHISource' ,'DHIUncertainty' ,
238
- 'GHillum' ,'GHillumSource' ,'GHillumUncertainty' ,'DNillum' ,'DNillumSource' ,
239
- 'DNillumUncertainty' ,'DHillum' ,'DHillumSource' ,'DHillumUncertainty' ,
240
- 'Zenithlum' ,'ZenithlumSource' ,'ZenithlumUncertainty' ,'TotCld' ,'TotCldSource' ,
241
- 'TotCldUnertainty' ,'OpqCld' ,'OpqCldSource' ,'OpqCldUncertainty' ,'DryBulb' ,
242
- 'DryBulbSource' ,'DryBulbUncertainty' ,'DewPoint' ,'DewPointSource' ,
243
- 'DewPointUncertainty' ,'RHum' ,'RHumSource' ,'RHumUncertainty' ,'Pressure' ,
244
- 'PressureSource' ,'PressureUncertainty' ,'Wdir' ,'WdirSource' ,'WdirUncertainty' ,
245
- 'Wspd' ,'WspdSource' ,'WspdUncertainty' ,'Hvis' ,'HvisSource' ,'HvisUncertainty' ,
246
- 'CeilHgt' ,'CeilHgtSource' ,'CeilHgtUncertainty' ,'Pwat' ,'PwatSource' ,
247
- 'PwatUncertainty' ,'AOD' ,'AODSource' ,'AODUncertainty' ,'Alb' ,'AlbSource' ,
248
- 'AlbUncertainty' ,'Lprecipdepth' ,'Lprecipquantity' ,'LprecipSource' ,
249
- 'LprecipUncertainty' ,'PresWth' ,'PresWthSource' ,'PresWthUncertainty' ]
232
+ new_columns = [
233
+ 'ETR' , 'ETRN' , 'GHI' , 'GHISource' , 'GHIUncertainty' ,
234
+ 'DNI' , 'DNISource' , 'DNIUncertainty' , 'DHI' , 'DHISource' ,
235
+ 'DHIUncertainty' , 'GHillum' , 'GHillumSource' , 'GHillumUncertainty' ,
236
+ 'DNillum' , 'DNillumSource' , 'DNillumUncertainty' , 'DHillum' ,
237
+ 'DHillumSource' , 'DHillumUncertainty' , 'Zenithlum' ,
238
+ 'ZenithlumSource' , 'ZenithlumUncertainty' , 'TotCld' , 'TotCldSource' ,
239
+ 'TotCldUnertainty' , 'OpqCld' , 'OpqCldSource' , 'OpqCldUncertainty' ,
240
+ 'DryBulb' , 'DryBulbSource' , 'DryBulbUncertainty' , 'DewPoint' ,
241
+ 'DewPointSource' , 'DewPointUncertainty' , 'RHum' , 'RHumSource' ,
242
+ 'RHumUncertainty' , 'Pressure' , 'PressureSource' ,
243
+ 'PressureUncertainty' , 'Wdir' , 'WdirSource' , 'WdirUncertainty' ,
244
+ 'Wspd' , 'WspdSource' , 'WspdUncertainty' , 'Hvis' , 'HvisSource' ,
245
+ 'HvisUncertainty' , 'CeilHgt' , 'CeilHgtSource' , 'CeilHgtUncertainty' ,
246
+ 'Pwat' , 'PwatSource' , 'PwatUncertainty' , 'AOD' , 'AODSource' ,
247
+ 'AODUncertainty' , 'Alb' , 'AlbSource' , 'AlbUncertainty' ,
248
+ 'Lprecipdepth' , 'Lprecipquantity' , 'LprecipSource' ,
249
+ 'LprecipUncertainty' , 'PresWth' , 'PresWthSource' ,
250
+ 'PresWthUncertainty' ]
250
251
251
252
mapping = dict (zip (raw_columns .split (',' ), new_columns ))
252
253
@@ -395,7 +396,7 @@ def readtmy2(filename):
395
396
columns = 'year,month,day,hour,ETR,ETRN,GHI,GHISource,GHIUncertainty,DNI,DNISource,DNIUncertainty,DHI,DHISource,DHIUncertainty,GHillum,GHillumSource,GHillumUncertainty,DNillum,DNillumSource,DNillumUncertainty,DHillum,DHillumSource,DHillumUncertainty,Zenithlum,ZenithlumSource,ZenithlumUncertainty,TotCld,TotCldSource,TotCldUnertainty,OpqCld,OpqCldSource,OpqCldUncertainty,DryBulb,DryBulbSource,DryBulbUncertainty,DewPoint,DewPointSource,DewPointUncertainty,RHum,RHumSource,RHumUncertainty,Pressure,PressureSource,PressureUncertainty,Wdir,WdirSource,WdirUncertainty,Wspd,WspdSource,WspdUncertainty,Hvis,HvisSource,HvisUncertainty,CeilHgt,CeilHgtSource,CeilHgtUncertainty,PresentWeather,Pwat,PwatSource,PwatUncertainty,AOD,AODSource,AODUncertainty,SnowDepth,SnowDepthSource,SnowDepthUncertainty,LastSnowfall,LastSnowfallSource,LastSnowfallUncertaint'
396
397
hdr_columns = 'WBAN,City,State,TZ,latitude,longitude,altitude'
397
398
398
- TMY2 , TMY2_meta = _readTMY2 (string , columns , hdr_columns , filename )
399
+ TMY2 , TMY2_meta = _read_tmy2 (string , columns , hdr_columns , filename )
399
400
400
401
return TMY2 , TMY2_meta
401
402
@@ -405,7 +406,6 @@ def _parsemeta_tmy2(columns, line):
405
406
406
407
Parameters
407
408
----------
408
-
409
409
columns : string
410
410
String of column headings in the header
411
411
@@ -414,86 +414,91 @@ def _parsemeta_tmy2(columns, line):
414
414
415
415
Returns
416
416
-------
417
-
418
417
meta : Dict of metadata contained in the header string
419
-
420
418
"""
421
- rawmeta = " " .join (line .split ()).split (" " ) #Remove sduplicated spaces, and read in each element
422
- meta = rawmeta [:3 ] #take the first string entries
419
+ # Remove duplicated spaces, and read in each element
420
+ rawmeta = " " .join (line .split ()).split (" " )
421
+ meta = rawmeta [:3 ] # take the first string entries
423
422
meta .append (int (rawmeta [3 ]))
424
- longitude = (float (rawmeta [5 ])+ float (rawmeta [6 ])/ 60 )* (2 * (rawmeta [4 ]== 'N' )- 1 )#Convert to decimal notation with S negative
425
- latitude = (float (rawmeta [8 ])+ float (rawmeta [9 ])/ 60 )* (2 * (rawmeta [7 ]== 'E' )- 1 ) #Convert to decimal notation with W negative
423
+ # Convert to decimal notation with S negative
424
+ longitude = (
425
+ float (rawmeta [5 ]) + float (rawmeta [6 ])/ 60 ) * (2 * (rawmeta [4 ] == 'N' ) - 1 )
426
+ # Convert to decimal notation with W negative
427
+ latitude = (
428
+ float (rawmeta [8 ]) + float (rawmeta [9 ])/ 60 ) * (2 * (rawmeta [7 ] == 'E' ) - 1 )
426
429
meta .append (longitude )
427
430
meta .append (latitude )
428
431
meta .append (float (rawmeta [10 ]))
429
432
430
- meta_dict = dict (zip (columns .split (',' ),meta )) #Creates a dictionary of metadata
431
- pvl_logger .debug ('meta: %s' , meta_dict )
432
-
433
+ # Creates a dictionary of metadata
434
+ meta_dict = dict (zip (columns .split (',' ), meta ))
433
435
return meta_dict
434
436
435
437
436
def _read_tmy2(string, columns, hdr_columns, fname):
    """Parse a TMY2 file into a DataFrame plus a metadata dict.

    Parameters
    ----------
    string : str
        Printf-style column format, e.g. ``'%2d%2d%4s'``. Each
        ``%<width><code>`` marker describes one fixed-width field of a
        data line: code ``d`` is converted to float, ``s`` is kept as
        a string.
    columns : str
        Comma-separated column names, one per marker in ``string``.
        The first four are expected to be year, month, day, hour.
    hdr_columns : str
        Comma-separated names for the header-line metadata fields.
    fname : str
        Path of the TMY2 file to read.

    Returns
    -------
    data : pandas.DataFrame
        File contents indexed by timestamps localized to the header's
        fixed UTC offset.
    meta : dict
        Metadata parsed from the first (header) line.

    Raises
    ------
    Exception
        If the format string contains an unknown type code, or a
        numeric field cannot be converted.
    """
    # Parse the format string ONCE instead of re-running the regex for
    # every field of every data line (it is loop-invariant).
    specs = []
    for marker in string.split('%'):
        if marker == '':
            # skip the empty element before the leading '%'
            continue
        type_code = marker[-1]
        if type_code not in ('d', 's'):
            raise Exception('WARNING: In ' + fname +
                            ' improper column descriptor " %' +
                            marker + ' "')
        specs.append((int(re.findall(r'\d+', marker)[0]), type_code))

    head = 1
    date = []
    axes = []
    year = None
    with open(fname) as infile:
        for line in infile:
            # The first line is the station-metadata header.
            if head != 0:
                meta = _parsemeta_tmy2(hdr_columns, line)
                head -= 1
                continue
            # Walk the fixed-width fields. The cursor starts at 1:
            # column 0 of each data line is deliberately skipped
            # (same as the original parser).
            cursor = 1
            part = []
            for width, type_code in specs:
                val = line[cursor:cursor + width]
                cursor += width
                if type_code == 'd':
                    try:
                        val = float(val)
                    except ValueError:
                        raise Exception('WARNING: In ' + fname +
                                        ' read value is not numeric " ' +
                                        val + ' "')
                # 's' fields are already strings; no conversion needed.
                part.append(val)
            if not axes:
                # Two-digit TMY2 years are interpreted as 19xx.
                year = part[0] + 1900
            axes.append(part)
            # Create datetime objects from read data. TMY2 hours run
            # 1-24; datetime wants 0-23, hence the -1.
            date.append(datetime.datetime(year=int(year),
                                          month=int(part[1]),
                                          day=int(part[2]),
                                          hour=int(part[3]) - 1))

    data = pd.DataFrame(
        axes, index=date,
        columns=columns.split(',')).tz_localize(int(meta['TZ']*3600))

    return data, meta
0 commit comments