Ticket #2847: US_Ha1_run_ba137b_v_run_ba137f_ALL_gb.py

File US_Ha1_run_ba137b_v_run_ba137f_ALL_gb.py, 15.8 KB (added by NoelClancy, 8 months ago)
from netCDF4 import Dataset

import numpy as np
import pandas as pd
#from pandas import Series, DataFrame, Panel


#import matplotlib.pyplot as plt
#import matplotlib.dates as mdates
from sklearn.metrics import mean_squared_error
from math import sqrt


import os
import datetime
import glob

# Set the matplotlib backend
#import matplotlib as mpl
#mpl.use('Agg')

import matplotlib.pyplot as plt
import matplotlib.dates as mdates
#import scripty.stats

import iris
import iris.coord_categorisation
import iris.quickplot as qplt
import iris.plot as iplt

import jules

import make_time_coord

from iris.coords import DimCoord
from iris.cube import Cube


# Build an absolute path to the data directory relative to this script,
# so the data files can be located easily from other Python programmes.

import os.path as op
data_path = op.abspath(op.join(op.dirname(__file__), '../data'))

def jules_output_cube(cube, field, filename):
    '''
    Callback to add an x and y coord (otherwise load returns a cube with anonymous
    dimensions, which can't be concatenated).
    '''

    n = cube.ndim

    try:
        if cube.coord_dims('time') == (0,):  # assume if time is a dimcoord, it is at position 0
            n -= 1
    except iris.exceptions.CoordinateNotFoundError:
        pass

    if n >= 1:
        x_coord = iris.coords.DimCoord(range(cube.shape[-1]), var_name='x')
        xdim = cube.ndim - 1
        cube.add_dim_coord(x_coord, (xdim, ))

    if n >= 2:
        y_coord = iris.coords.DimCoord([0], var_name='y')
        ydim = cube.ndim - 2
        cube.add_dim_coord(y_coord, (ydim, ))

    #print(cube.ndim)
    return


# op.normpath adjusts the path separators automatically for Unix or Windows operating systems.

filename_run_ba137b = op.normpath(op.join(data_path, 'US_Ha1-r9227_branch-presc0.D.1991-2012_run_ba137b.nc'))
filename_run_ba137f = op.normpath(op.join(data_path, 'US_Ha1-r9227_branch-presc0.D.1991-2012_run_ba137f.nc'))
filename_run_ba137b_mm = op.normpath(op.join(data_path, 'US_Ha1-r9227_branch-presc0.D.1991-2012_run_ba137b_mm.nc'))
filename_run_ba137f_mm = op.normpath(op.join(data_path, 'US_Ha1-r9227_branch-presc0.D.1991-2012_run_ba137f_mm.nc'))

df = pd.read_csv(op.normpath(op.join(data_path, 'FLX_US-Ha1_FLUXNET2015_FULLSET_MM_1991-2012_1-3.csv')))



print(df.TIMESTAMP.shape)
print(df.TIMESTAMP[0:])

df['GPP'] = df['GPP_NT_VUT_REF']
print(df.GPP.shape)
print(df.GPP[0:])

obs_raw = df.GPP[0:]
obs_cor = (abs(obs_raw) + obs_raw) / 2
print("Corrected Observations = ", obs_cor)

"""
daily_carbon_file_headings = [
            'TIMESTAMP','NEE_VUT_REF','NEE_VUT_REF_RANDUNC','NEE_VUT_25','NEE_VUT_50','NEE_VUT_75',
            'RECO_NT_VUT_REF','RECO_NT_VUT_25','RECO_NT_VUT_50','RECO_NT_VUT_75',
            'GPP_NT_VUT_REF','GPP_NT_VUT_25','GPP_NT_VUT_50','GPP_NT_VUT_75',
            'RECO_DT_VUT_REF','RECO_DT_VUT_25','RECO_DT_VUT_50','RECO_DT_VUT_75',
            'GPP_DT_VUT_REF','GPP_DT_VUT_25','GPP_DT_VUT_50','GPP_DT_VUT_75']
"""


nc_run_ba137b = Dataset(filename_run_ba137b)
#print(nc_run_ba137b.variables)

y_run_ba137b = nc_run_ba137b.variables['gpp_gb']
y_run_ba137b_units = nc_run_ba137b.variables['gpp_gb'].units

x_run_ba137b = nc_run_ba137b.variables['time']
x_run_ba137b_units = nc_run_ba137b.variables['time'].units

y_run_ba137b = nc_run_ba137b.variables['gpp_gb'][0:, 0, 0]
x_run_ba137b = nc_run_ba137b.variables['time'][0:]
nc_run_ba137b.close()


nc_run_ba137f = Dataset(filename_run_ba137f)
#print(nc_run_ba137f.variables)

y_run_ba137f = nc_run_ba137f.variables['gpp_gb']
y_run_ba137f_units = nc_run_ba137f.variables['gpp_gb'].units

x_run_ba137f = nc_run_ba137f.variables['time']
x_run_ba137f_units = nc_run_ba137f.variables['time'].units

y_run_ba137f = nc_run_ba137f.variables['gpp_gb'][0:, 0, 0]
x_run_ba137f = nc_run_ba137f.variables['time'][0:]
print(len(x_run_ba137f))
nc_run_ba137f.close()

nc_run_ba137b_mm = Dataset(filename_run_ba137b_mm)

y_run_ba137b_mm = nc_run_ba137b_mm.variables['gpp_gb']
y_run_ba137b_mm_units = nc_run_ba137b_mm.variables['gpp_gb'].units

x_run_ba137b_mm = nc_run_ba137b_mm.variables['time']
x_run_ba137b_mm_units = nc_run_ba137b_mm.variables['time'].units

y_run_ba137b_mm = nc_run_ba137b_mm.variables['gpp_gb'][0:, 0, 0]
x_run_ba137b_mm = nc_run_ba137b_mm.variables['time'][0:]
print(nc_run_ba137b_mm.dimensions)
#print(nc_run_ba137b_mm.variables['gpp_gb'][0:, 0, 0])
print(nc_run_ba137b_mm.variables['time'][0:])
nc_run_ba137b_mm.close()  # close after the prints above, which read from the open dataset
#print(y_run_ba137b_mm*60*60*24*1000)



nc_run_ba137f_mm = Dataset(filename_run_ba137f_mm)

y_run_ba137f_mm = nc_run_ba137f_mm.variables['gpp_gb']
y_run_ba137f_mm_units = nc_run_ba137f_mm.variables['gpp_gb'].units

x_run_ba137f_mm = nc_run_ba137f_mm.variables['time']
x_run_ba137f_mm_units = nc_run_ba137f_mm.variables['time'].units

y_run_ba137f_mm = nc_run_ba137f_mm.variables['gpp_gb'][0:, 0, 0]
x_run_ba137f_mm = nc_run_ba137f_mm.variables['time'][0:]
#print(nc_run_ba137f_mm.variables['gpp_gb'][0:, 0, 0])
print(nc_run_ba137f_mm.variables['time'][0:])
print('shape of Jan_1991-Dec_2012 in months = ', nc_run_ba137f_mm.variables['time'][0:].shape)
nc_run_ba137f_mm.close()  # close after the prints above, which read from the open dataset


#obs = df.GPP[0:]
#mod_dynam = y_run_ba137f_mm*60*60*24*1000
#diff_mod_dynam_obs = mod_dynam-obs
#diff_mod_dynam_obs_squared = (mod_dynam-obs)**2
#rmse_dynam = np.sqrt(diff_mod_dynam_obs_squared)
#print("obs = " + str(["%.8f" % elem for elem in obs]))
#print("mod_dynam = " + str(["%.8f" % elem for elem in mod_dynam]))
#print("diff_mod_dynam_obs = " + str(["%.8f" % elem for elem in diff_mod_dynam_obs]))
#print("diff_mod_dynam_obs_squared = " + str(["%.8f" % elem for elem in diff_mod_dynam_obs_squared]))
#print("rmse_dynam = " + str(["%.8f" % elem for elem in rmse_dynam]))

#coords.convert_units('celsius')


#time = DimCoord(x_run_ba137b_mm, standard_name='time', units='seconds since 2001-06-19 00:00:00')
#time_coord = make_time_coord.make_time_coord(nc_run_ba137b)
#time_coord.var_name = 'time'

#cube_137b = Cube((y_run_ba137b_mm)*60*60*24*1000)
#cube.add_dim_coord(time_coord, (0,))


#cube_137b.convert_units('years')
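# Load the monthly JULES output as iris cubes; the jules_output_cube callback defined above
# adds x/y dimension coordinates so that the loaded pieces can be concatenated into one cube.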
var_name_constraint = iris.Constraint(cube_func=lambda x: x.var_name == 'gpp_gb')
ba137b_cubelist = jules.load(filename_run_ba137b_mm, var_name_constraint, conv_to_grid=False, callback=jules_output_cube)
ba137b_cube = ba137b_cubelist.concatenate_cube()
ba137b_cube = iris.util.squeeze(ba137b_cube)
#ba137b_cube *= 60*60*24*1000
ba137b_cube.units = 'kg/m2/s'
print(ba137b_cube)

gpp_ba137b_cube = ba137b_cube
gpp_ba137b_cube.units = 'kg/m2/s'

var_name_constraint = iris.Constraint(cube_func=lambda x: x.var_name == 'gpp_gb')
ba137f_cubelist = jules.load(filename_run_ba137f_mm, var_name_constraint, conv_to_grid=False, callback=jules_output_cube)
ba137f_cube = ba137f_cubelist.concatenate_cube()
ba137f_cube = iris.util.squeeze(ba137f_cube)
#ba137f_cube *= 60*60*24*1000
ba137f_cube.units = 'kg/m2/s'
print(ba137f_cube)

gpp_ba137f_cube = ba137f_cube
gpp_ba137f_cube.units = 'kg/m2/s'
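# The commented-out '*= 60*60*24*1000' lines above would convert the model GPP cubes from
# kg/m2/s to g/m2/day, matching the units of the FLUXNET observations plotted later.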

"""
var_name_constraint = iris.Constraint(cube_func=lambda x: x.var_name == 'gpp_gb')
obs_cor_cubelist = jules.load(filename_run_ba137f_mm, var_name_constraint, conv_to_grid=False, callback=jules_output_cube)
obs_cor_cube = ba137f_cubelist.concatenate_cube()
obs_cor_cube = iris.util.squeeze(ba137f_cube)
obs_cor_cube *= 60*60*24*1000
obs_cor_cube.units = 'g/m2/day'
print obs_cor_cube
"""

dt_format = '%Y%m%d'
timestamp_convertfunc = lambda x: datetime.datetime.strptime(x, dt_format)
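# e.g. timestamp_convertfunc('19910101') -> datetime.datetime(1991, 1, 1, 0, 0)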

#rename 'TIMESTAMP_START' as 'TIMESTAMP' so make_time_coord recognises it
#names = [x.replace('TIMESTAMP_START', 'TIMESTAMP') for x in names]


filename = '/group_workspaces/jasmin2/jules/pmcguire/fluxnet/kwilliam/suite_data/vn1.0/fluxnet_obs/daily_obs/US_Ha1-energyandcarbon-dailyUTC.dat'
data = np.genfromtxt(filename, names=['YYYYMMDD_UTC', 'GPP', 'Reco', 'NEE', 'SH', 'LE'], converters={'YYYYMMDD_UTC': timestamp_convertfunc},
                     dtype=None, deletechars='', delimiter=',',
                     skip_header=0, skip_footer=0,
                     usemask=True, missing_values=-9999)
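# usemask=True together with missing_values=-9999 returns a numpy masked array, so the
# -9999 gap values stay masked through the monthly aggregations below.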

# add 12 hours to the datetimes
for i, dt in enumerate(data['YYYYMMDD_UTC']):
    data['YYYYMMDD_UTC'][i] = dt + datetime.timedelta(hours=12)
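# Shifting each daily timestamp to 12:00 UTC presumably centres each daily value at the middle of its day.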
print(data)
time_coord = make_time_coord.make_time_coord(data)
time_coord.var_name = 'time'

##############################################################################################################  GPP

obs_cube = iris.cube.Cube(data['GPP'], var_name='GPP')
#obs_cube.data = np.ma.maximum(obs_cube.data, 0.0)
#obs_cube_mean = obs_cube.aggregated_by(["year", "month"], iris.analysis.MEAN, mdtol=0.03)

obs_cube.add_dim_coord(time_coord, (0,))

obs_cube.data  # make sure the data is read in right away, otherwise we get into trouble with biggus
# (it gets confused about which points should be masked)

# add some extra aux_coords
iris.coord_categorisation.add_year(obs_cube, 'time')
iris.coord_categorisation.add_month(obs_cube, 'time')
iris.coord_categorisation.add_day_of_year(obs_cube, 'time')

obs_cube = iris.util.squeeze(obs_cube)
obs_cube.data = np.ma.maximum(obs_cube.data, 0.0)  # clip negative daily GPP observations to zero
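# mdtol=0.5: the monthly mean below is masked when more than half of that month's daily values are missing.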
obs_cube_mean = obs_cube.aggregated_by(["year", "month"], iris.analysis.MEAN, mdtol=0.5)
GPP_obs_cube_mean = obs_cube_mean
print('GPP_obs_cube_mean.data = ', GPP_obs_cube_mean.data)

##############################################################################################################  Reco

obs_cube = iris.cube.Cube(data['Reco'], var_name='Reco')
#obs_cube.data = np.ma.maximum(obs_cube.data, 0.0)
#obs_cube_mean = obs_cube.aggregated_by(["year", "month"], iris.analysis.MEAN, mdtol=0.03)

obs_cube.add_dim_coord(time_coord, (0,))

obs_cube.data  # make sure the data is read in right away, otherwise we get into trouble with biggus
# (it gets confused about which points should be masked)

# add some extra aux_coords
iris.coord_categorisation.add_year(obs_cube, 'time')
iris.coord_categorisation.add_month(obs_cube, 'time')
iris.coord_categorisation.add_day_of_year(obs_cube, 'time')

obs_cube = iris.util.squeeze(obs_cube)
#obs_cube.data = np.ma.maximum(obs_cube.data, 0.0)
obs_cube_mean = obs_cube.aggregated_by(["year", "month"], iris.analysis.MEAN, mdtol=0.5)
Reco_obs_cube_mean = obs_cube_mean
print('Reco_obs_cube_mean.data = ', Reco_obs_cube_mean.data)

##############################################################################################################  NEE

obs_cube = iris.cube.Cube(data['NEE'], var_name='NEE')
#obs_cube.data = np.ma.maximum(obs_cube.data, 0.0)
#obs_cube_mean = obs_cube.aggregated_by(["year", "month"], iris.analysis.MEAN, mdtol=0.03)

obs_cube.add_dim_coord(time_coord, (0,))

obs_cube.data  # make sure the data is read in right away, otherwise we get into trouble with biggus
# (it gets confused about which points should be masked)

# add some extra aux_coords
iris.coord_categorisation.add_year(obs_cube, 'time')
iris.coord_categorisation.add_month(obs_cube, 'time')
iris.coord_categorisation.add_day_of_year(obs_cube, 'time')

obs_cube = iris.util.squeeze(obs_cube)
#obs_cube.data = np.ma.maximum(obs_cube.data, 0.0)
obs_cube_mean = obs_cube.aggregated_by(["year", "month"], iris.analysis.MEAN, mdtol=0.5)
NEE_obs_cube_mean = obs_cube_mean
print('NEE_obs_cube_mean.data = ', NEE_obs_cube_mean.data)

##############################################################################################################  SH

obs_cube = iris.cube.Cube(data['SH'], var_name='SH')
#obs_cube.data = np.ma.maximum(obs_cube.data, 0.0)
#obs_cube_mean = obs_cube.aggregated_by(["year", "month"], iris.analysis.MEAN, mdtol=0.03)

obs_cube.add_dim_coord(time_coord, (0,))

obs_cube.data  # make sure the data is read in right away, otherwise we get into trouble with biggus
# (it gets confused about which points should be masked)

# add some extra aux_coords
iris.coord_categorisation.add_year(obs_cube, 'time')
iris.coord_categorisation.add_month(obs_cube, 'time')
iris.coord_categorisation.add_day_of_year(obs_cube, 'time')

obs_cube = iris.util.squeeze(obs_cube)
#obs_cube.data = np.ma.maximum(obs_cube.data, 0.0)
obs_cube_mean = obs_cube.aggregated_by(["year", "month"], iris.analysis.MEAN, mdtol=0.5)
SH_obs_cube_mean = obs_cube_mean
print('SH_obs_cube_mean.data = ', SH_obs_cube_mean.data)

##############################################################################################################  LE
"""
obs_cube = iris.cube.Cube(data['LE'], var_name='LE')
#obs_cube.data = np.ma.maximum(obs_cube.data, 0.0)
#obs_cube_mean = obs_cube.aggregated_by(["year", "month"], iris.analysis.MEAN, mdtol=0.03)

obs_cube.add_dim_coord(time_coord, (0,))

obs_cube.data # make sure the data is read in right away, otherwise get in to trouble with biggus
# (it gets confused about which points should be masked)

# add some extra aux_coords
iris.coord_categorisation.add_year(obs_cube, 'time')
iris.coord_categorisation.add_month(obs_cube, 'time')
iris.coord_categorisation.add_day_of_year(obs_cube, 'time')

obs_cube = iris.util.squeeze(obs_cube)
#obs_cube.data = np.ma.maximum(obs_cube.data, 0.0)
obs_cube_mean = obs_cube.aggregated_by(["year", "month"], iris.analysis.MEAN, mdtol=0.5)
print('LE_obs_cube_mean.data = ', obs_cube_mean.data)
"""


qplt.plot(GPP_obs_cube_mean[0:], color='red', linewidth=1, linestyle='-', label='Observations')
plt.axhline(0, linestyle=':', color='black')
plt.xlabel('Time')
plt.ylabel('Gridbox gross primary production (g/m2/day)')
plt.legend(loc='upper right')
plt.title('US_Ha1 (Jan 1991-Dec 2012)')
#plt.savefig('/home/users/nmc/FLUXNET2015/JULES_12313_30_sites/US_Ha1/plots/US_Ha1_gpp_gb_run_ba137b_mm_v_run_ba137f_mm_v_observations_mm.png')
#plt.savefig('/home/users/nmc/FLUXNET2015/JULES_12313_30_sites/US_Ha1/plots/US_Ha1_gpp_gb_run_ba137b_mm_v_run_ba137f_mm_v_observations_mm.pdf')
plt.show()


qplt.plot(Reco_obs_cube_mean[0:], color='red', linewidth=1, linestyle='-', label='Observations')
plt.axhline(0, linestyle=':', color='black')
plt.xlabel('Time')
plt.ylabel('Gridbox total ecosystem respiration (kg/m2/s)')
plt.legend(loc='upper right')
plt.title('US_Ha1 (Jan 1991-Dec 2012)')
#plt.savefig('/home/users/nmc/FLUXNET2015/JULES_12313_30_sites/US_Ha1/plots/US_Ha1_gpp_gb_run_ba137b_mm_v_run_ba137f_mm_v_observations_mm.png')
#plt.savefig('/home/users/nmc/FLUXNET2015/JULES_12313_30_sites/US_Ha1/plots/US_Ha1_gpp_gb_run_ba137b_mm_v_run_ba137f_mm_v_observations_mm.pdf')
plt.show()

qplt.plot(NEE_obs_cube_mean[0:], color='red', linewidth=1, linestyle='-', label='Observations')
plt.axhline(0, linestyle=':', color='black')
plt.xlabel('Time')
plt.ylabel('Gridbox net ecosystem exchange (g/m2/day)')
plt.legend(loc='upper right')
plt.title('US_Ha1 (Jan 1991-Dec 2012)')
#plt.savefig('/home/users/nmc/FLUXNET2015/JULES_12313_30_sites/US_Ha1/plots/US_Ha1_gpp_gb_run_ba137b_mm_v_run_ba137f_mm_v_observations_mm.png')
#plt.savefig('/home/users/nmc/FLUXNET2015/JULES_12313_30_sites/US_Ha1/plots/US_Ha1_gpp_gb_run_ba137b_mm_v_run_ba137f_mm_v_observations_mm.pdf')
plt.show()

qplt.plot(SH_obs_cube_mean[0:], color='red', linewidth=1, linestyle='-', label='Observations')
plt.axhline(0, linestyle=':', color='black')
plt.xlabel('Time')
plt.ylabel('Gridbox sensible heat flux (W/m2)')
plt.legend(loc='upper right')
plt.title('US_Ha1 (Jan 1991-Dec 2012)')
#plt.savefig('/home/users/nmc/FLUXNET2015/JULES_12313_30_sites/US_Ha1/plots/US_Ha1_gpp_gb_run_ba137b_mm_v_run_ba137f_mm_v_observations_mm.png')
#plt.savefig('/home/users/nmc/FLUXNET2015/JULES_12313_30_sites/US_Ha1/plots/US_Ha1_gpp_gb_run_ba137b_mm_v_run_ba137f_mm_v_observations_mm.pdf')
plt.show()

"""
qplt.plot(LE_obs_cube_mean[0:], color='red', linewidth=1, linestyle='-', label='Observations')
plt.axhline(0, linestyle=':', color='black')
plt.xlabel('Time')
plt.ylabel('Gridbox latent heat flux (W/m2)')
plt.legend(loc='upper right')
plt.title('US_Ha1 (Jan 1991-Dec 2012)')
#plt.savefig('/home/users/nmc/FLUXNET2015/JULES_12313_30_sites/US_Ha1/plots/US_Ha1_gpp_gb_run_ba137b_mm_v_run_ba137f_mm_v_observations_mm.png')
#plt.savefig('/home/users/nmc/FLUXNET2015/JULES_12313_30_sites/US_Ha1/plots/US_Ha1_gpp_gb_run_ba137b_mm_v_run_ba137f_mm_v_observations_mm.pdf')
plt.show()
"""