I tried your method and it works well. I am also experimenting with the
square root of cos(latitude) as the weighting for each cell. I have
included the script and output for both methods. Thanks again.
! Compare area-based grid-cell weights against sqrt(cos(lat)) weights
! for the AIR_SUBSET grid (2.5-degree global NCEP grid, 144x73).
let pi = 3.141593 ! fixed: was 3.141529 (digits transposed; pi = 3.14159265...)
! Cell area ~ XBOX * YBOX * cos(lat); COS() takes radians, hence pi/180.
let xycell_size = XBOX[g=air_subset] * YBOX[g=air_subset] * COS(Y[g=air_subset]*(pi/180))
let sum = xycell_size[x=@sum, y=@sum] ! unweighted sum of grid
let xycell_weights = xycell_size / sum ! normalize the weights
let cos_weights = (COS(Y[g=air_subset]*(pi/180)))^0.5 !^0.5 is square root
say Weights around the polar regions
say " XYCELL_SIZE XYCELL_WEI COS_WEIGHTS"
List/noheader/precision=7 xycell_size[i=1, j=1:5], xycell_weights[i=1, j=1:5], cos_weights[j=1:5]
say Weights around the equatorial regions
say " XYCELL_SIZE XYCELL_WEI COS_WEIGHTS"
List/noheader/precision=7 xycell_size[i=1, j=34:40], xycell_weights[i=1, j=34:40], cos_weights[j=34:40]
Weights around the polar regions
XYCELL_SIZE XYCELL_WEI COS_WEIGHTS
90S / 1: 0.000199 0.0000733 0.0056484
87.5S / 2: 0.272814 0.1003226 0.2089265
85S / 3: 0.544911 0.2003813 0.2952723
82.5S / 4: 0.815970 0.3000585 0.3613242
80S / 5: 1.085475 0.3991643 0.4167446
Weights around the equatorial regions
XYCELL_SIZE XYCELL_WEI COS_WEIGHTS
7.5S / 34: 6.196532 0.1421758 0.995713
5S / 35: 6.226218 0.1428569 0.998096
2.5S / 36: 6.244051 0.1432661 0.999524
0 / 37: 6.250000 0.1434026 1.000000
2.5N / 38: 6.244051 0.1432661 0.999524
5N / 39: 6.226218 0.1428569 0.998096
7.5N / 40: 6.196532 0.1421758 0.995713
2009/11/7 Steve Hankin
<Steven.C.Hankin@xxxxxxxx>
Hi Michael,
Have you tried something like
let xycell_size = XBOX[g=air_subset] * YBOX[g=air_subset] *
COS(Y[g=air_subset]*pi/180)
and then normalize these weights by dividing by xycell_size[x=@sum,
y=@sum]?
I'm typing this suggestion off-line ... untested.
- Steve
====================================
Michael Kent wrote:
Hello Everyone
I am using Ferret to pre-process my data that I got
from NCEP. I have already successfully calculated the
climatological anomalies, but I am having trouble
weighting the cell values by their areas.
My current thought has been to use a global variable with
its cell the values set to one. Then find each cell's area in
this region and divide it by the average cell area. This would
then give a global weighting variable which I could then apply
to my data. However my problem is that I can't yet
get the cell_area out of the loop, and ones[i=@din, j=@din]
returns the area for the entire globe. Please can someone help
me with weighting my grid values by the cell areas.
I have included the script, netcdf header information, and
the netcdf file (for 1 month). I am using Ferret v6.3
in Linux Ubuntu Intrepid.
Thanks in advance
Michael
test.jnl:
!==========
use "test.nc"
! Create a variable over the globe where each cell's value is one
! NOTE(review): the archived line read "let + (0*x...)" — the variable name
! was lost in transcription; reconstructed as "ones" since the loop body
! below references ones[i=@din, j=@din].
let ones = (0*x[gx=air_subset] + 0*y[gy=air_subset])
let max_lon = 144
let max_lat = 73
! Walk every (i,j) cell and print its definite-integral area.
REPEAT/j=1:`max_lat` ( ;\
REPEAT/i=1: `max_lon` ( ;\
let cell_area = ones[i=@din, j=@din] ;\
say `cell_area` ;\
) ;\
)
!==========
ncdump -h test.nc:
netcdf test {
dimensions:
LON = 144 ;
LAT = 73 ;
LEVEL = 17 ;
TIME = UNLIMITED ; // (1 currently)
bnds = 2 ;
variables:
double LON(LON) ;
LON:units = "degrees_east" ;
LON:long_name = "Longitude" ;
LON:point_spacing = "even" ;
LON:modulo = 360. ;
LON:axis = "X" ;
double LAT(LAT) ;
LAT:units = "degrees_north" ;
LAT:long_name = "Latitude" ;
LAT:point_spacing = "even" ;
LAT:axis = "Y" ;
double LEVEL(LEVEL) ;
LEVEL:units = "millibar" ;
LEVEL:long_name = "Level" ;
LEVEL:positive = "down" ;
LEVEL:point_spacing = "even" ;
LEVEL:axis = "Z" ;
double TIME(TIME) ;
TIME:units = "hours since 0001-01-01 00:00:00" ;
TIME:long_name = "Time" ;
TIME:time_origin = "01-JAN-0001 00:00:00" ;
TIME:axis = "T" ;
TIME:bounds = "TIME_bnds" ;
double TIME_bnds(TIME, bnds) ;
float AIR_SUBSET(TIME, LEVEL, LAT, LON) ;
AIR_SUBSET:missing_value = -1.e+34f ;
AIR_SUBSET:_FillValue = -1.e+34f ;
AIR_SUBSET:long_name =
"AIR_ANOMALY[T=\"1-JAN-1957\":\"1-JAN-1957\"]" ;
AIR_SUBSET:history = "From air_anomaly" ;
// global attributes:
:history = "FERRET V6.3 3-Nov-09" ;
:Conventions = "CF-1.0" ;
}