Snippets Collections
from netCDF4 import Dataset
import netCDF4 as nc
import numpy as np
import xarray as xr
from wrf import getvar, vinterp, ALL_TIMES
import os
from datetime import datetime
import dask
from dask.distributed import Client

# Spin up a local Dask cluster with 16 worker processes; the delayed tasks
# built below are executed on these workers by dask.compute().
client = Client(n_workers=16)
@dask.delayed
def WriteSurfVariables(inputDir, outDir, file):
    """Extract surface diagnostics from one WRF output file and write each
    variable to its own NetCDF file named ``<var>_<file>`` in *outDir*.

    Parameters
    ----------
    inputDir : str
        Directory containing the WRF output file.
    outDir : str
        Existing directory where the per-variable files are written.
    file : str
        Name of the WRF output file to process.
    """
    varlist = ['slp', 'T2', 'td2', 'uvmet10', 'Q2', 'PSFC', 'RAINC', 'RAINNC',
               'SWDOWN', 'GLW', 'HFX', 'LH', 'OLR', 'SNOW', 'SST', 'QFX']
    # netCDF4.Dataset supports the context-manager protocol; the original
    # never closed the handle, leaking file descriptors across the many
    # delayed tasks scheduled by the driver loop below.
    with Dataset(os.path.join(inputDir, file)) as inputnc:
        for var in varlist:
            outvar = getvar(inputnc, var, timeidx=ALL_TIMES)
            # wrf-python attaches a 'projection' object attribute;
            # presumably dropped here because it cannot be serialized by
            # to_netcdf — TODO confirm.
            del outvar.attrs['projection']
            outvar.to_netcdf(os.path.join(outDir, var + "_" + file))
# --- Driver: build one delayed task per WRF file, then run them all at once ---
now = datetime.now()
print(now)

inputDir = "/blue/dhingmire/Prod_Outputs/ERA5/1981/d2"
outDir = "/blue/dhingmire/Processed_data/ERA5/testDir/"

# Construct the whole task graph first (one delayed call per input file) so a
# single compute() lets Dask schedule every file concurrently on the workers.
results3d = [WriteSurfVariables(inputDir, outDir, file)
             for file in os.listdir(inputDir)]

dask.compute(results3d)

now = datetime.now()
print("now =", now)
import xarray as xr
import numpy as np
from typing import *
import matplotlib.pyplot as plt
import cartopy.crs as ccrs
#import proplot as pplt
import datetime as dt
import warnings
warnings.filterwarnings('ignore')
import os
import sys
import geocat.comp.interpolation
import dask


from dask.diagnostics import ProgressBar
from datetime import datetime

from dask.distributed import Client, LocalCluster
# Start a local Dask cluster with default worker sizing and attach a client;
# map_blocks/persist calls below execute on this cluster.
cluster = LocalCluster()
client = Client(cluster)

# NOTE(review): a duplicate `import warnings; warnings.filterwarnings("ignore")`
# pair was removed here — warnings are already silenced earlier in this file.
# The bare `client` expression (a Jupyter display artifact, a no-op when run
# as a script) was removed as well.
def func(obj, ps, hyam, hybm, orog, p0=100000.0, new_levels=None):
    """Interpolate a temperature field from hybrid-sigma to pressure levels.

    Designed to be called through ``xarray.DataArray.map_blocks`` with
    ``args=[ps, hyam, hybm, orog]``; the original positional interface is
    unchanged — ``p0`` and ``new_levels`` are backward-compatible
    generalizations of the constants that were hard-coded inside the body.

    Parameters
    ----------
    obj : xarray.DataArray
        Temperature on hybrid levels; assumes dims are ordered
        (time, lev, lat, lon) since ``obj[:, 0, :, :]`` is used as the
        bottom-level temperature — TODO confirm against the caller.
    ps : xarray.DataArray
        Surface pressure over the same times/grid.
    hyam, hybm : xarray.DataArray
        Hybrid A and B coefficients at layer midpoints.
    orog : xarray.DataArray
        Surface geopotential (orography * g) used for extrapolation below
        the surface.
    p0 : float, optional
        Reference pressure in Pa (default 100000.0, as originally hard-coded).
    new_levels : numpy.ndarray, optional
        Target pressure levels in Pa; defaults to the 37-level set used
        throughout this script.

    Returns
    -------
    xarray.DataArray
        Temperature interpolated to ``new_levels``.
    """
    if new_levels is None:
        new_levels = np.array(
            [100000., 97500., 95000., 92500., 90000., 87500., 85000., 82500.,
             80000., 77500., 75000., 70000., 65000., 60000., 55000., 50000.,
             45000., 40000., 35000., 30000., 25000., 22500., 20000., 17500.,
             15000., 12500., 10000., 7000., 5000., 3000., 2000., 1000., 700.,
             500., 300., 200., 100.], dtype=float)
    return geocat.comp.interpolation.interp_hybrid_to_pressure(
        obj, ps, hyam, hybm,
        p0=p0, new_levels=new_levels,
        lev_dim='lev', method='linear', extrapolate=True,
        variable='temperature', t_bot=obj[:, 0, :, :], phi_sfc=orog)
OroDir = '/blue/dhingmire/scripts/regridCMIP6ToERA5/vertical_interp/'

# --- constants (defined once; the original duplicated these two blocks) ---
p0Con = 100000.0
newLevs = np.array(
    [100000., 97500., 95000., 92500., 90000., 87500., 85000., 82500., 80000.,
     77500., 75000., 70000., 65000., 60000., 55000., 50000., 45000., 40000.,
     35000., 30000., 25000., 22500., 20000., 17500., 15000., 12500., 10000.,
     7000., 5000., 3000., 2000., 1000., 700., 500., 300., 200., 100.],
    dtype=float)

# Surface geopotential: orography height (m) times g (m s^-2).
inOroF = xr.open_dataset(OroDir + 'orog_fx_CESM2_historical_r11i1p1f1_gn.nc')
orog = inOroF.orog * 9.80616

InDir = '/blue/dhingmire/CMIP6_WRFIn/CESM2/historical/ta/'
OutDir = '/blue/dhingmire/CMIP6_WRFIn/CESM2/historical/remapped/ta/'

# Set gives O(1) membership tests for the "already processed" check below.
outfiles = set(os.listdir(OutDir))

for file in os.listdir(InDir):
    print(file)

    inTemp = xr.open_dataset(InDir + file)

    ps = inTemp.ps      # surface pressure
    hyam = inTemp.a     # hybrid A coefficients
    hybm = inTemp.b     # hybrid B coefficients
    ta = inTemp.ta      # temperature on hybrid levels

    # Interpolate a single timestep eagerly to obtain a result with the
    # correct output dims/coords, used as the map_blocks template below.
    ta_ref = geocat.comp.interpolation.interp_hybrid_to_pressure(
        ta[0, :, :, :], ps[0, :, :], hyam, hybm,
        p0=p0Con, new_levels=newLevs,
        lev_dim='lev', method='linear', extrapolate=True,
        variable='temperature', t_bot=ta[0, 0, :, :], phi_sfc=orog)

    chunk = 10  # time-chunk size used while interpolating
    for t in np.unique(inTemp.time.dt.year.values):
        year = str(t)
        print(year)
        startDate = year + '-01-01'
        endDate = year + '-12-31'
        outFname = 'ta_6hrPlev_' + year + '.nc'
        outF = OutDir + outFname

        # Skip years already produced by a previous run (guard clause
        # replaces the original `if(not outFname in (outfiles))` nesting).
        if outFname in outfiles:
            continue

        taIn = ta.sel(time=slice(startDate, endDate)).chunk(
            {'lat': -1, 'lon': -1, 'time': chunk, 'lev': -1})
        psIn = ps.sel(time=slice(startDate, endDate)).chunk(
            {'lat': -1, 'lon': -1, 'time': chunk})
        # Broadcast the single-timestep template along this year's time axis
        # so map_blocks knows the output shape/coords.
        ta_Sample = ta_ref.expand_dims(
            dim={"time": ta.sel(time=slice(startDate, endDate)).time}).chunk(
            {'lat': -1, 'lon': -1, 'time': chunk, 'plev': -1})

        mapped = taIn.map_blocks(func, args=[psIn, hyam, hybm, orog],
                                 template=ta_Sample)
        ta_ = mapped.persist()
        fb = ta_.chunk(time=100)  # rechunk for fewer, larger writes
        fb.to_netcdf(outF)
        # Drop references so Dask can release the persisted results before
        # the next year is processed.
        del mapped, ta_, fb

    # Release the NetCDF handle before opening the next input file
    # (the original leaked one handle per file).
    inTemp.close()
# Re-centre longitudes from [0, 360) to [-180, 180), then sort so the
# longitude coordinate is monotonically increasing after the wrap-around.
SLP = SLP.assign_coords(longitude=((SLP.longitude + 180) % 360) - 180)
SLP = SLP.sortby("longitude")
Z = Z.assign_coords(longitude=((Z.longitude + 180) % 360) - 180)
Z = Z.sortby("longitude")
star

Thu Feb 08 2024 01:12:53 GMT+0000 (Coordinated Universal Time)

#dask #xarray #parallel
star

Thu Feb 08 2024 01:11:37 GMT+0000 (Coordinated Universal Time)

#dask #xarray #parallel
star

Sun Sep 03 2023 00:40:02 GMT+0000 (Coordinated Universal Time)

#python #xarray

Save snippets that work with our extensions

Available in the Chrome Web Store Get Firefox Add-on Get VS Code extension