# -*- coding: utf-8 -*-
# ---
# jupyter:
#   jupytext:
#     text_representation:
#       extension: .py
#       format_name: light
#       format_version: '1.5'
#     jupytext_version: 1.11.3
#   kernelspec:
#     display_name: Python 3
#     language: python
#     name: python3
# ---

# This python script converts the processed .mat file for native Ceilometer
# observations into a user-friendly NetCDF file. It also converts all time
# averaged .mat files into their respective netCDF files. These final (four)
# netCDF files are found in the Bolin Centre Database. Data from the ARTofMELT
# expedition in Spring 2023. This code is written by Sonja Murto (2024).

# +
# import modules
import sys,glob,os
import numpy as np
import math
import pandas as pd
import matplotlib.dates as mdates
import xarray as xr
import time,datetime
import itertools
import matplotlib.pyplot as plt
import matplotlib.colors as mcolors  # FIX: was missing -> truncate_colormap() raised NameError
import string
import scipy.io
import matplotlib.ticker as mticker
from collections import Counter

#metpy functions for thermodynamical variable conversions
import metpy.calc as mpcalc
from metpy.units import units

import warnings
warnings.filterwarnings('ignore')


# +
# define functions

def datestring_fromtuple(date=(2001,2,15,12)):
    '''Format a date tuple as a compact date string.

    Input: a date tuple (YYYY,M,D,h)
    Output: Date string: 'YYYYMMDD_hh' (month/day/hour zero-padded to 2 digits)
    '''
    return "{}{:0>2}{:0>2}_{:0>2}".format(
        date[0], str(date[1]), str(date[2]), str(date[3]))


def datetuple(date):
    '''Return (year, month, day, hour) from a datetime object,
    i.e. from pandas using .to_pydatetime()
    '''
    return date.year, date.month, date.day, date.hour


def strtodatetuple(datestr='19900109_12', format_st='%Y%m%d_%H', returntuple=True):
    '''Parse *datestr* according to *format_st*.

    Returns a (Y, M, D, h) tuple when *returntuple* is True (default),
    otherwise the full datetime object. (Parses once instead of twice.)
    '''
    parsed = datetime.datetime.strptime(datestr, format_st)
    return datetuple(parsed) if returntuple else parsed


def returndatetime_fromdoy(doys, year=2023):
    '''Function to return datetimes from fractional DOY values.

    Takes the leap year into account if given for a leap year (handled
    implicitly by the datetime/timedelta arithmetic; DOY 1.0 == Jan 1 00:00).
    Input: list of DOYs
    Output: List of Dates (in datetime) corresponding to the given doys
    Note, returns in microseconds. If second rounded:
    [pd.to_datetime(T).round('1s') for T in date]
    '''
    start_of_year = datetime.datetime(year, 1, 1)
    return [start_of_year + datetime.timedelta(days=float(d) - 1) for d in doys]


def getList(dict):
    '''Return a list of keys for a pandas dictionary.

    NOTE(review): the parameter name shadows the builtin ``dict``; it is kept
    unchanged for backward compatibility with existing callers.
    '''
    return list(dict.keys())


def truncate_colormap(cmap, minval=0.0, maxval=1.0, n=100, returncolors=False):
    '''Create a new colormap from *cmap*, limited to [minval, maxval] with n colors.

    The default return is a LinearSegmentedColormap, which can be used as a
    colormap for imshow, contourf, ...
    If *returncolors* is True, an array of n colors from the chosen colormap is
    returned instead. This can be used in plots when each event has a color (n=50).
    '''
    new_cmap = mcolors.LinearSegmentedColormap.from_list(
        'new_cmap', plt.get_cmap(cmap)(np.linspace(minval, maxval, n)))
    if returncolors:
        new_cmap = plt.get_cmap(cmap)(np.linspace(minval, maxval, n))
    return new_cmap
# -

#define directories - change according to your computer settings
cwd = os.getcwd()
savefigpath = cwd + '/FIGS/Example_figs/' #directory for figures
#directory where to find the data (.mat files)
load_data = r'/Volumes/My Passport for Mac/MISU_sticka/WORK/viktigapapper/ARTofMELT_2023/AoM_data_mod/WX/cl31_output/'

# # Convert ceilometer data from .mat files into .nc files

# Create secondary dimensions
cloud_layer_levs=np.arange(1,4,1).astype('int32') # 3 levels
sc_layer_levs=np.arange(1,6,1).astype('int32') # 5 levels
range_levs=np.arange(1,771,1).astype('int32') # 770 levels
print(cloud_layer_levs,sc_layer_levs)

# ## 30s data

# load 30s native data
CL_SM_org = scipy.io.loadmat(load_data + 'CL31_ceilometer_ARTofMELT_20230507_20230613_30s_v01.mat',struct_as_record=True) #30s

CL_SM_org.keys()

# get the data in the mat file
Names=CL_SM_org['cl31'].dtype.names
ndata = {n: CL_SM_org['cl31'][n][0, 0] for n in Names}
print(Names)

#create time dimension
Time_steps=returndatetime_fromdoy(np.array(list(itertools.chain.from_iterable(ndata['doy'])),dtype=float))
Times_nomicrosec=[pd.to_datetime(T).round('1s') for T in Time_steps]
Time_steps_dt64_org=[np.datetime64(t) for t in Time_steps]
Time_steps_dt64_org=np.array(Time_steps_dt64_org,dtype='datetime64[ns]') #time dimension

# +
# convert data into pandas dataframes
DF_30s=pd.DataFrame(index=range(len(ndata['doy'])),)
DF_30s['doy']=np.array(list(itertools.chain.from_iterable(ndata['doy'])),dtype=float)
DF_30s['year']=np.array([Times_nomicrosec[i].year for i in range(len(Time_steps))],dtype=int)
DF_30s['month']=np.array([Times_nomicrosec[i].month for i in range(len(Time_steps))],dtype=int)
DF_30s['day']=np.array([Times_nomicrosec[i].day for i in range(len(Time_steps))],dtype=int)
DF_30s['hour']=np.array([Times_nomicrosec[i].hour for i in range(len(Time_steps))],dtype=int)
DF_30s['minute']=np.array([Times_nomicrosec[i].minute for i in range(len(Time_steps))],dtype=int)
DF_30s['second']=np.array([Times_nomicrosec[i].second for i in range(len(Time_steps))],dtype=int)
DF_30s['cloudcode']=np.array(list(itertools.chain.from_iterable(ndata['cloudcode'])),dtype=int)
DF_30s['vert_vis']=np.array(list(itertools.chain.from_iterable(ndata['vert_vis'])),dtype=float)
DF_30s['high_sig']=np.array(list(itertools.chain.from_iterable(ndata['high_sig'])),dtype=float)
DF_30s.head()
# -

#check for nans - no nans!
DF_30s.iloc[DF_30s[DF_30s.doy.isna()].index] #get a list of columns DF_30s.columns # + # save each variable into Data Arrays, with attribute data #1D variables: dimension time da_doy=xr.DataArray(data=np.array(DF_30s['doy']).astype('float32'),name="day_of_year", dims=["time"],coords=dict(time=Time_steps_dt64_org), attrs=dict(type="float32",dimension="time",units="1",long_name="Day of Year", description="time as decimal day of year"),) #added as 1D #date and time in separate arrays; microseconds approximated to seconds da_year=xr.DataArray(data=np.array(DF_30s['year']).astype('int32'),name="year",dims=["time"], coords=dict(time=Time_steps_dt64_org), attrs=dict(type="int32",dimension="time",units="1",long_name="Year"),) #added as 1D da_month=xr.DataArray(data=np.array(DF_30s['month']).astype('int32'),name="month",dims=["time"], coords=dict(time=Time_steps_dt64_org), attrs=dict(type="int32",dimension="time",units="1", long_name="Month"),) #added as 1D da_day=xr.DataArray(data=np.array(DF_30s['day']).astype('int32'),name="day",dims=["time"], coords=dict(time=Time_steps_dt64_org), attrs=dict(type="int32",dimension="time",units="1",long_name="Day"),) #added as 1D da_hour=xr.DataArray(data=np.array(DF_30s['hour']).astype('int32'),name="hour",dims=["time"], coords=dict(time=Time_steps_dt64_org), attrs=dict(type="int32",dimension="time",units="1",long_name="Hour"),) #added as 1D da_min=xr.DataArray(data=np.array(DF_30s['minute']).astype('int32'),name="minute",dims=["time"], coords=dict(time=Time_steps_dt64_org), attrs=dict(type="int32",dimension="time",units="1",long_name="Minute"),) #added as 1D da_sec=xr.DataArray(data=np.array(DF_30s['second']).astype('int32'),name="second",dims=["time"], coords=dict(time=Time_steps_dt64_org), attrs=dict(type="int32",dimension="time",units="1",long_name="Second", description="Time averaged to closest second"),) #added as 1D #add cloudcode flag 
# Data-quality flag: number of detected cloud bases / obscuration per time step.
da_cloudcode=xr.DataArray(data=np.array(DF_30s['cloudcode']).astype('int32'),name="flag_cloudcode",dims=["time"],
                          coords=dict(time=Time_steps_dt64_org),
                          attrs=dict(type="int32",dimension="time",units="1",
                                     long_name='Data flag: Cloudcode',
                                     flag_values="-1,0,1,2,3,4",
                                     flag_meanings="missing_data\nno_significant_backscatter\none_cloud_base_detected\ntwo_cloud_bases_detected\nthree_cloud_bases_detected\nfull_obscuration",
                                     description="Code for number of cloud bases detected; see Readme document for more information"),)
# vert_vis / high_sig only carry values at flag_cloudcode 4 (obscured), else NaN
# (per the description attributes below).
da_vertvis=xr.DataArray(data=np.array(DF_30s['vert_vis']).astype('float32'),
                        name="vertical_visibility",dims=["time"],coords=dict(time=Time_steps_dt64_org),
                        attrs=dict(type="float32",dimension="time", units="m",
                                   long_name="Vertical Visibilility",
                                   description="Vertical visibility given in case of obscured cloud base (at flag_cloudcode 4), else NaN"),) #added as 1D
da_high_sig=xr.DataArray(data=np.array(DF_30s['high_sig']).astype('float32'),
                         name="highest_detected_signal",dims=["time"],coords=dict(time=Time_steps_dt64_org),
                         attrs=dict(type="float32",dimension="time", units="m",
                                    long_name="Highest Signal Detected",
                                    description="Highest signal detected given in case of obscured cloud base (at flag_cloudcode 4), else NaN"),) #added as 1D

#1D: dimension range_levels
da_ceilrange=xr.DataArray(data=np.array(list(itertools.chain.from_iterable(ndata['ceil_range'])),dtype=int).astype('int32'),
                          name="ceilometer_range",dims=["range_levels"],coords=dict(range_levels=range_levs),
                          attrs=dict(type="int32",dimension="range_levels",units="m",
                                     long_name="Ceilometer Range",
                                     description="Ranges for the ceilometer backscatter profile, including the instrument height",),) #added as 1D

#2D variable: dimension time/cloud_layer
da_baseht=xr.DataArray(data=np.array(ndata['base_ht'],dtype=float).astype('float32'),
                       name="cloud_base_altitude",dims=["time","cloud_layer"],
                       coords=dict(time=Time_steps_dt64_org,cloud_layer=cloud_layer_levs),
                       attrs=dict(type="float32",dimension="time, cloud_layer", units="m",
                                  long_name="Cloud Base Altitude",
                                  description="cloud base height of 1-3 cloud layers; NaN if no layer detected. " +\
                                  "Instrument height incorporated."),) #added as 2D

#2D variables: dimension time/sky_condition_layer
da_scfrac=xr.DataArray(data=np.array(ndata['sc_frac'],dtype=float).astype('float32'),
                       name="sky_condition_cloud_fraction",dims=["time","sky_condition_layer"],
                       coords=dict(time=Time_steps_dt64_org,sky_condition_layer=sc_layer_levs),
                       attrs=dict(type="float32",dimension="time, sky_condition_layer", units="octal",
                                  long_name="Sky Condition Cloud Fraction",
                                  description="Cloud fraction calculated with the sky condition algorithm. "+\
                                  "0-8 = cloud coverage of up to 5 levels; 9 = obscuration. " +\
                                  "NaN = missing data or no detected layer."),) #added as 2D
da_scht=xr.DataArray(data=np.array(ndata['sc_ht'],dtype=float).astype('float32'),
                     name="sky_condition_cloud_altitude",dims=["time","sky_condition_layer"],
                     coords=dict(time=Time_steps_dt64_org,sky_condition_layer=sc_layer_levs),
                     attrs=dict(type="float32",dimension="time, sky_condition_layer", units="m",
                                long_name="Sky Condition Cloud Altitude",
                                description = "Cloud layer height calculated with the sky condition algorithm. "+\
                                "Cloud layer height given for 1-5 sky condition layers; NaN if no layer detected. "+\
                                "Vertical visibility is reported as height if obscuration (at sky_condition_cloud_fraction 9). "+\
                                "Instrument height incorporated."),) #added as 2D

#2D variable: dimension time/range
da_bsprof=xr.DataArray(data=np.array(ndata['bs_prof'],dtype=float).astype('float32'),
                       name="backscatter_profile",dims=["time","range_levels"],
                       coords=dict(time=Time_steps_dt64_org,range_levels=range_levs),
                       attrs=dict(type="float32",dimension="time, range_levels",
                                  units="1 km-1 steradians-1",
                                  long_name="Backscatter Profile",
                                  description="backscatter coefficient profile"),) #added as 2D

# +
# quick plot of cloud base heights
da_baseht.sel(cloud_layer=1).plot(color='r',label='cloud_layer = 1')
da_baseht.sel(cloud_layer=2).plot(color='b',label='cloud_layer = 2')
da_baseht.sel(cloud_layer=3).plot(color='y',label='cloud_layer = 3')
plt.title('')
plt.legend()
plt.show()

# +
#merge all arrays into one
ds_all=xr.merge([da_doy,da_year,da_month,da_day,da_hour,da_min,da_sec,
                 da_ceilrange,da_cloudcode,da_vertvis,da_high_sig,
                 da_baseht,da_scfrac,da_scht,da_bsprof])
len(ds_all)
# -

plt.scatter(ds_all.time,ds_all.vertical_visibility,c=ds_all.flag_cloudcode)
plt.colorbar()

#get the time ranges: start
Times_nomicrosec[0]

#get the time ranges: end
Times_nomicrosec[-1]

# +
# add global attribute data. Note that the geospatial_bounds are taken from the gps data at the weather station sensor
# (Weather station data: start: 2023-05-01 7:00:05, end = 2023-06-13 16:24:35)
ds_all.attrs = {"Conventions" :"CF-1.8",
                "source" : "Ceilometer",
                "instrument_model" : "Vaisala Ceilometer CL31",
                "creator_name" : "Sonja Murto",
                "creator_email" : "sonja.murto@misu.su.se",
                "creator_url" : "https://orcid.org/0000-0002-4966-9077",
                "institution" : "Stockholm University",
                "processing_software" : "Matlab (for creating the matlab file) and a jupyter notebook script (netCDF)",
                "sampling_interval": "30s",
                "product_version" : "v01",
                "last_revised_date" : "2024-05-31T15:00:00",
                "project" : "ARTofMELT",
                "project_principal_investigator" : "Michael Tjernström",
                "project_principal_investigator_email" : "michaelt@misu.su.se",
                "project_principal_investigator_url" : "https://orcid.org/0000-0002-6908-7410",
                "acknowledgement" : " Knut och Alice Wallenbergs Stiftelse, Grant 2016-0024",
                "platform" : "Swedish Icebreaker Oden",
                "platform_type" : "On Oden's 7th deck above the bridge",
                "deployment_mode" : "ship",
                "title" : "Ceilometer cloud base height, vertical visibility and backscatter profiles",
                "feature_type" : "time series",
                "time_coverage_start" : "2023-05-07T00:00:22",
                "time_coverage_end" : "2023-06-13T16:24:22",
                "geospatial_bounds" : "80.52392166666667N, -3.8737749999999997E, 78.04355166666666N, 15.660881666666667E",
                "platform_altitude" : "Located at approximately 25 m a.s.l",
                "location_keywords": "Oden, Arctic Ocean, Fram Strait, atmosphere, on the ship",
                "comments" : "This file consists of ceilometer data " +\
                "measured with the Vaisala Ceilometer CL31 that was located on the 7th deck, "+\
                "above the bridge (at approximately 25m)." + \
                "The sky condition measurements are time averages to represent an area average. " + \
                "The vertical resolution is 10m * 770, but the measurement height (25m) is included in the backscatter profile ranges, " + \
                "as well as in the cloud base heights (cloud_base_altitude and sky_condition_cloud_altitude). " + \
                "Geospatial bounds are taken from the gps location of the weather station dataset located on Oden. " +\
                "Time variables month, day, hour, minute and second are approximated to the nearest second. " +\
                "Data produced by Sonja Murto. See the document - Readme_CL.txt - for more details."}
# -

ds_all

# Save to netCDF
ds_all.to_netcdf(load_data + 'CL31_ceilometer_ARTofMELT_20230507_20230613_30s_v01.nc')

# ## Create netCDF from 1 min average files - remove times with no data points

CL_SM_avg = scipy.io.loadmat(load_data + 'CL31_ceilometer_ARTofMELT_20230507_20230613_1min_v01.mat',struct_as_record=True)

CL_SM_avg.keys()

# get the data in the mat file
Names=CL_SM_avg['cl31_av'].dtype.names
ndata = {n: CL_SM_avg['cl31_av'][n][0, 0] for n in Names}
print(Names)

# +
# create pandas dataframe from the data
# 1D variables
DF_1min=pd.DataFrame(index=range(len(ndata['doy'])),)
DF_1min['doy']=np.array(list(itertools.chain.from_iterable(ndata['doy'])),dtype=float)
DF_1min['cloudcode']=np.array(list(itertools.chain.from_iterable(ndata['cloudcode'])),dtype=float)
DF_1min['vert_vis']=np.array(list(itertools.chain.from_iterable(ndata['vert_vis'])),dtype=float)
DF_1min['high_sig']=np.array(list(itertools.chain.from_iterable(ndata['high_sig'])),dtype=float)
#2D cloudlayer
DF_1min_cloudbh=pd.DataFrame(index=range(len(ndata['doy'])),data=np.array(ndata['base_ht'],dtype=float))
#2D sc_layer_ht
DF_1min_scht=pd.DataFrame(index=range(len(ndata['doy'])),data=np.array(ndata['sc_ht'],dtype=float))
#2D sc_layer_frac
DF_1min_scfrac=pd.DataFrame(index=range(len(ndata['doy'])),data=np.array(ndata['sc_frac'],dtype=float))
#2D ranges
DF_1min_range=pd.DataFrame(index=range(len(ndata['doy'])),data=np.array(ndata['bs_prof'],dtype=float))
# -

len(DF_1min)

#check for NaNs
# Split the index into valid rows (doy present) and rows where doy is NaN;
# only the valid rows are kept in all five dataframes below.
selection = DF_1min.iloc[DF_1min[~DF_1min.doy.isna()].index].index.values # size with no nans
idx_nan = DF_1min.iloc[DF_1min[DF_1min.doy.isna()].index].index.values # length of nan times
print(len(selection),len(idx_nan))

#select only no-nan times; for all 5 pandas dataframes
DF_1min=DF_1min.iloc[selection].reset_index(drop=True)
DF_1min_cloudbh=DF_1min_cloudbh.iloc[selection].reset_index(drop=True)
DF_1min_scht=DF_1min_scht.iloc[selection].reset_index(drop=True)
DF_1min_scfrac=DF_1min_scfrac.iloc[selection].reset_index(drop=True)
DF_1min_range=DF_1min_range.iloc[selection].reset_index(drop=True)

#check for right length
print(len(DF_1min),len(DF_1min_cloudbh),len(DF_1min_scfrac),len(DF_1min_scht),len(DF_1min_range))

DF_1min.iloc[DF_1min[DF_1min.doy.isna()].index] #check again for NaNs - empty!

# set time dimension
Time_steps=returndatetime_fromdoy(np.array(DF_1min.doy.values,dtype=float))
Times_nomicrosec=[pd.to_datetime(T).round('1s') for T in Time_steps]
Time_steps_dt64_org=[np.datetime64(t) for t in Time_steps]
Time_steps_dt64_org=np.array(Time_steps_dt64_org,dtype='datetime64[ns]')

# add date values
DF_1min['year']=np.array([Times_nomicrosec[i].year for i in range(len(Time_steps))],dtype=int)
DF_1min['month']=np.array([Times_nomicrosec[i].month for i in range(len(Time_steps))],dtype=int)
DF_1min['day']=np.array([Times_nomicrosec[i].day for i in range(len(Time_steps))],dtype=int)
DF_1min['hour']=np.array([Times_nomicrosec[i].hour for i in range(len(Time_steps))],dtype=int)
DF_1min['minute']=np.array([Times_nomicrosec[i].minute for i in range(len(Time_steps))],dtype=int)
DF_1min['second']=np.array([Times_nomicrosec[i].second for i in range(len(Time_steps))],dtype=int)

# +
# create xr DataArrays
# (same variable layout and attributes as the 30s dataset above, built from
#  the 1 min averaged dataframes)

#1D: dimension time
da_doy=xr.DataArray(data=np.array(DF_1min['doy']).astype('float32'),name="day_of_year",
                    dims=["time"],coords=dict(time=Time_steps_dt64_org),
                    attrs=dict(type="float32",dimension="time",units="1",long_name="Day of Year",
                               description="time as decimal day of year"),) #added as 1D

#date and time in separate arrays; microseconds approximated to seconds
da_year=xr.DataArray(data=np.array(DF_1min['year']).astype('int32'),name="year",dims=["time"],
                     coords=dict(time=Time_steps_dt64_org),
                     attrs=dict(type="int32",dimension="time",units="1",long_name="Year"),) #added as 1D
da_month=xr.DataArray(data=np.array(DF_1min['month']).astype('int32'),name="month",dims=["time"],
                      coords=dict(time=Time_steps_dt64_org),
                      attrs=dict(type="int32",dimension="time",units="1",
                                 long_name="Month"),) #added as 1D
da_day=xr.DataArray(data=np.array(DF_1min['day']).astype('int32'),name="day",dims=["time"],
                    coords=dict(time=Time_steps_dt64_org),
                    attrs=dict(type="int32",dimension="time",units="1",long_name="Day"),) #added as 1D
da_hour=xr.DataArray(data=np.array(DF_1min['hour']).astype('int32'),name="hour",dims=["time"],
                     coords=dict(time=Time_steps_dt64_org),
                     attrs=dict(type="int32",dimension="time",units="1",long_name="Hour"),) #added as 1D
da_min=xr.DataArray(data=np.array(DF_1min['minute']).astype('int32'),name="minute",dims=["time"],
                    coords=dict(time=Time_steps_dt64_org),
                    attrs=dict(type="int32",dimension="time",units="1",long_name="Minute"),) #added as 1D
da_sec=xr.DataArray(data=np.array(DF_1min['second']).astype('int32'),name="second",dims=["time"],
                    coords=dict(time=Time_steps_dt64_org),
                    attrs=dict(type="int32",dimension="time",units="1",long_name="Second",
                               description="Time averaged to closest second"),) #added as 1D

#add cloudcode flag
da_cloudcode=xr.DataArray(data=np.array(DF_1min['cloudcode']).astype('int32'),name="flag_cloudcode",dims=["time"],
                          coords=dict(time=Time_steps_dt64_org),
                          attrs=dict(type="int32",dimension="time",units="1",
                                     long_name='Data flag: Cloudcode',
                                     flag_values="-1,0,1,2,3,4",
                                     flag_meanings="missing_data\nno_significant_backscatter\none_cloud_base_detected\ntwo_cloud_bases_detected\nthree_cloud_bases_detected\nfull_obscuration",
                                     description="Code for number of cloud bases detected; see Readme document for more information"),)
da_vertvis=xr.DataArray(data=np.array(DF_1min['vert_vis']).astype('float32'),
                        name="vertical_visibility",dims=["time"],coords=dict(time=Time_steps_dt64_org),
                        attrs=dict(type="float32",dimension="time", units="m",
                                   long_name="Vertical Visibilility",
                                   description="Vertical visibility given in case of obscured cloud base (at flag_cloudcode 4), else NaN"),) #added as 1D
da_high_sig=xr.DataArray(data=np.array(DF_1min['high_sig']).astype('float32'),
                         name="highest_detected_signal",dims=["time"],coords=dict(time=Time_steps_dt64_org),
                         attrs=dict(type="float32",dimension="time", units="m",
                                    long_name="Highest Signal Detected",
                                    description="Highest signal detected given in case of obscured cloud base (at flag_cloudcode 4), else NaN"),) #added as 1D

#1D: dimension range_levels
da_ceilrange=xr.DataArray(data=np.array(list(itertools.chain.from_iterable(ndata['ceil_range'])),dtype=int).astype('int32'),
                          name="ceilometer_range",dims=["range_levels"],coords=dict(range_levels=range_levs),
                          attrs=dict(type="int32",dimension="range_levels",units="m",
                                     long_name="Ceilometer Range",
                                     description="Ranges for the ceilometer backscatter profile, including the instrument height",),) #added as 1D

#2D: dimension time/cloud_layer
da_baseht=xr.DataArray(data=np.array(DF_1min_cloudbh,dtype=float).astype('float32'),
                       name="cloud_base_altitude",dims=["time","cloud_layer"],
                       coords=dict(time=Time_steps_dt64_org,cloud_layer=cloud_layer_levs),
                       attrs=dict(type="float32",dimension="time, cloud_layer", units="m",
                                  long_name="Cloud Base Altitude",
                                  description="cloud base height of 1-3 cloud layers; NaN if no layer detected. " +\
                                  "Instrument height incorporated."),) #added as 2D

#2D: dimension time/sky_condition_layer
da_scfrac=xr.DataArray(data=np.array(DF_1min_scfrac,dtype=float).astype('float32'),
                       name="sky_condition_cloud_fraction",dims=["time","sky_condition_layer"],
                       coords=dict(time=Time_steps_dt64_org,sky_condition_layer=sc_layer_levs),
                       attrs=dict(type="float32",dimension="time, sky_condition_layer", units="octal",
                                  long_name="Sky Condition Cloud Fraction",
                                  description="Cloud fraction calculated with the sky condition algorithm. "+\
                                  "0-8 = cloud coverage of up to 5 levels; 9 = obscuration. " +\
                                  "NaN = missing data or no detected layer."),) #added as 2D
da_scht=xr.DataArray(data=np.array(DF_1min_scht,dtype=float).astype('float32'),
                     name="sky_condition_cloud_altitude",dims=["time","sky_condition_layer"],
                     coords=dict(time=Time_steps_dt64_org,sky_condition_layer=sc_layer_levs),
                     attrs=dict(type="float32",dimension="time, sky_condition_layer", units="m",
                                long_name="Sky Condition Cloud Altitude",
                                description = "Cloud layer height calculated with the sky condition algorithm. "+\
                                "Cloud layer height given for 1-5 sky condition layers; NaN if no layer detected. "+\
                                "Vertical visibility is reported as height if obscuration (at sky_condition_cloud_fraction 9). "+\
                                "Instrument height incorporated."),) #added as 2D

#2D: dimension time/range
da_bsprof=xr.DataArray(data=np.array(DF_1min_range,dtype=float).astype('float32'),
                       name="backscatter_profile",dims=["time","range_levels"],
                       coords=dict(time=Time_steps_dt64_org,range_levels=range_levs),
                       attrs=dict(type="float32",dimension="time, range_levels",
                                  units="1 km-1 steradians-1",
                                  long_name="Backscatter Profile",
                                  description="backscatter coefficient profile"),) #added as 2D

# +
#merge all arrays into one
ds_all=xr.merge([da_doy,da_year,da_month,da_day,da_hour,da_min,da_sec,
                 da_ceilrange,da_cloudcode,da_vertvis,da_high_sig,
                 da_baseht,da_scfrac,da_scht,da_bsprof])
len(ds_all)
# -

#get time range - start
Times_nomicrosec[0]

#get time range - end
Times_nomicrosec[-1]

# +
# modify attributes (start/end time and text in comments);
# note again geospatial bounds from the weather station data
ds_all.attrs = {"Conventions" :"CF-1.8",
                "source" : "Ceilometer",
                "instrument_model" : "Vaisala Ceilometer CL31",
                "creator_name" : "Sonja Murto",
                "creator_email" : "sonja.murto@misu.su.se",
                "creator_url" : "https://orcid.org/0000-0002-4966-9077",
                "institution" : "Stockholm University",
                "processing_software" : "Matlab (for creating the matlab file) and a jupyter notebook script (netCDF)",
                "sampling_interval": "original 30s; here 1min averages",
                "product_version" : "v01",
                "last_revised_date" : "2024-05-31T15:00:00",
                "project" : "ARTofMELT",
                "project_principal_investigator" : "Michael Tjernström",
                "project_principal_investigator_email" : "michaelt@misu.su.se",
                "project_principal_investigator_url" : "https://orcid.org/0000-0002-6908-7410",
                "acknowledgement" : " Knut och Alice Wallenbergs Stiftelse, Grant 2016-0024",
                "platform" : "Swedish Icebreaker Oden",
                "platform_type" : "On Oden's 7th deck above the bridge",
                "deployment_mode" : "ship",
                "title" : "Ceilometer cloud base height, vertical visibility and backscatter profiles",
                "feature_type" : "time series",
                "time_coverage_start" : "2023-05-07T00:00:52",
                "time_coverage_end" : "2023-06-13T16:23:37",
                "geospatial_bounds" : "80.52392166666667N, -3.8737749999999997E, 78.04355166666666N, 15.660881666666667E",
                "platform_altitude" : "Located at approximately 25 m a.s.l",
                "location_keywords": "Oden, Arctic Ocean, Fram Strait, atmosphere, on the ship",
                "comments" : "This file consists of 1 min averages of ceilometer data " +\
                "measured with the Vaisala Ceilometer CL31 that was located on the 7th deck, "+\
                "above the bridge (at approximately 25m)." + \
                "The sky condition measurements are time averages to represent an area average. " + \
                "The vertical resolution is 10m * 770, but the measurement height (25m) is included in the backscatter profile ranges, " + \
                "as well as in the cloud base heights (cloud_base_altitude and sky_condition_cloud_altitude). " + \
                "Geospatial bounds are taken from the gps location of the weather station dataset located on Oden. " +\
                "Time variables month, day, hour, minute and second are approximated to the nearest second. " +\
                "Data produced by Sonja Murto. See the document - Readme_CL.txt - for more details."}
# -

ds_all

#save to netCDF
ds_all.to_netcdf(load_data + 'CL31_ceilometer_ARTofMELT_20230507_20230613_1min_v01.nc')

# +
#test plot
ds_all.sel(cloud_layer=1).cloud_base_altitude.plot(color='r',label='cloud_layer = 1')
ds_all.sel(cloud_layer=2).cloud_base_altitude.plot(color='b',label='cloud_layer = 2')
ds_all.sel(cloud_layer=3).cloud_base_altitude.plot(color='y',label='cloud_layer = 3')
plt.title('')
plt.legend()
plt.show()

# +
ds_all.sel(sky_condition_layer=1).sky_condition_cloud_altitude.plot(color='r',label='sky_condition_layer = 1')
ds_all.sel(sky_condition_layer=2).sky_condition_cloud_altitude.plot(color='b',label='sky_condition_layer = 2')
ds_all.sel(sky_condition_layer=3).sky_condition_cloud_altitude.plot(color='y',label='sky_condition_layer = 3')
ds_all.sel(sky_condition_layer=4).sky_condition_cloud_altitude.plot(color='g',label='sky_condition_layer = 4')
ds_all.sel(sky_condition_layer=5).sky_condition_cloud_altitude.plot(color='k',label='sky_condition_layer = 5')
plt.title('')
plt.legend()
plt.show()
# -

# ## Create netCDF from 10 min average files - remove times with no data points

CL_SM_avg = scipy.io.loadmat(load_data + 'CL31_ceilometer_ARTofMELT_20230507_20230613_10min_v01.mat',struct_as_record=True)

CL_SM_avg.keys()

# get the data in the mat file
Names=CL_SM_avg['cl31_av'].dtype.names
ndata = {n: CL_SM_avg['cl31_av'][n][0, 0] for n in Names}
print(Names)

# +
# create pandas dataframe from the data
# 1D variables
DF_10min=pd.DataFrame(index=range(len(ndata['doy'])),)
DF_10min['doy']=np.array(list(itertools.chain.from_iterable(ndata['doy'])),dtype=float)
DF_10min['cloudcode']=np.array(list(itertools.chain.from_iterable(ndata['cloudcode'])),dtype=float)
DF_10min['vert_vis']=np.array(list(itertools.chain.from_iterable(ndata['vert_vis'])),dtype=float)
DF_10min['high_sig']=np.array(list(itertools.chain.from_iterable(ndata['high_sig'])),dtype=float)
#2D cloudlayer
DF_10min_cloudbh=pd.DataFrame(index=range(len(ndata['doy'])),data=np.array(ndata['base_ht'],dtype=float)) #2D sc_layer_ht DF_10min_scht=pd.DataFrame(index=range(len(ndata['doy'])),data=np.array(ndata['sc_ht'],dtype=float)) #2D sc_layer_frac DF_10min_scfrac=pd.DataFrame(index=range(len(ndata['doy'])),data=np.array(ndata['sc_frac'],dtype=float)) #2D ranges DF_10min_range=pd.DataFrame(index=range(len(ndata['doy'])),data=np.array(ndata['bs_prof'],dtype=float)) # - len(DF_10min) #check for NaNs - no nans in 10min avg data selection = DF_10min.iloc[DF_10min[~DF_10min.doy.isna()].index].index.values # size with no nans idx_nan = DF_10min.iloc[DF_10min[DF_10min.doy.isna()].index].index.values # length of nan times print(len(selection),len(idx_nan)) #select only no-nan times; for all 5 pandas dataframes DF_10min=DF_10min.iloc[selection].reset_index(drop=True) DF_10min_cloudbh=DF_10min_cloudbh.iloc[selection].reset_index(drop=True) DF_10min_scht=DF_10min_scht.iloc[selection].reset_index(drop=True) DF_10min_scfrac=DF_10min_scfrac.iloc[selection].reset_index(drop=True) DF_10min_range=DF_10min_range.iloc[selection].reset_index(drop=True) #check for right length (same as in the beginning) print(len(DF_10min),len(DF_10min_cloudbh),len(DF_10min_scfrac),len(DF_10min_scht),len(DF_10min_range)) DF_10min.iloc[DF_10min[DF_10min.doy.isna()].index] #check again for NaNs - empty! 
# set time dimension Time_steps=returndatetime_fromdoy(np.array(DF_10min.doy.values,dtype=float)) Times_nomicrosec=[pd.to_datetime(T).round('1s') for T in Time_steps] Time_steps_dt64_org=[np.datetime64(t) for t in Time_steps] Time_steps_dt64_org=np.array(Time_steps_dt64_org,dtype='datetime64[ns]') # add date values DF_10min['year']=np.array([Times_nomicrosec[i].year for i in range(len(Time_steps))],dtype=int) DF_10min['month']=np.array([Times_nomicrosec[i].month for i in range(len(Time_steps))],dtype=int) DF_10min['day']=np.array([Times_nomicrosec[i].day for i in range(len(Time_steps))],dtype=int) DF_10min['hour']=np.array([Times_nomicrosec[i].hour for i in range(len(Time_steps))],dtype=int) DF_10min['minute']=np.array([Times_nomicrosec[i].minute for i in range(len(Time_steps))],dtype=int) DF_10min['second']=np.array([Times_nomicrosec[i].second for i in range(len(Time_steps))],dtype=int) # + # create xr DataArrays #1D: dimension time da_doy=xr.DataArray(data=np.array(DF_10min['doy']).astype('float32'),name="day_of_year", dims=["time"],coords=dict(time=Time_steps_dt64_org), attrs=dict(type="float32",dimension="time",units="1",long_name="Day of Year", description="time as decimal day of year"),) #added as 1D #date and time in separate arrays; microseconds approximated to seconds da_year=xr.DataArray(data=np.array(DF_10min['year']).astype('int32'),name="year",dims=["time"], coords=dict(time=Time_steps_dt64_org), attrs=dict(type="int32",dimension="time",units="1",long_name="Year"),) #added as 1D da_month=xr.DataArray(data=np.array(DF_10min['month']).astype('int32'),name="month",dims=["time"], coords=dict(time=Time_steps_dt64_org), attrs=dict(type="int32",dimension="time",units="1", long_name="Month"),) #added as 1D da_day=xr.DataArray(data=np.array(DF_10min['day']).astype('int32'),name="day",dims=["time"], coords=dict(time=Time_steps_dt64_org), attrs=dict(type="int32",dimension="time",units="1",long_name="Day"),) #added as 1D 
da_hour = xr.DataArray(data=np.array(DF_10min['hour']).astype('int32'), name="hour", dims=["time"],
                       coords=dict(time=Time_steps_dt64_org),
                       attrs=dict(type="int32", dimension="time", units="1",
                                  long_name="Hour"),)  # added as 1D
da_min = xr.DataArray(data=np.array(DF_10min['minute']).astype('int32'), name="minute", dims=["time"],
                      coords=dict(time=Time_steps_dt64_org),
                      attrs=dict(type="int32", dimension="time", units="1",
                                 long_name="Minute"),)  # added as 1D
da_sec = xr.DataArray(data=np.array(DF_10min['second']).astype('int32'), name="second", dims=["time"],
                      coords=dict(time=Time_steps_dt64_org),
                      attrs=dict(type="int32", dimension="time", units="1",
                                 long_name="Second",
                                 description="Time averaged to closest second"),)  # added as 1D

# add cloudcode flag
da_cloudcode = xr.DataArray(data=np.array(DF_10min['cloudcode']).astype('int32'),
                            name="flag_cloudcode", dims=["time"],
                            coords=dict(time=Time_steps_dt64_org),
                            attrs=dict(type="int32", dimension="time", units="1",
                                       long_name='Data flag: Cloudcode',
                                       flag_values="-1,0,1,2,3,4",
                                       flag_meanings="missing_data\nno_significant_backscatter\none_cloud_base_detected\ntwo_cloud_bases_detected\nthree_cloud_bases_detected\nfull_obscuration",
                                       description="Code for number of cloud bases detected; see Readme document for more information"),)
# fix: long_name typo corrected ("Visibilility" -> "Visibility")
da_vertvis = xr.DataArray(data=np.array(DF_10min['vert_vis']).astype('float32'),
                          name="vertical_visibility", dims=["time"],
                          coords=dict(time=Time_steps_dt64_org),
                          attrs=dict(type="float32", dimension="time", units="m",
                                     long_name="Vertical Visibility",
                                     description="Vertical visibility given in case of obscured cloud base (at flag_cloudcode 4), else NaN"),)  # added as 1D
da_high_sig = xr.DataArray(data=np.array(DF_10min['high_sig']).astype('float32'),
                           name="highest_detected_signal", dims=["time"],
                           coords=dict(time=Time_steps_dt64_org),
                           attrs=dict(type="float32", dimension="time", units="m",
                                      long_name="Highest Signal Detected",
                                      description="Highest signal detected given in case of obscured cloud base (at flag_cloudcode 4), else NaN"),)  # added as 1D

# 1D: dimension range_levels
da_ceilrange = xr.DataArray(data=np.array(list(itertools.chain.from_iterable(ndata['ceil_range'])), dtype=int).astype('int32'),
                            name="ceilometer_range", dims=["range_levels"],
                            coords=dict(range_levels=range_levs),
                            attrs=dict(type="int32", dimension="range_levels", units="m",
                                       long_name="Ceilometer Range",
                                       description="Ranges for the ceilometer backscatter profile, including the instrument height",),)  # added as 1D

# 2D: dimension time/cloud_layer
da_baseht = xr.DataArray(data=np.array(DF_10min_cloudbh, dtype=float).astype('float32'),
                         name="cloud_base_altitude", dims=["time", "cloud_layer"],
                         coords=dict(time=Time_steps_dt64_org, cloud_layer=cloud_layer_levs),
                         attrs=dict(type="float32", dimension="time, cloud_layer", units="m",
                                    long_name="Cloud Base Altitude",
                                    description="cloud base height of 1-3 cloud layers; NaN if no layer detected. " +
                                                "Instrument height incorporated."),)  # added as 2D

# 2D: dimension time/sky_condition_layer
# NOTE(review): cloud fraction 0-8 is conventionally reported in "okta";
# units string "octal" kept as in the source data -- confirm before changing.
da_scfrac = xr.DataArray(data=np.array(DF_10min_scfrac, dtype=float).astype('float32'),
                         name="sky_condition_cloud_fraction", dims=["time", "sky_condition_layer"],
                         coords=dict(time=Time_steps_dt64_org, sky_condition_layer=sc_layer_levs),
                         attrs=dict(type="float32", dimension="time, sky_condition_layer", units="octal",
                                    long_name="Sky Condition Cloud Fraction",
                                    description="Cloud fraction calculated with the sky condition algorithm. " +
                                                "0-8 = cloud coverage of up to 5 levels; 9 = obscuration. " +
                                                "NaN = missing data or no detected layer."),)  # added as 2D
da_scht = xr.DataArray(data=np.array(DF_10min_scht, dtype=float).astype('float32'),
                       name="sky_condition_cloud_altitude", dims=["time", "sky_condition_layer"],
                       coords=dict(time=Time_steps_dt64_org, sky_condition_layer=sc_layer_levs),
                       attrs=dict(type="float32", dimension="time, sky_condition_layer", units="m",
                                  long_name="Sky Condition Cloud Altitude",
                                  description="Cloud layer height calculated with the sky condition algorithm. " +
                                              "Cloud layer height given for 1-5 sky condition layers; NaN if no layer detected. " +
                                              "Vertical visibility is reported as height if obscuration (at sky_condition_cloud_fraction 9). " +
                                              "Instrument height incorporated."),)  # added as 2D

# 2D: dimension time/range
da_bsprof = xr.DataArray(data=np.array(DF_10min_range, dtype=float).astype('float32'),
                         name="backscatter_profile", dims=["time", "range_levels"],
                         coords=dict(time=Time_steps_dt64_org, range_levels=range_levs),
                         attrs=dict(type="float32", dimension="time, range_levels",
                                    units="1 km-1 steradians-1",
                                    long_name="Backscatter Profile",
                                    description="backscatter coefficient profile"),)  # added as 2D

# +
# merge all arrays into one
ds_all = xr.merge([da_doy, da_year, da_month, da_day, da_hour, da_min, da_sec,
                   da_ceilrange, da_cloudcode, da_vertvis, da_high_sig,
                   da_baseht, da_scfrac, da_scht, da_bsprof])
len(ds_all)
# -

# get time range - start
Times_nomicrosec[0]

# get time range - end
Times_nomicrosec[-1]

# +
# modify attributes (start/end time and text in comments);
# note again geospatial bounds from the weather station data
# fixes: stray leading space removed from "acknowledgement";
# missing sentence-separating space added in "comments"
ds_all.attrs = {"Conventions": "CF-1.8",
                "source": "Ceilometer",
                "instrument_model": "Vaisala Ceilometer CL31",
                "creator_name": "Sonja Murto",
                "creator_email": "sonja.murto@misu.su.se",
                "creator_url": "https://orcid.org/0000-0002-4966-9077",
                "institution": "Stockholm University",
                "processing_software": "Matlab (for creating the matlab file) and a jupyter notebook script (netCDF)",
                "sampling_interval": "original 30s; here 10min averages",
                "product_version": "v01",
                "last_revised_date": "2024-05-31T15:00:00",
                "project": "ARTofMELT",
                "project_principal_investigator": "Michael Tjernström",
                "project_principal_investigator_email": "michaelt@misu.su.se",
                "project_principal_investigator_url": "https://orcid.org/0000-0002-6908-7410",
                "acknowledgement": "Knut och Alice Wallenbergs Stiftelse, Grant 2016-0024",
                "platform": "Swedish Icebreaker Oden",
                "platform_type": "On Oden's 7th deck above the bridge",
                "deployment_mode": "ship",
                "title": "Ceilometer cloud base height, vertical visibility and backscatter profiles",
                "feature_type": "time series",
                "time_coverage_start": "2023-05-07T00:05:22",
                "time_coverage_end": "2023-06-13T16:15:07",
                "geospatial_bounds": "80.52392166666667N, -3.8737749999999997E, 78.04355166666666N, 15.660881666666667E",
                "platform_altitude": "Located at approximately 25 m a.s.l",
                "location_keywords": "Oden, Arctic Ocean, Fram Strait, atmosphere, on the ship",
                "comments": "This file consists of 10 min averages of ceilometer data " +
                            "measured with the Vaisala Ceilometer CL31 that was located on the 7th deck, " +
                            "above the bridge (at approximately 25m). " +
                            "The sky condition measurements are time averages to represent an area average. " +
                            "The vertical resolution is 10m * 770, but the measurement height (25m) is included in the backscatter profile ranges, " +
                            "as well as in the cloud base heights (cloud_base_altitude and sky_condition_cloud_altitude). " +
                            "Geospatial bounds are taken from the gps location of the weather station dataset located on Oden. " +
                            "Time variables month, day, hour, minute and second are approximated to the nearest second. " +
                            "Data produced by Sonja Murto. See the document - Readme_CL.txt - for more details."}
# -

ds_all

# save to netCDF
ds_all.to_netcdf(load_data + 'CL31_ceilometer_ARTofMELT_20230507_20230613_10min_v01.nc')

# +
# test plot
ds_all.sel(cloud_layer=1).cloud_base_altitude.plot(color='r', label='cloud_layer = 1')
ds_all.sel(cloud_layer=2).cloud_base_altitude.plot(color='b', label='cloud_layer = 2')
ds_all.sel(cloud_layer=3).cloud_base_altitude.plot(color='y', label='cloud_layer = 3')
plt.title('')
plt.legend()
plt.show()
# -

# ## Create netCDF from 20 min average files - remove times with no data points

CL_SM_avg = scipy.io.loadmat(load_data + 'CL31_ceilometer_ARTofMELT_20230507_20230613_20min_v01.mat',
                             struct_as_record=True)
CL_SM_avg.keys()

# get the data in the mat file (MATLAB struct -> dict of field arrays)
Names = CL_SM_avg['cl31_av'].dtype.names
ndata = {n: CL_SM_avg['cl31_av'][n][0, 0] for n in Names}
print(Names)

# +
# create pandas dataframe from the data
# 1D variables
DF_20min = pd.DataFrame(index=range(len(ndata['doy'])),)
DF_20min['doy'] = np.array(list(itertools.chain.from_iterable(ndata['doy'])), dtype=float)
DF_20min['cloudcode'] = np.array(list(itertools.chain.from_iterable(ndata['cloudcode'])), dtype=float)
DF_20min['vert_vis'] = np.array(list(itertools.chain.from_iterable(ndata['vert_vis'])), dtype=float)
DF_20min['high_sig'] = np.array(list(itertools.chain.from_iterable(ndata['high_sig'])), dtype=float)
# 2D cloudlayer
DF_20min_cloudbh = pd.DataFrame(index=range(len(ndata['doy'])), data=np.array(ndata['base_ht'], dtype=float))
# 2D sc_layer_ht
DF_20min_scht = pd.DataFrame(index=range(len(ndata['doy'])), data=np.array(ndata['sc_ht'], dtype=float))
# 2D sc_layer_frac
DF_20min_scfrac = pd.DataFrame(index=range(len(ndata['doy'])), data=np.array(ndata['sc_frac'], dtype=float))
# 2D ranges
DF_20min_range = pd.DataFrame(index=range(len(ndata['doy'])), data=np.array(ndata['bs_prof'], dtype=float))
# -

len(DF_20min)

# check for NaNs - no nans in 20min avg data
# (index is a default RangeIndex, so label positions equal iloc positions;
# the original .iloc[...].index.values round-trip was redundant)
selection = DF_20min.index[~DF_20min.doy.isna()].values  # rows with data
idx_nan = DF_20min.index[DF_20min.doy.isna()].values     # rows with NaN doy
print(len(selection), len(idx_nan))

# select only no-nan times; for all 5 pandas dataframes
DF_20min = DF_20min.iloc[selection].reset_index(drop=True)
DF_20min_cloudbh = DF_20min_cloudbh.iloc[selection].reset_index(drop=True)
DF_20min_scht = DF_20min_scht.iloc[selection].reset_index(drop=True)
DF_20min_scfrac = DF_20min_scfrac.iloc[selection].reset_index(drop=True)
DF_20min_range = DF_20min_range.iloc[selection].reset_index(drop=True)

# check for right length (same as in the beginning)
print(len(DF_20min), len(DF_20min_cloudbh), len(DF_20min_scfrac), len(DF_20min_scht), len(DF_20min_range))

DF_20min[DF_20min.doy.isna()]  # check again for NaNs - empty!

# set time dimension
Time_steps = returndatetime_fromdoy(np.array(DF_20min.doy.values, dtype=float))
Times_nomicrosec = [pd.to_datetime(T).round('1s') for T in Time_steps]
Time_steps_dt64_org = np.array([np.datetime64(t) for t in Time_steps], dtype='datetime64[ns]')

# add date values (integer calendar/clock components from the rounded timestamps)
for _part in ('year', 'month', 'day', 'hour', 'minute', 'second'):
    DF_20min[_part] = np.array([getattr(t, _part) for t in Times_nomicrosec], dtype=int)

# +
# create xr DataArrays
# 1D: dimension time
da_doy = xr.DataArray(data=np.array(DF_20min['doy']).astype('float32'), name="day_of_year",
                      dims=["time"], coords=dict(time=Time_steps_dt64_org),
                      attrs=dict(type="float32", dimension="time", units="1",
                                 long_name="Day of Year",
                                 description="time as decimal day of year"),)  # added as 1D

# date and time in separate arrays; microseconds approximated to seconds
da_year = xr.DataArray(data=np.array(DF_20min['year']).astype('int32'), name="year", dims=["time"],
                       coords=dict(time=Time_steps_dt64_org),
                       attrs=dict(type="int32", dimension="time", units="1",
                                  long_name="Year"),)  # added as 1D
da_month = xr.DataArray(data=np.array(DF_20min['month']).astype('int32'), name="month", dims=["time"],
                        coords=dict(time=Time_steps_dt64_org),
                        attrs=dict(type="int32", dimension="time", units="1",
                                   long_name="Month"),)  # added as 1D
da_day = xr.DataArray(data=np.array(DF_20min['day']).astype('int32'), name="day", dims=["time"],
                      coords=dict(time=Time_steps_dt64_org),
                      attrs=dict(type="int32", dimension="time", units="1",
                                 long_name="Day"),)  # added as 1D
da_hour = xr.DataArray(data=np.array(DF_20min['hour']).astype('int32'), name="hour", dims=["time"],
                       coords=dict(time=Time_steps_dt64_org),
                       attrs=dict(type="int32", dimension="time", units="1",
                                  long_name="Hour"),)  # added as 1D
da_min = xr.DataArray(data=np.array(DF_20min['minute']).astype('int32'), name="minute", dims=["time"],
                      coords=dict(time=Time_steps_dt64_org),
                      attrs=dict(type="int32", dimension="time", units="1",
                                 long_name="Minute"),)  # added as 1D
da_sec = xr.DataArray(data=np.array(DF_20min['second']).astype('int32'), name="second", dims=["time"],
                      coords=dict(time=Time_steps_dt64_org),
                      attrs=dict(type="int32", dimension="time", units="1",
                                 long_name="Second",
                                 description="Time averaged to closest second"),)  # added as 1D

# add cloudcode flag
da_cloudcode = xr.DataArray(data=np.array(DF_20min['cloudcode']).astype('int32'),
                            name="flag_cloudcode", dims=["time"],
                            coords=dict(time=Time_steps_dt64_org),
                            attrs=dict(type="int32", dimension="time", units="1",
                                       long_name='Data flag: Cloudcode',
                                       flag_values="-1,0,1,2,3,4",
                                       flag_meanings="missing_data\nno_significant_backscatter\none_cloud_base_detected\ntwo_cloud_bases_detected\nthree_cloud_bases_detected\nfull_obscuration",
                                       description="Code for number of cloud bases detected; see Readme document for more information"),)
# fix: long_name typo corrected ("Visibilility" -> "Visibility")
da_vertvis = xr.DataArray(data=np.array(DF_20min['vert_vis']).astype('float32'),
                          name="vertical_visibility", dims=["time"],
                          coords=dict(time=Time_steps_dt64_org),
                          attrs=dict(type="float32", dimension="time", units="m",
                                     long_name="Vertical Visibility",
                                     description="Vertical visibility given in case of obscured cloud base (at flag_cloudcode 4), else NaN"),)  # added as 1D
da_high_sig = xr.DataArray(data=np.array(DF_20min['high_sig']).astype('float32'),
                           name="highest_detected_signal", dims=["time"],
                           coords=dict(time=Time_steps_dt64_org),
                           attrs=dict(type="float32", dimension="time", units="m",
                                      long_name="Highest Signal Detected",
                                      description="Highest signal detected given in case of obscured cloud base (at flag_cloudcode 4), else NaN"),)  # added as 1D

# 1D: dimension range_levels
da_ceilrange = xr.DataArray(data=np.array(list(itertools.chain.from_iterable(ndata['ceil_range'])), dtype=int).astype('int32'),
                            name="ceilometer_range", dims=["range_levels"],
                            coords=dict(range_levels=range_levs),
                            attrs=dict(type="int32", dimension="range_levels", units="m",
                                       long_name="Ceilometer Range",
                                       description="Ranges for the ceilometer backscatter profile, including the instrument height",),)  # added as 1D

# 2D: dimension time/cloud_layer
da_baseht = xr.DataArray(data=np.array(DF_20min_cloudbh, dtype=float).astype('float32'),
                         name="cloud_base_altitude", dims=["time", "cloud_layer"],
                         coords=dict(time=Time_steps_dt64_org, cloud_layer=cloud_layer_levs),
                         attrs=dict(type="float32", dimension="time, cloud_layer", units="m",
                                    long_name="Cloud Base Altitude",
                                    description="cloud base height of 1-3 cloud layers; NaN if no layer detected. " +
                                                "Instrument height incorporated."),)  # added as 2D

# 2D: dimension time/sky_condition_layer
# NOTE(review): cloud fraction 0-8 is conventionally reported in "okta";
# units string "octal" kept as in the source data -- confirm before changing.
da_scfrac = xr.DataArray(data=np.array(DF_20min_scfrac, dtype=float).astype('float32'),
                         name="sky_condition_cloud_fraction", dims=["time", "sky_condition_layer"],
                         coords=dict(time=Time_steps_dt64_org, sky_condition_layer=sc_layer_levs),
                         attrs=dict(type="float32", dimension="time, sky_condition_layer", units="octal",
                                    long_name="Sky Condition Cloud Fraction",
                                    description="Cloud fraction calculated with the sky condition algorithm. " +
                                                "0-8 = cloud coverage of up to 5 levels; 9 = obscuration. " +
                                                "NaN = missing data or no detected layer."),)  # added as 2D
da_scht = xr.DataArray(data=np.array(DF_20min_scht, dtype=float).astype('float32'),
                       name="sky_condition_cloud_altitude", dims=["time", "sky_condition_layer"],
                       coords=dict(time=Time_steps_dt64_org, sky_condition_layer=sc_layer_levs),
                       attrs=dict(type="float32", dimension="time, sky_condition_layer", units="m",
                                  long_name="Sky Condition Cloud Altitude",
                                  description="Cloud layer height calculated with the sky condition algorithm. " +
                                              "Cloud layer height given for 1-5 sky condition layers; NaN if no layer detected. " +
                                              "Vertical visibility is reported as height if obscuration (at sky_condition_cloud_fraction 9). " +
                                              "Instrument height incorporated."),)  # added as 2D

# 2D: dimension time/range
da_bsprof = xr.DataArray(data=np.array(DF_20min_range, dtype=float).astype('float32'),
                         name="backscatter_profile", dims=["time", "range_levels"],
                         coords=dict(time=Time_steps_dt64_org, range_levels=range_levs),
                         attrs=dict(type="float32", dimension="time, range_levels",
                                    units="1 km-1 steradians-1",
                                    long_name="Backscatter Profile",
                                    description="backscatter coefficient profile"),)  # added as 2D

# +
# merge all arrays into one
ds_all = xr.merge([da_doy, da_year, da_month, da_day, da_hour, da_min, da_sec,
                   da_ceilrange, da_cloudcode, da_vertvis, da_high_sig,
                   da_baseht, da_scfrac, da_scht, da_bsprof])
len(ds_all)
# -

# get time range - start
Times_nomicrosec[0]

# get time range - end
Times_nomicrosec[-1]

# +
# modify attributes (start/end time and text in comments);
# note again geospatial bounds from the weather station data
# fixes: stray leading space removed from "acknowledgement";
# missing sentence-separating space added in "comments"
ds_all.attrs = {"Conventions": "CF-1.8",
                "source": "Ceilometer",
                "instrument_model": "Vaisala Ceilometer CL31",
                "creator_name": "Sonja Murto",
                "creator_email": "sonja.murto@misu.su.se",
                "creator_url": "https://orcid.org/0000-0002-4966-9077",
                "institution": "Stockholm University",
                "processing_software": "Matlab (for creating the matlab file) and a jupyter notebook script (netCDF)",
                "sampling_interval": "original 30s; here 20min averages",
                "product_version": "v01",
                "last_revised_date": "2024-05-31T15:00:00",
                "project": "ARTofMELT",
                "project_principal_investigator": "Michael Tjernström",
                "project_principal_investigator_email": "michaelt@misu.su.se",
                "project_principal_investigator_url": "https://orcid.org/0000-0002-6908-7410",
                "acknowledgement": "Knut och Alice Wallenbergs Stiftelse, Grant 2016-0024",
                "platform": "Swedish Icebreaker Oden",
                "platform_type": "On Oden's 7th deck above the bridge",
                "deployment_mode": "ship",
                "title": "Ceilometer cloud base height, vertical visibility and backscatter profiles",
                "feature_type": "time series",
                "time_coverage_start": "2023-05-07T00:10:22",
                "time_coverage_end": "2023-06-13T16:10:07",
                "geospatial_bounds": "80.52392166666667N, -3.8737749999999997E, 78.04355166666666N, 15.660881666666667E",
                "platform_altitude": "Located at approximately 25 m a.s.l",
                "location_keywords": "Oden, Arctic Ocean, Fram Strait, atmosphere, on the ship",
                "comments": "This file consists of 20 min averages of ceilometer data " +
                            "measured with the Vaisala Ceilometer CL31 that was located on the 7th deck, " +
                            "above the bridge (at approximately 25m). " +
                            "The sky condition measurements are time averages to represent an area average. " +
                            "The vertical resolution is 10m * 770, but the measurement height (25m) is included in the backscatter profile ranges, " +
                            "as well as in the cloud base heights (cloud_base_altitude and sky_condition_cloud_altitude). " +
                            "Geospatial bounds are taken from the gps location of the weather station dataset located on Oden. " +
                            "Time variables month, day, hour, minute and second are approximated to the nearest second. " +
                            "Data produced by Sonja Murto. See the document - Readme_CL.txt - for more details."}
# -

ds_all

# save to netCDF
ds_all.to_netcdf(load_data + 'CL31_ceilometer_ARTofMELT_20230507_20230613_20min_v01.nc')

# +
# test plot
ds_all.sel(cloud_layer=1).cloud_base_altitude.plot(color='r', label='cloud_layer = 1')
ds_all.sel(cloud_layer=2).cloud_base_altitude.plot(color='b', label='cloud_layer = 2')
ds_all.sel(cloud_layer=3).cloud_base_altitude.plot(color='y', label='cloud_layer = 3')
plt.title('')
plt.legend()
plt.show()
# -