# Documentos de Académico
# Documentos de Profesional
# Documentos de Cultura
# -----------------------------------
# Name: cigom_test.py
# Purpose: Script para descarga de información desde el Servidor OPeNDAP de NOAA
#
# Author: Rubersy Ramos García
#
# Created: 04/09/2021
#
# NOTA 1: véase path_base (por defecto path_base = "C:/Users/root/Desktop/CIGOM/Output")
# NOTA 2: falta la sección de código para realizar las descargas para cada uno de los incrementos de 3 horas,
# por ahora solo se realiza la descarga para el último incremento: (sufix_hours[32] = '00_00960000.nc')
#----------------------------------------------------------------------------------------------------------
# Base URL of the NOAA OPeNDAP (THREDDS) AMSEAS dataset; individual files are
# addressed as url_base + <file name>.
url_base = "https://www.ncei.noaa.gov/thredds-coastal/dodsC/amseas/amseas_20201218_to_current/"
# Local destination path for the most recent file of the current day
# (last 3-hour increment, see NOTA 2 in the header).
url_today_last = "C:/Users/root/Desktop/CIGOM/coamps_ncom_amseas_u_1_2021090300_00960000.nc"
# Perform the data download from the dataset and announce it on the console.
#---------------------------------------------------------------------------------
print("---------------------------------------------------------------------------------------------------------------")
print("Descargando datos desde el Servidor OPeNDAP de NOAA...")
print("Catálogo: https://www.ncei.noaa.gov/thredds-coastal/catalog/amseas/amseas_20201218_to_current/catalog.html")
print("---------------------------------------------------------------------------------------------------------------")
#print("----------------------TESTEO-----------------------------")
# Debug helpers: print the dataset's file format and dimensions.
#-------------------------------------------
#print(dataset.file_format)
#print(dataset.dimensions.keys())
#print("----------------------TESTEO-----------------------------")
#print(all_vars)
#print(lon_data)
#print(lat_data)
#print(depth_data)
#print(time_data)
#print(water_u_data)
#print(water_v_data)
#-----------------------------------------------------------------
# Copy every netCDF attribute of the coordinate and data variables from the
# source dataset (`dataset`) to the output dataset (`dataset_out`).
# NOTE(review): `lon` is not copied here — presumably handled earlier in the
# file; confirm.  Both `dataset` and `dataset_out` are opened elsewhere.
for var_name in ("lat", "depth", "time"):
    src_var = dataset.variables[var_name]
    dst_var = dataset_out.variables[var_name]
    for attr in src_var.ncattrs():
        dst_var.setncattr(attr, src_var.getncattr(attr))

# For the data variables, skip "_FillValue": in netCDF4 it can only be set
# when the variable is created, so re-setting it here would raise an error.
for var_name in ("water_u", "water_v"):
    src_var = dataset.variables[var_name]
    dst_var = dataset_out.variables[var_name]
    for attr in src_var.ncattrs():
        if attr != "_FillValue":
            dst_var.setncattr(attr, src_var.getncattr(attr))
#print("----------------------TESTEO-----------------------------")
#print(dataset_out.variables["time"][:])
#print(dataset_out.variables["depth"][:])
#print(dataset_out.variables["lat"][:])
#print(dataset_out.variables["lon"][:])
#print(dataset_out.variables["water_u"][:])
#print(dataset_out.variables["water_u"])
#print(dataset_out.variables["water_v"][:])
#print(time_attrib)
#print(depth_attrib)
#print(lat_attrib)
#print(lon_attrib)
#print(water_u_attrib)
#print(water_v_attrib)
#print(dataset_out.variables["time"].ncattrs())
#print(dataset_out.variables["depth"].ncattrs())
#print(dataset_out.variables["lat"].ncattrs())
#print(dataset_out.variables["lon"].ncattrs())
#print(dataset_out.variables["water_u"].ncattrs())
#print(dataset_out.variables["water_v"].ncattrs())
#------------------------------------------------------------------
# Close both datasets to flush the output file and release the OPeNDAP handle.
#------------------------------------------
dataset.close()
dataset_out.close()
# Final user notification.
# NOTE(review): `path_today_last` is not defined in this chunk (only
# `url_today_last` is) — confirm it is set earlier in the file.
#------------------------------------------
print("Los datos de las variables fueron importados correctamente y estan disponibles en:")
print(path_today_last)
print("---------------------------------------------------------------------------------------------------------------")