updated environment
This commit is contained in: parent 5963606d05, commit a7ee25fba7
environment (*.yml) file:

@@ -1,4 +1,4 @@
-name: BASTET
+name: MasterThesis
 channels:
   - jmcmurray
   - anaconda
@@ -17,11 +17,11 @@ dependencies:
   - branca=0.4.2=pyhd8ed1ab_0
   - brotlipy=0.7.0=py38hfa6e2cd_1000
   - bzip2=1.0.8=he774522_3
-  - ca-certificates=2020.12.5=h5b45459_0
+  - ca-certificates=2021.5.30=h5b45459_0
   - cartopy=0.18.0=py38hd77ba2b_0
   - cartopy_offlinedata=0.2.3=pyh9f0ad1d_0
   - cdsapi=0.2.7=py_0
-  - certifi=2020.12.5=py38haa244fe_1
+  - certifi=2021.5.30=py38haa244fe_0
   - cffi=1.14.3=py38h0e640b1_1
   - cfgrib=0.9.8.4=py_0
   - cftime=1.2.1=py38h1e00858_1
@@ -41,6 +41,7 @@ dependencies:
   - distributed=2021.3.0=py38haa244fe_0
   - eccodes=2.17.0=h37af81a_0
   - entrypoints=0.3=py38h32f6830_1002
+  - et_xmlfile=1.0.1=py_1001
   - expat=2.2.9=h33f27b4_2
   - folium=0.12.0=pyhd8ed1ab_1
   - freeglut=3.0.0=h6538335_1005
@@ -58,6 +59,7 @@ dependencies:
   - ipython=7.19.0=py38hc5df569_0
   - ipython_genutils=0.2.0=py_1
   - jasper=2.0.14=hdc05fd1_1
+  - jdcal=1.4.1=py_0
   - jedi=0.17.2=py38h32f6830_1
   - jinja2=2.11.2=pyh9f0ad1d_0
   - joblib=1.0.1=pyhd8ed1ab_0
@@ -100,6 +102,7 @@ dependencies:
   - numpy=1.19.4=py38h0cc643e_0
   - olefile=0.46=pyh9f0ad1d_1
   - openjpeg=2.3.1=h57dd2e7_3
+  - openpyxl=3.0.7=pyhd8ed1ab_0
   - openssl=1.1.1k=h8ffe710_0
   - os=0.1.4=0
   - owslib=0.20.0=py_0
@@ -120,6 +123,7 @@ dependencies:
   - pthreads-win32=2.9.1=hfa6e2cd_3
   - pycparser=2.20=pyh9f0ad1d_2
   - pyepsg=0.4.0=py_0
+  - pyfiglet=0.8.post1=py_0
   - pygments=2.7.2=py_0
   - pygrib=2.0.5=py38hbf9c9a7_0
   - pykdtree=1.3.1=py38h1e00858_1004
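The newly added packages line up with functionality used elsewhere in this commit: openpyxl (together with its et_xmlfile and jdcal dependencies at these versions) is what pandas uses for the .xlsx output in the main script, and pyfiglet is a console-banner library. A minimal sketch of the kind of usage these additions enable; the file name and banner text here are illustrative, not taken from the repository:

import pandas as pd
import pyfiglet

# pandas writes .xlsx files through openpyxl, which in turn needs et_xmlfile/jdcal
df = pd.DataFrame({"Time": [0, 1, 2], "Height": [0.0, 120.5, 250.0]})
df.to_excel("output_example.xlsx", engine="openpyxl")

# pyfiglet renders ASCII-art banners, e.g. for a simulation start-up message
print(pyfiglet.figlet_format("BASTET"))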
DataRequest.py (170 changed lines):
@@ -1,23 +1,58 @@
+import os
+import sys
 import cdsapi
-from input.user_input import *
+from datetime import datetime
+from datetime import date
+import numpy as np
+import xarray as xr
+from netCDF4 import Dataset
+from dask.diagnostics import ProgressBar

-# start_lat = 78.22 Svalbard
+folder = "ERA5"
+ident = "McMurdo"

+north_lim, south_lim, east_lim, west_lim = 90, 45, 180, -180  # Northern Polar Region
+# north_lim, south_lim, east_lim, west_lim = -45, -90, 180, -180  # Southern Polar Region

+start_lat = 67.887382 # Kiruna
+start_lon = 21.081452

+# SOME START LOCATIONS:

+# start_lat = 78.22 # Svalbard
 # start_lon = 15.65
+#
+# start_lat = -77.8535 # McMurdo
+# start_lon = 167.2022

-# start_lat = 67.887382 Kiruna
+startdate = '2019-12-15'
-# start_lon = 21.081452
+enddate = '2020-01-10'

-startdays = ['23', '24', '25', '26', '27', '28', '29', '30', '31']
+try:
-days = ['1', '2', '3', '4', '5', '6', '7', '8', '9', '10', '11', '12', '13', '14', '15', '16', '17', '18', '19', '20', '21', '22', '23', '24', '25', '26', '27', '28', '29', '30', '31']
+    os.makedirs(folder)
-endascent = ['1', '2', '3', '4', '5', '6', '7', '8', '9', '10', '11', '12', '13', '14', '15', '16', '17', '18', '19', '20', '21', '22']
+except FileExistsError:
-endfloat = ['1']
+    pass

+start = datetime.fromisoformat(startdate)
+end = datetime.fromisoformat(enddate)

+#"""
+# DOWNLOAD OF ERA5-DATA:

+startdays = [str(start.day+i).zfill(2) for i in range((date(start.year, start.month + 1, 1) - date(start.year, start.month, 1)).days - start.day + 1)]
+endascent = [str(i+1).zfill(2) for i in range(30 - len(startdays))]
+days = [str(i+1).zfill(2) for i in range(31)]
+endfloat = [str(i+1).zfill(2) for i in range(end.day)]


-def ERAsingle(year, month, days, north_lim, south_lim, east_lim, west_lim, name):
+def ERAsingle(year, month, days, nlim, slim, elim, wlim, name):
     single = cdsapi.Client().retrieve(
         'reanalysis-era5-single-levels',
         {
             'product_type': 'reanalysis',
-            'format': 'netcdf',
             'variable': [
                 'cloud_base_height', 'high_cloud_cover', 'low_cloud_cover', 'medium_cloud_cover',
                 'skin_temperature', 'surface_net_solar_radiation', 'surface_net_thermal_radiation',
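The list comprehensions introduced above derive the CDS request day lists from startdate/enddate instead of hard-coding them. A minimal worked sketch of what they evaluate to, using hypothetical dates whose start month is not December (for a December start, the date(start.year, start.month + 1, 1) term would raise a ValueError as written):

from datetime import datetime, date

start = datetime.fromisoformat('2019-11-15')   # hypothetical example, not the commit's value
end = datetime.fromisoformat('2019-12-10')

# number of days in the start month: first of next month minus first of this month
days_in_month = (date(start.year, start.month + 1, 1) - date(start.year, start.month, 1)).days

startdays = [str(start.day + i).zfill(2) for i in range(days_in_month - start.day + 1)]
days = [str(i + 1).zfill(2) for i in range(31)]
endfloat = [str(i + 1).zfill(2) for i in range(end.day)]

print(startdays)            # ['15', '16', ..., '30']: the remaining days of the start month
print(days[:3], days[-1])   # ['01', '02', '03'] '31': every day of a full month
print(endfloat[-1])         # '10': days up to the end date in the final month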
@@ -40,24 +75,23 @@ def ERAsingle(year, month, days, north_lim, south_lim, east_lim, west_lim, name)
             'month': str(month),
             'day': days,
             'area': [
-                north_lim, west_lim, south_lim,  # North, West, South 72, -111, 67,
-                east_lim,  # East 22,
+                nlim, wlim, slim, elim,
             ],
+            'format': 'netcdf',
         })
     single.download(name)


-def ERAlevelAscend(year, month, days, start_lat, start_lon, name):
+def ERAlevelAscent(year, month, dayrange, start_lat, start_lon, name):
-    north_lim = start_lat + 10.0
+    nlim = start_lat + 10.0
-    south_lim = start_lat - 10.0
+    slim = start_lat - 10.0
-    east_lim = start_lon - 10.0
+    elim = start_lon + 10.0
-    west_lim = start_lon + 10.0
+    wlim = start_lon - 10.0

-    ascend = cdsapi.Client().retrieve(
+    ascent = cdsapi.Client().retrieve(
         'reanalysis-era5-pressure-levels',
         {
             'product_type': 'reanalysis',
-            'format': 'netcdf',
             'variable': [
                 'geopotential', 'temperature', 'u_component_of_wind',
                 'v_component_of_wind', 'vertical_velocity',
@@ -79,7 +113,7 @@ def ERAlevelAscend(year, month, days, start_lat, start_lon, name):
             ],
             'year': str(year),
             'month': str(month),
-            'day': days,
+            'day': dayrange,
             'time': [
                 '00:00', '01:00', '02:00',
                 '03:00', '04:00', '05:00',
@@ -91,19 +125,18 @@ def ERAlevelAscend(year, month, days, start_lat, start_lon, name):
                 '21:00', '22:00', '23:00',
             ],
             'area': [
-                north_lim, west_lim, south_lim,
-                east_lim,
+                nlim, wlim, slim, elim,
             ],
+            'format': 'netcdf',
         })
-    ascend.download(name)
+    ascent.download(name)


-def ERAlevelFloat(year, month, days, north_lim, south_lim, east_lim, west_lim, name):
+def ERAlevelFloat(year, month, dayrange, nlim, slim, elim, wlim, name):
     floating = cdsapi.Client().retrieve(
         'reanalysis-era5-pressure-levels',
         {
             'product_type': 'reanalysis',
-            'format': 'netcdf',
             'variable': [
                 'geopotential', 'temperature', 'u_component_of_wind',
                 'v_component_of_wind', 'vertical_velocity',
@@ -115,7 +148,7 @@ def ERAlevelFloat(year, month, days, north_lim, south_lim, east_lim, west_lim, n
             ],
             'year': str(year),
             'month': str(month),
-            'day': days,
+            'day': dayrange,
             'time': [
                 '00:00', '01:00', '02:00',
                 '03:00', '04:00', '05:00',
@@ -127,28 +160,83 @@ def ERAlevelFloat(year, month, days, north_lim, south_lim, east_lim, west_lim, n
                 '21:00', '22:00', '23:00',
             ],
             'area': [
-                north_lim, west_lim, south_lim,
-                east_lim,
+                nlim, wlim, slim, elim,
             ],
+            'format': 'netcdf',
         })
     floating.download(name)


-##ERAsingle(2018, 5, startdays, 90, 45, 180, -180, "single_2018_1.nc")
-##ERAsingle(2018, 6, days, 90, 45, 180, -180, "single_2018_2.nc")
-##ERAsingle(2018, 7, days, 90, 45, 180, -180, "single_2018_3.nc")
-##ERAsingle(2018, 8, endfloat, 90, 45, 180, -180, "single_2018_4.nc")

-##ERAlevelAscend(2018, 5, startdays, start_lat, start_lon, "ascend_2018_kiruna_1.nc") # start_lat = 67.887382 Kiruna
-ERAlevelAscend(2018, 6, endascent, start_lat, start_lon, "ascend_2018_kiruna_2.nc") # start_lon = 21.081452

-ERAlevelFloat(2018, 5, startdays, 90, 45, -180, 180, "float_2018_1.nc")
-ERAlevelFloat(2018, 6, days, 90, 45, -180, 180, "float_2018_2.nc")
-ERAlevelFloat(2018, 7, days, 90, 45, -180, 180, "float_2018_3.nc")
-ERAlevelFloat(2018, 8, endfloat, 90, 45, -180, 180, "float_2018_4.nc")

-#ERAlevelAscend(2016, 7, ['12', '13', '14'], start_lat, start_lon, "ascend_2016_kiruna_new.nc")
-#ERAsingle(2016, 7, ['12', '13', '14', '15', '16', '17', '18', '19'], 90, 45, 180, -180, "single_2016_new.nc")
-#ERAlevelFloat(2016, 7, ['12', '13', '14', '15', '16', '17', '18', '19'], 90, 45, -180, 180, "float_2016_new.nc")

+ERAlevelAscent(start.year, start.month, startdays, start_lat, start_lon, os.path.join(folder, "ascent1.nc"))
+ERAlevelAscent(start.year, start.month + 1, endascent, start_lat, start_lon, os.path.join(folder, "ascent2.nc"))

+ERAsingle(start.year, start.month, startdays, north_lim, south_lim, east_lim, west_lim, os.path.join(folder, "single" + str(start.month) + ".nc"))
+ERAlevelFloat(start.year, start.month, startdays, north_lim, south_lim, east_lim, west_lim, os.path.join(folder, "float" + str(start.month) + ".nc"))

+for m in range(end.month - start.month - 1):
+    ERAsingle(start.year, start.month + m + 1, days, north_lim, south_lim, east_lim, west_lim, os.path.join(folder, "single" + str(start.month + m + 1) + ".nc"))
+    ERAlevelFloat(start.year, start.month + m + 1, days, north_lim, south_lim, east_lim, west_lim, os.path.join(folder, "float" + str(start.month + m + 1) + ".nc"))

+ERAsingle(start.year, end.month, endfloat, north_lim, south_lim, east_lim, west_lim, os.path.join(folder, "single" + str(end.month) + ".nc"))
+ERAlevelFloat(start.year, end.month, endfloat, north_lim, south_lim, east_lim, west_lim, os.path.join(folder, "float" + str(end.month) + ".nc"))
+#"""

+# STITCHING OF MULTIPLE *.NC-FILES TO ONE:

+floatfiles = []
+singlefiles = []
+ascentfiles = []

+for (root, dirs, files) in os.walk("ERA5"):
+    for name in files:
+        if name.startswith("float"):
+            floatfiles.append(os.path.join(folder, str(name)))
+        elif name.startswith("radiation"):
+            singlefiles.append(os.path.join(folder, str(name)))
+        else:
+            ascentfiles.append(os.path.join(folder, str(name)))

+startfile = Dataset(floatfiles[0], "r", format="NETCDF4")
+endfile = Dataset(floatfiles[-1], "r", format="NETCDF4")

+tstart = int(startfile.variables['time'][0])
+tend = int(endfile.variables['time'][-1])

+startfile.close()
+endfile.close()

+df1 = xr.open_mfdataset(floatfiles, chunks={'time': 100}, combine="nested", engine='netcdf4', concat_dim="time", parallel=True)
+df1 = df1.assign_coords(time=np.linspace(tstart, tend, (tend - tstart) + 1))
+df1.to_netcdf(os.path.join(folder, "FLOAT_" + str(ident) + "_" + str(start.year) + "_" + str(start.month) + "to" + str(end.year) + "_" + str(end.month) + ".nc"), mode='w', format="NETCDF4", engine="netcdf4", encoding={"z": {"dtype": "float32"}, "t": {"dtype": "float32"}, "u": {"dtype": "float32"}, "v": {"dtype": "float32"}, "w": {"dtype": "float32"}})

+df2 = xr.open_mfdataset(singlefiles, chunks={'time': 500}, combine="nested", engine='netcdf4', concat_dim="time", parallel=True)
+df2 = df2.assign_coords(time=np.linspace(tstart, tend, (tend - tstart) + 1))
+df2.to_netcdf(os.path.join(folder, "SINGLE_" + str(ident) + "_" + str(start.year) + "_" + str(start.month) + "to" + str(end.year) + "_" + str(end.month) + ".nc"), mode='w', format="NETCDF4", engine="netcdf4", encoding={"cbh": {"dtype": "float32"}, "hcc": {"dtype": "float32"}, "lcc": {"dtype": "float32"}, "mcc": {"dtype": "float32"}, "skt": {"dtype": "float32"}, "ssr": {"dtype": "float32"}, "str": {"dtype": "float32"}, "sp": {"dtype": "float32"}, "ssrd": {"dtype": "float32"}, "strdc": {"dtype": "float32"}, "strd": {"dtype": "float32"}, "tisr": {"dtype": "float32"}, "tsr": {"dtype": "float32"}, "ttr": {"dtype": "float32"}, "tcc": {"dtype": "float32"}, "fdir": {"dtype": "float32"}})

+startfile = Dataset(ascentfiles[0], "r", format="NETCDF4")
+endfile = Dataset(ascentfiles[-1], "r", format="NETCDF4")

+tstart = int(startfile.variables['time'][0])
+tend = int(endfile.variables['time'][-1])

+startfile.close()
+endfile.close()

+df3 = xr.open_mfdataset(ascentfiles, chunks={'time': 800}, combine="nested", engine='netcdf4', concat_dim="time", parallel=True)
+df3 = df3.assign_coords(time=np.linspace(tstart, tend, (tend - tstart) + 1))
+df3.to_netcdf(os.path.join(folder, "ASCENT_" + str(ident) + "_" + str(start.year) + "_" + str(start.month) + ".nc"), mode='w', format="NETCDF4", engine="netcdf4", encoding={"z": {"dtype": "float32"}, "t": {"dtype": "float32"}, "u": {"dtype": "float32"}, "v": {"dtype": "float32"}, "w": {"dtype": "float32"}})


+# DELETING ORIGINAL FILES:

+"""
+for (root, dirs, files) in os.walk("ERA5"):
+    for name in files:
+        if name in floatfiles + singlefiles + ascentfiles:
+            os.remove(name)
+        else:
+            pass
+"""
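After the per-month downloads, the new code walks the ERA5 folder, concatenates the monthly files along the time dimension with xarray, and rebuilds a single monotonically increasing time coordinate from the first and last raw time values. A minimal standalone sketch of that stitching step, assuming the ERA5 time variable is in whole hours and the files are contiguous with no gaps or overlaps; the file names here are hypothetical:

import numpy as np
import xarray as xr
from netCDF4 import Dataset

files = ["ERA5/float12.nc", "ERA5/float1.nc"]   # hypothetical monthly files, already in time order

# read the first and last raw time values (hours since the ERA5 epoch)
with Dataset(files[0], "r") as first, Dataset(files[-1], "r") as last:
    tstart = int(first.variables['time'][0])
    tend = int(last.variables['time'][-1])

# concatenate along time and replace the coordinate with one value per hour;
# this only lines up if the files cover the interval without gaps or overlaps
merged = xr.open_mfdataset(files, combine="nested", concat_dim="time", engine="netcdf4")
merged = merged.assign_coords(time=np.linspace(tstart, tend, (tend - tstart) + 1))
merged.to_netcdf("ERA5/FLOAT_merged.nc", format="NETCDF4")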
@@ -44,12 +44,14 @@ starttime = datetime.now()
 if not sys.warnoptions:
     warnings.simplefilter("ignore")

+"""
 data = pd.read_excel(r'C:\Users\marcel\PycharmProjects\MasterThesis\Data_PoGo2016.xls', sheet_name='SuperTIGER2')  # Tabelle3

 comp_time = pd.DataFrame(data, columns=['Time']).to_numpy().squeeze()
 comp_height = pd.DataFrame(data, columns=['Height']).to_numpy().squeeze()
 comp_lat = pd.DataFrame(data, columns=['Latitude']).to_numpy().squeeze()
 comp_lon = pd.DataFrame(data, columns=['Longitude']).to_numpy().squeeze()
+"""

 print("")
 print("INITIALISING SIMULATION...")
@@ -184,7 +186,7 @@ def ERA5Data(lon, lat, h, t, deltaT_ERA, flag_arr):
     interp4d_vw_z_post = np.ma.dot(w1, vw_z_float.vindex[t_post_ind, :, lat_ind1, lon_ind1].compute()) / np.sum(w1)
     interp4d_vw_z = (interp4d_vw_z_post - interp4d_vw_z_pre) * (t_epoch - t_pre) + interp4d_vw_z_pre

-    pressure_hPa = np.array([1, 2, 3, 5, 7, 10, 20, 30])  # !!!
+    pressure_hPa = np.array([1, 2, 3, 5, 7, 10, 20])  # !!!

     pressure = 100 * pressure_hPa

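For context, the lines above interpolate the vertical-wind profile linearly between the two ERA5 time steps that bracket t_epoch; the bare (t_epoch - t_pre) weight is only a valid interpolation factor if consecutive time steps are one unit (one hour) apart. A small illustrative helper, with hypothetical names not taken from the script:

import numpy as np

def interp_in_time(value_pre, value_post, t_epoch, t_pre, step=1.0):
    """Linearly interpolate between two bracketing time steps.

    With hourly ERA5 data the step is 1, so the weight reduces to
    (t_epoch - t_pre), matching the expression used in ERA5Data.
    """
    w = (t_epoch - t_pre) / step
    return (value_post - value_pre) * w + value_pre

# e.g. a profile halfway between two hourly steps
half = interp_in_time(np.zeros(7), np.ones(7), t_epoch=10.5, t_pre=10.0)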
@@ -1068,7 +1070,7 @@ df1 = pd.DataFrame(data={
 df1.to_excel("output.xlsx")

 plt.plot(sol.t, sol.y[2, :], 'k--', label='Simulation')
-plt.plot(comp_time, comp_height, 'r-', label='PoGo+ Flight Test')
+# plt.plot(comp_time, comp_height, 'r-', label='PoGo+ Flight Test')
 plt.legend()
 plt.title('high factor')
 plt.xlabel('time in s')
@@ -1084,6 +1086,6 @@ ax.set_extent([-120, 30, 60, 80], crs=ccrs.PlateCarree())

 plt.plot(start_lon, start_lat, 'rx', transform=ccrs.Geodetic())
 plt.plot(sol.y[0, :], sol.y[1, :], 'k--', transform=ccrs.Geodetic())
-plt.plot(comp_lon, comp_lat, 'r-', transform=ccrs.Geodetic())
+# plt.plot(comp_lon, comp_lat, 'r-', transform=ccrs.Geodetic())
 # plt.savefig(os.path.join(rootdir, figname))
 plt.show()
@@ -12,6 +12,9 @@ matplotlib 3.3.2
 astropy 4.1
 netcdf4 1.3.3
 cdsapi 0.2.7 (*)
+dask 2.20
+openpyxl 3.0.4
+pyfiglet 0.8

 Alternatively, use ANACONDA and the environment(*.yml)-file in this repository.
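A quick way to confirm that the newly listed dependencies are installed at the stated (or newer) versions; this check is a hedged suggestion, not part of the repository:

from importlib.metadata import version

# dask, openpyxl and pyfiglet are the packages added to the requirements list above
for pkg in ("dask", "openpyxl", "pyfiglet"):
    print(pkg, version(pkg))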