output pdf figures and updated density plot
BaptisteVandecrux committed Jan 15, 2022
1 parent c70c703 commit 08ee182
Showing 3 changed files with 165 additions and 21 deletions.
11 changes: 7 additions & 4 deletions FirnCoVer_analysis.py
@@ -102,7 +102,7 @@
     compaction_df.loc[erroneous_periods[i][0],'compaction_borehole_length_m'].plot(marker='o')
     tmp.plot(marker='o')
     plt.title('Instrument '+str(erroneous_periods[i][0]))
-    fig.savefig('err_instr_'+str(erroneous_periods[i][0])+'.png')
+    fig.savefig('figures/err_instr_'+str(erroneous_periods[i][0])+'.png')
 # %% Removing erroneous periods from the analysis
 compaction_df.loc[13,'compaction_borehole_length_m'].loc['2018-02-20':] = np.nan
 compaction_df.loc[10,'compaction_borehole_length_m'].loc['2019-07-29':] = np.nan
@@ -157,7 +157,8 @@
 for k in range(np.size(ax)):
     i,j = np.unravel_index(k, ax.shape)
     ax[i,j].set_ylim((-1.25,0))
-f.savefig('figures/borehole_shortening_m.tiff', dpi=600, format="tiff", pil_kwargs={"compression": "tiff_lzw"})
+f.savefig('figures/fig4_borehole_shortening_m.tiff', dpi=600, format="tiff", pil_kwargs={"compression": "tiff_lzw"})
+f.savefig('figures/fig4_borehole_shortening_m.pdf')


#%% daily compaction
@@ -183,7 +184,8 @@
 ax[2,0].set_ylim((0, 2))
 ax[1,1].set_ylim((0, 2))
 ax[3,1].set_ylim((0,2))
-f.savefig('figures/daily_compaction_md_smoothed.tiff', dpi=600, format="tiff", pil_kwargs={"compression": "tiff_lzw"})
+f.savefig('figures/fig5_daily_compaction_md_smoothed.tiff', dpi=600, format="tiff", pil_kwargs={"compression": "tiff_lzw"})
+f.savefig('figures/fig5_daily_compaction_md_smoothed.pdf')

 #%% print periods where the tower was not working
 for instr_nr in np.array([32, 7, 11,17]):
@@ -255,7 +257,8 @@
         ax[i,j].set_xticklabels("")
 f1.text(0.02, 0.5, 'Daily air temperature ($^o$C)', va='center', rotation='vertical', size = 20, color = color1)
 f1.text(0.95, 0.5, 'Surface height (m)', va='center', rotation='vertical', size = 20, color = color2)
-f1.savefig('figures/fig5_Ta_HS.tiff', dpi=600, format="tiff", pil_kwargs={"compression": "tiff_lzw"})
+f1.savefig('figures/fig6_Ta_HS.tiff', dpi=600, format="tiff", pil_kwargs={"compression": "tiff_lzw"})
+f1.savefig('figures/fig6_Ta_HS.pdf')

 #%% firn temperature

8 changes: 4 additions & 4 deletions FirnCover_lib.py
@@ -314,15 +314,15 @@ def multi_plot(inst_meta_df, compaction_df, var = 'daily_compaction_md',
         ini_depth = inst_meta_df.loc[inst_meta_df.index.values == instr_nr,
                                      'borehole_initial_length_m']
         ax[i,j].plot(compaction_df.loc[instr_nr,var].resample('D').asfreq(),
-                     label='instr. '+str(instr_nr)+', Ini. len.: %0.1f m'%abs(ini_depth))
+                     label='instr. '+str(instr_nr)+', ini. len.: %0.1f m'%abs(ini_depth))
         if var2:
             ax[i,j].plot(compaction_df.loc[instr_nr,var2].resample('D').asfreq())

         if site in ['EKT', 'KAN-U', 'DYE-2']:
-            ax[i,j].legend(loc='upper left',ncol=2, fontsize =10)
+            ax[i,j].legend(loc='upper left',ncol=2, fontsize =12)
         else:
-            ax[i,j].legend(loc='upper left',fontsize =10)
-
+            ax[i,j].legend(loc='upper left',fontsize =12)
+            # ax[i,j].legend(loc='lower left',ncol=1, fontsize =12)
         ax[i,j].grid(True)
         ax[i,j].set_title(site)
         if site == 'Crawford':
167 changes: 154 additions & 13 deletions plot_density.py
@@ -1,29 +1,170 @@
#! /usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright © 2019 Max Stevens <[email protected]>
#
# Distributed under terms of the MIT license.

"""
@author: [email protected]
Created on %(date)s
@author: %(username)s
tip list:
%matplotlib inline
%matplotlib qt
import pdb; pdb.set_trace()
"""

import netCDF4 as nc
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt


'''
This script processes the sumup netCDF-formatted database of firn cores and puts
them into two pandas dataframes (one for Antarctica and one for Greenland).
The data is saved in a pickle for easy reloading (the processing is slow because
the script needs to find unique values).
This may be of use in other scripts as an easy way to pull in data from the
sumup database.
The changes that I make from the original sumup database are:
- There is a core from the 1950s (likely a Benson core from Greenland) that has
  the lat/lon listed as -9999. I set the coordinates of that core to 75N, 60W.
- For cores that do not have a specific date, Lynn put 00 for the month and day.
  I use January 1 for all of those cores so that I can create a datetime object.
Optionally, you can choose to write metadata to a csv file that can be imported
into Google Earth to see the location of each core.
I have not done this, but it should be easy to do a csv write with only cores
e.g. deeper than some depth.
'''
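# The csv export mentioned above is not implemented in this commit; the sketch
# below is illustrative only. It assumes the columns built later in this script
# ('name', 'latitude', 'longitude', 'maxdepth'); the helper name, output path
# and 10 m cutoff are placeholders, not part of the original code.
def write_core_metadata_csv(df, path='data/core_locations.csv', min_depth=10.0):
    # Keep one row per core deeper than min_depth, as a lat/lon table that
    # Google Earth can import.
    cols = ['name', 'latitude', 'longitude', 'maxdepth']
    meta = df.loc[df.maxdepth > min_depth, cols].drop_duplicates(subset='name')
    meta.to_csv(path, index=False)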

### Load the data.
su = nc.Dataset('data/sumup_density_2020.nc','r')  # read-only access is sufficient here
df = pd.DataFrame()

df['latitude']=su['Latitude'][:]
df['longitude']=su['Longitude'][:]
df['elevation']=su['Elevation'][:]
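# Dates are stored as YYYYMMDD integers; some records carry invalid month or
# day fields (month 00/90/96, day 00/32), which are mapped to valid values below.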
date = su['Date'][:].astype(int).astype(str)
for kk,dd in enumerate(date):
    yr = dd[0:4]
    mo = dd[4:6]
    dy = dd[6:]
    if mo=='00':
        mo = '01'
    elif mo=='90':
        mo = '01'
    elif mo=='96':
        mo = '06'
    if dy == '00':
        dy = '01'
    elif dy == '32':
        dy = '31'
    date[kk] = yr+mo+dy
df['date'] = pd.to_datetime(date)

df['density']=su['Density'][:]
df['depth_top']=su['Start_Depth'][:]
df['depth_bot']=su['Stop_Depth'][:]
df['depth_mid']=su['Midpoint'][:]
df['error']=su['Error'][:]
df['citation_num']=su['Citation'][:]
df[df==-9999]=np.nan

# The core with lat/lon listed as -9999 (see the header) is moved to 75N, 60W.
ind = df.latitude < -100
df.loc[ind,'longitude'] = -60
df.loc[ind,'latitude'] = 75

ind = np.isnan(df.depth_mid)
df.loc[ind,'depth_mid'] = df.loc[ind,'depth_top'] + (df.loc[ind,'depth_bot'] - df.loc[ind,'depth_top'])/2

df = df.loc[df.latitude>0]
df=df.reset_index()
df = df.loc[np.isin(df.citation_num, [45, 47, 48])]
df=df.loc[df['index'] != 838078, :]
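# A new core starts wherever the mid-depth jumps back toward the surface by
# more than 0.2 m; the cumulative count of those jumps serves as a core id.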
df['core_id'] = ((df.depth_mid.diff()<-0.2)+0).cumsum()

#%%
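# Sanity-check plot of the depth columns and of the core_id segmentation.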
plt.figure()
df.depth_top.plot()
df.depth_bot.plot()
df.depth_mid.plot()
df.depth_mid.diff().plot()
((df.depth_mid.diff()<-0.2)+0).cumsum().plot(secondary_y=True)

df_save = df.copy()
#%%
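# Restore the dataframe checkpointed above so this cell can be re-run
# without re-reading the netCDF file.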
df = df_save.copy()

# Give each core a name derived from its citation
sumup_citations = pd.read_csv('data/sumup_citations.txt', sep=';.-', header=None,
                              engine='python', encoding='cp1252')  # 'ANSI' is not a valid Python codec; cp1252 is the usual Windows-ANSI equivalent

df['citation'] = [sumup_citations.values[int(i)-1][0] for i in df['citation_num']]
df['name'] = [c.split(' ')[0].split(',')[0] +'_'+str(d.year) for c,d in zip(df['citation'],df['date'])]

# When a citation contains several cores, append a counter to make names unique.
for source in df.citation.unique():
    core_id_in_source = df.loc[df.citation==source].core_id
    if len(core_id_in_source.unique())>1:
        df.loc[df.citation==source, 'name'] = [n1 + '_' + str(n2 - core_id_in_source.min() + 1)
                                               for n1,n2 in zip(df.loc[df.citation==source, 'name'], core_id_in_source)]

df['maxdepth'] = np.nan
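# Record each core's maximum logged depth (bottom or midpoint of its last measurement).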
for core in df.core_id.unique():
    maxdepth = np.max((df.loc[df.core_id==core].depth_bot.values[-1],
                       df.loc[df.core_id==core].depth_mid.values[-1]))
    df.loc[df.core_id==core,'maxdepth']=maxdepth
df = df.set_index('core_id')
# df = df.drop(columns=('level_0','index'))

site_list = pd.read_csv('data/FirnCover_location.csv')

df['site'] = ''
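# dist is a squared angular distance in degrees; the test below scales it by
# 111*1000 (~meters per degree) as a rough proximity check between each core
# and the FirnCover sites.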
for core in df.index.get_level_values(0).unique():
    lon_core = df.loc[core].longitude.values[0]
    lat_core = df.loc[core].latitude.values[0]
    dist = (site_list.longitude-lon_core)**2 + (site_list.latitude-lat_core)**2
    if np.min(dist)*111*1000<200:
        closest = site_list.loc[dist == np.min(dist), 'site'].values
    else:
        closest = []
    if len(closest)>0:
        df.loc[core,'site'] = closest[0]
    print(core, np.floor(np.min(dist)*111*1000), df.loc[core,'site'].iloc[0])

tmp, ind = np.unique(df.name,return_index=True)
core_list = df.iloc[ind,:].reset_index()
core_list.to_csv('data/sumup_firncover.csv')

df['name'] = [site +' '+str(d.year) for site, d in zip(df['site'],df['date'])]
df = df.loc[df.site!='',:]

# %% Plotting

df_dens = pd.read_csv('data/sumup_greenland.csv')
fig, ax = plt.subplots(2,4,figsize=(9,6), sharex=True, sharey=True)
plt.subplots_adjust(left=0.08, right=0.95, top=0.95,
                    bottom = 0.1, wspace=0.15, hspace=0.2)
ax = ax.flatten()

cores = [0, 5, 8, 13, 22, 27, 37, 30]
# cores = core_list.loc[core_list.site == 'KAN_U','core_id']
for count, core in enumerate(cores):
    ax[count].step(df.loc[core].density*1000, -df.loc[core].depth_mid, where='mid')
    ax[count].set_title(df.loc[core].name.unique()[0])
    ax[count].set_xlim(200,950)
    ax[count].set_ylim(-15, 0)
    ax[count].grid()
fig.text(0.5, 0.02, 'Density (kg m$^{-3}$)', ha='center', size = 12)
fig.text(0.02, 0.5, 'Depth (m)', va='center', rotation='vertical', size = 12)
plt.savefig('core_all.png')

df_dens = df_dens.loc[np.isin(df_dens.Citation, [45, 47])]
coreid = df_dens.coreid.unique()


fig, ax = plt.subplots(1,6)
for ind in coreid:
    df = df_dens.loc[df_dens.coreid==ind,:]
    break

