
Compare revisions

Changes are shown as if the source revision was being merged into the target revision.

Showing 388 additions and 1184 deletions
You can't use the standard diagnostic options with the current cycling period.
Choose the "Custom" option, copy the file_def*.xml files that
you want to use into the Rose app and adjust the meaning period
accordingly.
<?xml version="1.0"?>
<simulation>
<!-- ============================================================================================ -->
<!-- XIOS context -->
......@@ -8,12 +8,12 @@
<context id="xios" >
<variable_definition>
<variable id="info_level" type="int">10</variable>
<variable id="using_server" type="bool">false</variable>
<variable id="using_oasis" type="bool">false</variable>
<variable id="oasis_codes_id" type="string" >oceanx</variable>
<variable id="info_level" type="int">-1</variable>
<variable id="using_server" type="bool">true</variable>
<variable id="using_oasis" type="bool">false</variable>
<variable id="oasis_codes_id" type="string" >oceanx</variable>
</variable_definition>
</context>
......
<?xml version="1.0"?>
<simulation>
<!-- ============================================================================================ -->
<!-- XIOS context -->
<!-- ============================================================================================ -->
<context id="xios" >
<variable_definition>
<variable id="info_level" type="int">-1</variable>
<variable id="using_server" type="bool">true</variable>
<variable id="using_oasis" type="bool">true</variable>
<variable id="oasis_codes_id" type="string" >toyoce</variable>
</variable_definition>
</context>
<!-- ============================================================================================ -->
<!-- NEMO CONTEXT add and suppress the components you need -->
<!-- ============================================================================================ -->
<context id="nemo" src="./context_nemo.xml"/> <!-- NEMO -->
</simulation>
../../SHARED/domain_def_nemo.xml
\ No newline at end of file
../../SHARED/field_def_nemo-ice.xml
\ No newline at end of file
../../SHARED/field_def_nemo-oce.xml
\ No newline at end of file
<?xml version="1.0"?>
<!--
============================================================================================================
= output files definition =
= Define your own files for ocean dynamics context =
= put the variables you want... =
============================================================================================================
-->
<file_definition type="one_file" name="@expname@_@freq@_@startdate@_@enddate@" sync_freq="5d" min_digits="4">
<file_group id="5d" output_freq="5d" output_level="10" enabled=".TRUE."> <!-- 5d files -->
<file id="file11" name_suffix="_grid_T" description="ocean T grid variables" >
<field field_ref="e3t" />
<field field_ref="toce" name="thetao" />
<field field_ref="soce" name="so" />
<field field_ref="sst" name="tos" />
<field field_ref="sss" name="sos" />
<field field_ref="ssh" name="zos" />
<field field_ref="mldkz5" />
<field field_ref="mldr10_1" />
<field field_ref="sbt" />
<field field_ref="heatc" name="heatc" />
<field field_ref="saltc" name="saltc" />
<field field_ref="empmr" name="wfo" />
<field field_ref="qsr_oce" name="qsr_oce" />
<field field_ref="qns_oce" name="qns_oce" />
<field field_ref="qt_oce" name="qt_oce" />
<field field_ref="saltflx" name="sfx" />
<field field_ref="taum" name="taum" />
<field field_ref="wspd" name="windsp" />
<field field_ref="precip" name="precip" />
<!-- ice and snow -->
<field field_ref="snowpre" />
</file>
<file id="file12" name_suffix="_grid_U" description="ocean U grid variables" >
<field field_ref="e3u" />
<field field_ref="ssu" name="uos" />
<field field_ref="uoce" name="uo" />
<field field_ref="utau" name="tauuo" />
</file>
<file id="file13" name_suffix="_grid_V" description="ocean V grid variables" >
<field field_ref="e3v" />
<field field_ref="ssv" name="vos" />
<field field_ref="voce" name="vo" />
<field field_ref="vtau" name="tauvo" />
</file>
<file id="file14" name_suffix="_grid_ABL" description="ABL grid variables" >
<field field_ref="u_abl" />
<field field_ref="v_abl" />
<field field_ref="t_abl" />
<field field_ref="q_abl" />
<field field_ref="uvz1_abl" />
<field field_ref="tz1_abl" />
<field field_ref="qz1_abl" />
<field field_ref="uvz1_dta" />
<field field_ref="tz1_dta" />
<field field_ref="qz1_dta" />
<field field_ref="pblh" />
</file>
</file_group>
</file_definition>
../../SHARED/grid_def_nemo.xml
\ No newline at end of file
!!>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
!! SI3 configuration namelist: Overwrites SHARED/namelist_ice_ref
!! 1 - Generic parameters (nampar)
!! 2 - Ice thickness discretization (namitd)
!! 3 - Ice dynamics (namdyn)
!! 4 - Ice ridging/rafting (namdyn_rdgrft)
!! 5 - Ice rheology (namdyn_rhg)
!! 6 - Ice advection (namdyn_adv)
!! 7 - Ice surface boundary conditions (namsbc)
!! 8 - Ice thermodynamics (namthd)
!! 9 - Ice heat diffusion (namthd_zdf)
!! 10 - Ice lateral melting (namthd_da)
!! 11 - Ice growth in open water (namthd_do)
!! 12 - Ice salinity (namthd_sal)
!! 13 - Ice melt ponds (namthd_pnd)
!! 14 - Ice initialization (namini)
!! 15 - Ice/snow albedos (namalb)
!! 16 - Ice diagnostics (namdia)
!!>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
!
!------------------------------------------------------------------------------
&nampar ! Generic parameters
!------------------------------------------------------------------------------
/
!------------------------------------------------------------------------------
&namitd ! Ice discretization
!------------------------------------------------------------------------------
/
!------------------------------------------------------------------------------
&namdyn ! Ice dynamics
!------------------------------------------------------------------------------
/
!------------------------------------------------------------------------------
&namdyn_rdgrft ! Ice ridging/rafting
!------------------------------------------------------------------------------
/
!------------------------------------------------------------------------------
&namdyn_rhg ! Ice rheology
!------------------------------------------------------------------------------
/
!------------------------------------------------------------------------------
&namdyn_adv ! Ice advection
!------------------------------------------------------------------------------
/
!------------------------------------------------------------------------------
&namsbc ! Ice surface boundary conditions
!------------------------------------------------------------------------------
/
!------------------------------------------------------------------------------
&namthd ! Ice thermodynamics
!------------------------------------------------------------------------------
/
!------------------------------------------------------------------------------
&namthd_zdf ! Ice heat diffusion
!------------------------------------------------------------------------------
/
!------------------------------------------------------------------------------
&namthd_da ! Ice lateral melting
!------------------------------------------------------------------------------
/
!------------------------------------------------------------------------------
&namthd_do ! Ice growth in open water
!------------------------------------------------------------------------------
/
!------------------------------------------------------------------------------
&namthd_sal ! Ice salinity
!------------------------------------------------------------------------------
/
!------------------------------------------------------------------------------
&namthd_pnd ! Melt ponds
!------------------------------------------------------------------------------
/
!------------------------------------------------------------------------------
&namini ! Ice initialization
!------------------------------------------------------------------------------
/
!------------------------------------------------------------------------------
&namalb ! albedo parameters
!------------------------------------------------------------------------------
/
!------------------------------------------------------------------------------
&namdia ! Diagnostics
!------------------------------------------------------------------------------
/
../../SHARED/namelist_ice_ref
\ No newline at end of file
../../SHARED/namelist_ref
\ No newline at end of file
bld::tool::fppkeys key_si3 key_xios key_qco
......@@ -113,8 +113,8 @@
sn_trcsbc(7) = 'dust.orca.new' , -1 , 'dustsi' , .true. , .true. , 'yearly' , '' , '' , ''
sn_trcsbc(14) = 'dust.orca.new' , -1 , 'dustfer' , .true. , .true. , 'yearly' , '' , '' , ''
sn_trcsbc(23) = 'ndeposition.orca', -12 , 'ndep' , .false. , .true. , 'yearly' , '' , '' , ''
rn_trsfac(5) = 7.9258065e-02 ! ( 0.021 / 31. * 117 )
rn_trsfac(7) = 3.1316726e-01 ! ( 8.8 / 28.1 )
rn_trsfac(5) = 3.774194e-02 ! ( 1E-3 / 31. * 117 )
rn_trsfac(7) = 9.572954e-03 ! ( 0.269 / 28.1 )
rn_trsfac(14) = 6.2667860e-04 ! ( 0.035 / 55.85 )
rn_trsfac(23) = 5.2232143e-01 ! ( From kgN m-2 s-1 to molC l-1 ====> zfact = 7.3125/14 )
rn_sbc_time = 1. ! Time scaling factor for SBC and CBC data (seconds in a day)
......
......@@ -113,8 +113,8 @@
sn_trcsbc(7) = 'dust.orca.new' , -1 , 'dustsi' , .true. , .true. , 'yearly' , '' , '' , ''
sn_trcsbc(14) = 'dust.orca.new' , -1 , 'dustfer' , .true. , .true. , 'yearly' , '' , '' , ''
sn_trcsbc(23) = 'ndeposition.orca', -12 , 'ndep' , .false. , .true. , 'yearly' , '' , '' , ''
rn_trsfac(5) = 7.9258065e-02 ! ( 0.021 / 31. * 117 )
rn_trsfac(7) = 3.1316726e-01 ! ( 8.8 / 28.1 )
rn_trsfac(5) = 3.774194e-02 ! ( 1E-3 / 31. * 117 )
rn_trsfac(7) = 9.572954e-03 ! ( 0.269 / 28.1 )
rn_trsfac(14) = 6.2667860e-04 ! ( 0.035 / 55.85 )
rn_trsfac(23) = 5.2232143e-01 ! ( From kgN m-2 s-1 to molC l-1 ====> zfact = 7.3125/14 )
rn_sbc_time = 1. ! Time scaling factor for SBC and CBC data (seconds in a day)
......
********************************
Run the Reference configurations
********************************
.. todo::
Lack of illustrations for ref. cfgs, and more generally in the guide.
NEMO is distributed with a set of reference configurations allowing both
users to set up their own first applications and
developers to test/validate NEMO developments (using the SETTE package).
.. contents::
:local:
:depth: 1
.. attention::
The NEMO System Team is in charge only of the so-called reference configurations described below.
.. hint::
Configurations developed by external research projects or initiatives that
make use of NEMO are welcome to be publicized through the website by
filling in the form :website:`to add an associated project<projects/add>`.
How to compile an experiment from a reference configuration
===========================================================
To compile the ORCA2_ICE_PISCES_ reference configuration using :file:`makenemo`,
use the following command, selecting one of the available architecture files or
providing a user-defined one:
.. code-block:: console
$ ./makenemo -r 'ORCA2_ICE_PISCES' -m 'my_arch' -j '4'
A new ``EXP00`` folder will be created within the selected reference configuration,
namely ``./cfgs/ORCA2_ICE_PISCES/EXP00``.
Before running, it is necessary to uncompress the archives listed in the table below, which
contain the input & forcing files for the given reference configuration.
The execution of the model can then be launched through a runscript
(suitably adapted to the user's system).
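As an illustrative sketch only (archive names are taken from the table below; the download location, extraction paths and the MPI launch line are assumptions to be adapted to your own setup):

.. code-block:: console

   $ # extract the input & forcing archives into the experiment folder
   $ tar -xvf ORCA2_ICE_v4.0.tar     -C ./cfgs/ORCA2_ICE_PISCES/EXP00
   $ tar -xvf INPUTS_PISCES_v4.0.tar -C ./cfgs/ORCA2_ICE_PISCES/EXP00
   $ # then launch the model with a runscript adapted to your system, e.g.
   $ cd ./cfgs/ORCA2_ICE_PISCES/EXP00 && mpirun -np 32 ./nemo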
List of Configurations
======================
All the forcing files listed in the table below are available from |DOI data|_.
=================== === === === === === ==================================
Configuration Component(s) Archives (input & forcing files)
------------------- ------------------- ----------------------------------
Name O S T P A
=================== === === === === === ==================================
AGRIF_DEMO_ X X X AGRIF_DEMO_v4.0.tar,
ORCA2_ICE_v4.0.tar
AMM12_ X AMM12_v4.0.tar
C1D_PAPA_ X INPUTS_C1D_PAPA_v4.0.tar
GYRE_BFM_ X X *none*
GYRE_PISCES_ X X X *none*
ORCA2_ICE_PISCES_ X X X X ORCA2_ICE_v4.0.tar,
INPUTS_PISCES_v4.0.tar
ORCA2_OFF_PISCES_ X X ORCA2_OFF_v4.0.tar,
INPUTS_PISCES_v4.0.tar
ORCA2_OFF_TRC_ X ORCA2_OFF_v4.0.tar
ORCA2_SAS_ICE_ X ORCA2_ICE_v4.0.tar,
INPUTS_SAS_v4.0.tar
SPITZ12_ X X SPITZ12_v4.0.tar
=================== === === === === === ==================================
.. admonition:: Legend for component combination
O for OCE, S for SI\ :sup:`3`, T for TOP, P for PISCES and A for AGRIF
AGRIF_DEMO
----------
``AGRIF_DEMO`` is based on the ``ORCA2_ICE_PISCES`` global configuration at 2° resolution, with
the inclusion of 3 online nested grids to demonstrate the overall capabilities of AGRIF in
a realistic context (including the nesting of sea ice models).
The configuration includes a 1:1 grid in the Pacific and two successively nested grids with
odd and even refinement ratios over the Arctic Ocean,
with the finest grid spanning the whole Svalbard archipelago, which is of
particular interest for testing sea ice coupling.
.. image:: _static/AGRIF_DEMO_no_cap.jpg
:scale: 66%
:align: center
The 1:1 grid can be used alone as a benchmark to check that
the model solution is not corrupted by grid exchanges.
Note that since grids interact only at the baroclinic time level,
numerically exact results cannot be achieved in the 1:1 case.
Perfect reproducibility is obtained only by switching to a fully explicit setup instead of
a split-explicit free surface scheme.
AMM12
-----
``AMM12`` stands for *Atlantic Margin Model at 12 km*: a regional configuration covering
the Northwest European Shelf domain on
a regular horizontal grid of ~12 km resolution (see :cite:`ODEA2012`).
.. image:: _static/AMM_domain.png
:align: center
This configuration allows testing several NEMO features specifically aimed at the shelf seas.
In particular, ``AMM12`` makes use of the vertical s-coordinate system, the GLS turbulence scheme, and
tidal lateral boundary conditions using a Flather scheme (see more in ``BDY``).
Boundaries may be completely omitted by setting ``ln_bdy = .false.`` in ``nambdy``.
Sample surface fluxes, river forcing and an initial restart file are included to test a realistic model run
(``AMM12_v4.0.tar``).
Note that the Baltic boundary is included within the river input file and is specified as a river source,
but unlike ordinary river points, the Baltic inputs also include salinity and temperature data.
C1D_PAPA
--------
.. figure:: _static/Papa2015.jpg
:height: 225px
:align: left
``C1D_PAPA`` is a 1D configuration for the `PAPA station`_ located in
the north-eastern Pacific Ocean at 50.1°N, 144.9°W.
See :gmd:`Reffray et al. (2015) <8/69/2015>` for the description of
its physical and numerical turbulent-mixing behaviour.
| The water column setup, called NEMO1D, is activated by
setting ``ln_c1d = .true.`` in ``namdom`` and
has a horizontal domain of 1x1 grid point.
| This reference configuration uses a 75-level vertical grid (1 m at the surface),
the GLS turbulence scheme with k-epsilon closure and the NCAR bulk formulae.
The data provided in the ``INPUTS_C1D_PAPA_v4.2.tar`` file include:
- :file:`forcing_PAPASTATION_1h_y201[0-1].nc`:
ECMWF operational analysis atmospheric forcing rescaled to 1h
(with long and short waves flux correction) for years 2010 and 2011
- :file:`init_PAPASTATION_m06d15.nc`: Initial Conditions from
observed data and Levitus 2009 climatology
- :file:`chlorophyll_PAPASTATION.nc`: surface chlorophyll file from Seawifs data
GYRE_BFM
--------
``GYRE_BFM`` shares the same physical setup as GYRE_PISCES_,
but NEMO is coupled with the `BFM`_ biogeochemical model as described in ``./cfgs/GYRE_BFM/README``.
GYRE_PISCES
-----------
``GYRE_PISCES`` is an idealized configuration representing a Northern-hemisphere double-gyre system
in the beta-plane approximation, with a regular 1° horizontal resolution, 31 vertical levels and
the PISCES BGC model :cite:`gmd-8-2465-2015`.
Analytical forcing for the heat, freshwater and wind-stress fields is applied.
This configuration also acts as a demonstrator of the **user defined setup**
(``ln_read_cfg = .false.``): the grid settings are handled through
the ``&namusr_def`` controls in :file:`namelist_cfg`:
.. literalinclude:: ../../../cfgs/GYRE_PISCES/EXPREF/namelist_cfg
:language: fortran
:lines: 35-41
Note that the default grid size is 30x20 grid points (with ``nn_GYRE = 1``) and
the number of vertical levels is set by ``jpkglo``.
The specific code changes can be inspected in :file:`./src/OCE/USR`.
.. rubric:: Running GYRE as a benchmark
| This simple configuration can be used as a benchmark since it is easy to increase resolution,
with the drawback that the results have very limited physical meaning.
| GYRE grid resolution can be increased at runtime by setting a different value of ``nn_GYRE``
(integer multiplier scaling factor), as described in the following table:
=========== ============ ============ ============ ===============
``nn_GYRE`` ``jpiglo``   ``jpjglo``   ``jpkglo``   Equivalent to
=========== ============ ============ ============ ===============
1           30           20           31           GYRE 1°
25          750          500          101          ORCA 1/2°
50          1500         1000         101          ORCA 1/4°
150         4500         3000         101          ORCA 1/12°
200         6000         4000         101          ORCA 1/16°
=========== ============ ============ ============ ===============
| Note that it is necessary to set ``ln_bench = .true.`` in ``&namusr_def`` to
avoid problems in the physics computation, and that
the model time step should be rescaled accordingly.
| For example, if ``nn_GYRE = 150``, equivalent to an ORCA 1/12° grid,
the time step ``rn_rdt`` should be set to 1200 seconds.
Unlike previous versions of NEMO, the code uses the time-splitting scheme by default and
internally computes the number of sub-steps.
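As a minimal sketch of the benchmark settings discussed above (parameter values are taken from the text and the table; the exact namelist layout may differ between NEMO versions), a 1/12°-equivalent benchmark run could combine:

.. code-block:: fortran

   !-----------------------------------------------------------------------
   &namusr_def    !   GYRE user defined namelist
   !-----------------------------------------------------------------------
      nn_GYRE  = 150      ! grid multiplier: 4500 x 3000 points, comparable to ORCA 1/12°
      ln_bench = .true.   ! benchmark mode, needed to avoid problems in the physics computation
      jpkglo   = 101      ! number of vertical levels
   /
   !-----------------------------------------------------------------------
   &namdom        !   time and space domain
   !-----------------------------------------------------------------------
      rn_rdt   = 1200.    ! rescaled time step [s] for the ORCA 1/12°-equivalent grid
   /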
ORCA2_ICE_PISCES
----------------
``ORCA2_ICE_PISCES`` is a reference configuration for the global ocean with
a 2°x2° curvilinear horizontal mesh and 31 vertical levels,
distributed using the z-coordinate system and with 10 levels in the top 100 m.
ORCA is the generic name given to the global ocean Mercator mesh
(i.e. the meridian scale factor varies as the cosine of the latitude),
with two poles in the northern hemisphere so that
the ratio of anisotropy is nearly one everywhere.
This configuration uses three components:
- |OCE|, the ocean dynamical core
- |ICE|, the thermodynamic-dynamic sea ice model
- |MBG|, the passive tracer transport module and the PISCES BGC model :cite:`gmd-8-2465-2015`
All components share the same grid.
The model is forced with the CORE-II normal-year atmospheric forcing and
uses the NCAR bulk formulae.
.. rubric:: Ocean Physics
:horizontal diffusion on momentum:
the eddy viscosity coefficient depends on the geographical position.
It is taken as 40000 m\ :sup:`2`/s, reduced in the equator regions (2000 m\ :sup:`2`/s)
except near the western boundaries.
:isopycnal diffusion on tracers:
the diffusion acts along the isopycnal surfaces (neutral surface) with
an eddy diffusivity coefficient of 2000 m\ :sup:`2`/s.
:Eddy induced velocity parametrization:
With a coefficient that depends on the growth rate of baroclinic instabilities
(it usually varies from 15 m\ :sup:`2`/s to 3000 m\ :sup:`2`/s).
:lateral boundary conditions:
Zero fluxes of heat and salt and no-slip conditions are applied through lateral solid boundaries.
:bottom boundary condition:
Zero fluxes of heat and salt are applied through the ocean bottom.
The Beckmann [19XX] simple bottom boundary layer parameterization is applied along
continental slopes.
A linear friction is applied on momentum.
:convection:
The vertical eddy viscosity and diffusivity coefficients are increased to 1 m\ :sup:`2`/s in
case of static instability.
:time step: 5400 s (1h30'), so that there are 16 time steps in one day.
ORCA2_OFF_PISCES
----------------
``ORCA2_OFF_PISCES`` shares the same general offline configuration as ``ORCA2_OFF_TRC``,
but only the PISCES model is an active component of TOP.
ORCA2_OFF_TRC
-------------
| ``ORCA2_OFF_TRC`` is based on the ORCA2 global ocean configuration
(see ORCA2_ICE_PISCES_ for a general description) along with
the passive tracer transport module (TOP),
but the dynamical fields are pre-calculated and read at a specific time frequency.
| This enables an offline coupling of TOP components,
here specifically inorganic carbon compounds (CFC11, CFC12, SF6, C14) and the water age module (age).
See :file:`namelist_top_cfg` to inspect the selection of
each component with the dedicated logical keys; an indicative sketch is given below.
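For illustration only (the ``&namtrc`` block and the ``ln_*`` key names are assumed from the reference ``namelist_top_ref`` and may differ between NEMO versions), the selection could look like:

.. code-block:: fortran

   !-----------------------------------------------------------------------
   &namtrc        !   tracers definition
   !-----------------------------------------------------------------------
      ln_age    = .true.   ! water age module
      ln_cfc11  = .true.   ! CFC11 tracer
      ln_cfc12  = .true.   ! CFC12 tracer
      ln_sf6    = .true.   ! SF6 tracer
      ln_c14    = .true.   ! C14 tracer
   /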
Pre-calculated dynamical fields are provided to NEMO through
the namelist ``&namdta_dyn`` in :file:`namelist_cfg`,
in this case with a 5-day frequency (120 hours):
.. literalinclude:: ../../namelists/namdta_dyn
:language: fortran
The input dynamical fields for this configuration (:file:`ORCA2_OFF_v4.0.tar`) come from
a 2000-year-long climatological simulation of ORCA2_ICE using ERA40 atmospheric forcing.
| Note that this configuration uses by default the linear free surface (``ln_linssh = .true.``), assuming that
the model mesh does not vary in time, and
it includes the bottom boundary layer parameterization (``ln_trabbl = .true.``), which
requires the provision of BBL coefficients through the ``sn_ubl`` and ``sn_vbl`` fields.
| It is also possible to activate the PISCES model (see ``ORCA2_OFF_PISCES``) or
a user-defined set of tracers and source-sink terms with ``ln_my_trc = .true.``
(and adaptation of the ``./src/TOP/MY_TRC`` routines).
In addition, the offline module (OFF) allows for the provision of further fields:
1. **River runoff** can be provided to TOP components by setting ``ln_dynrnf = .true.`` and
by including an input datastream similar to the following:
.. code-block:: fortran
sn_rnf = 'dyna_grid_T', 120, 'sorunoff' , .true., .true., 'yearly', '', '', ''
2. **VVL dynamical fields**: in case the input data were produced by a dynamical core using
variable volume (``ln_linssh = .false.``),
it is also necessary to provide the divergence and E-P at the before time step by
including input datastreams similar to the following:
.. code-block:: fortran
sn_div = 'dyna_grid_T', 120, 'e3t' , .true., .true., 'yearly', '', '', ''
sn_empb = 'dyna_grid_T', 120, 'sowaflupb', .true., .true., 'yearly', '', '', ''
More details can be found by inspecting the offline data manager in
the routine :file:`./src/OFF/dtadyn.F90`.
ORCA2_SAS_ICE
-------------
| ORCA2_SAS_ICE is a demonstrator of the Stand-Alone Surface (SAS) module and
it relies on the ORCA2 global ocean configuration (see ORCA2_ICE_PISCES_ for a general description).
| The standalone surface module allows surface elements such as sea ice, iceberg drift, and
surface fluxes to be run using prescribed model state fields.
It can profitably be used to compare different bulk formulae or
to adjust the parameters of a given bulk formula.
More information about SAS can be found in the :doc:`NEMO manual <cite>`.
SPITZ12
-------
``SPITZ12`` is a regional configuration around the Svalbard archipelago
at 1/12° horizontal resolution with 75 vertical levels.
See :gmd:`Rousset et al. (2015) <8/2991/2015>` for more details.
This configuration refers to the year 2002,
with atmospheric forcing provided every 2 hours using the NCAR bulk formulae,
while the lateral boundary conditions for the dynamical fields have a 3-day time frequency.
.. rubric:: References
.. bibliography:: cfgs.bib
:all:
:style: unsrt
:labelprefix: C
Simple style rules for namelists
--------------------------------
NEMO reference namelists should adhere to the following simple style rules:
1. Comments outside a namelist block start with !! in column 1
2. Each namelist block starts with 3 lines of the form:
!-----------------------------------------------------------------------
&namblockname ! short description of block
!-----------------------------------------------------------------------
with all ! and & characters starting in column 1
3. The closing / for each namelist block is in column 1
4. Comments within namelist blocks never start with !- . Use ! followed
by space or != etc.
These conventions make it possible to construct empty configuration namelists.
For example, a namelist_cfg template can be produced from namelist_ref with
the following grep command:
grep -E '^!-|^&|^/' namelist_ref > namelist_cfg.template
head namelist_cfg.template
!-----------------------------------------------------------------------
&namrun ! parameters of the run
!-----------------------------------------------------------------------
/
!-----------------------------------------------------------------------
&namcfg ! parameters of the configuration
!-----------------------------------------------------------------------
/
!-----------------------------------------------------------------------
&namdom ! time and space domain
!-----------------------------------------------------------------------
/
.
.
If all configuration namelists are produced and maintained using this
strategy then standard, side-by-side comparators, such as vimdiff or xxdiff,
can be used to compare and transfer lines from the reference namelist to a
configuration namelist when setting up a new configuration.
Tips and tricks
---------------
1. The following bash function is useful when checking which namelist blocks
are in active use in a configuration namelist:
function list_used_nl(){ grep -n -E '^&|^/' "$1" | sed -e 's/:/ /' \
| awk ' BEGIN { x = 0 } \
{if ( NR % 2 == 0 && $1 - x > 2 ) printf("%3d %s\n", $1 - x - 2 , n) ; \
else x = $1; n = $2}' \
| sort -k 2;}
which (assuming the namelist adheres to the conventions) will list the number
of entries in each non-empty namelist block. The list is sorted on the block
name to ease comparisons. For example:
list_used_nl ORCA2_LIM3_PISCES/EXP00/namelist_cfg
1 &nambbc
5 &nambbl
30 &namberg
10 &namcfg
4 &namctl
3 &namdom
1 &namdrg
5 &namdyn_adv
1 &namdyn_hpg
22 &namdyn_ldf
1 &namdyn_spg
5 &namdyn_vor
3 &nameos
1 &namhsb
4 &namrun
1 &namsbc
1 &namsbc_blk
3 &namtra_adv
28 &namtra_ldf
10 &namtra_ldfeiv
25 &namzdf
3 &namzdf_iwm
2. vimdiff can give garish colours in some terminals. Usually this is because
vim assumes, incorrectly, that the terminal only supports 8 colours. Try forcing
256 colours with:
:set t_Co=256
to produce more pastel shades (add this to ~/.vimrc if successful).
3. Switching between vsplit panes in vim is a multi-key sequence. The tool is
much easier to use if the sequence is mapped to a spare key. Here I use the
§ and ± keys on my Mac keyboard (add to ~/.vimrc):
map § ^Wl
map ± ^Wh
4. With easy switching between panes, constructing namelists in vimdiff just
requires the following commands in addition to normal editing:
]c - Go to next block of the diff
dp - Push version of the block under cursor into the other pane
do - Pull version of the block under cursor from the other pane
***********
Diagnostics
***********
.. todo::
.. contents::
:local:
Output of diagnostics in NEMO is usually done using XIOS.
This is an efficient way of writing diagnostics because
the time averaging, file writing and even some simple arithmetic or regridding are carried out in
parallel with the NEMO model run.
This page gives a basic introduction to using XIOS with NEMO.
Much more information is available from the :xios:`XIOS homepage<>` and from the NEMO manual.
Use of XIOS for diagnostics is activated using the pre-compiler key ``key_xios``.
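The reference configurations already set this key in their ``cpp_*.fcm`` file; if it is missing from a user configuration, it can typically be added at build time, as in this sketch (the configuration and architecture names reuse the earlier example, and the ``add_key`` option of :file:`makenemo` is assumed to be available in your release):

.. code-block:: console

   $ ./makenemo -r 'ORCA2_ICE_PISCES' -m 'my_arch' -j '4' add_key 'key_xios'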
Extracting and installing XIOS
==============================
1. Install the NetCDF4 library.
If you want to use single file output you will need to compile the HDF & NetCDF libraries to
allow parallel IO.
2. Download the version of XIOS that you wish to use.
The recommended version is now XIOS 2.5:
.. code-block:: console
$ svn co http://forge.ipsl.jussieu.fr/ioserver/svn/XIOS/branchs/xios-2.5
and follow the instructions in :xios:`XIOS documentation <wiki/documentation>` to compile it.
If you find problems at this stage, support can be found by subscribing to
the :xios:`XIOS mailing list <../mailman/listinfo.cgi/xios-users>` and sending a mail message to it.
XIOS Configuration files
------------------------
XIOS is controlled using XML input files that should be copied to
your model run directory before running the model.
Examples of these files can be found in the reference configurations (:file:`./cfgs`).
The XIOS executable expects to find a file called :file:`iodef.xml` in the model run directory.
In NEMO we have made the decision to use include statements in the :file:`iodef.xml` file to include:
- :file:`field_def_nemo-oce.xml` (for physics),
- :file:`field_def_nemo-ice.xml` (for ice),
- :file:`field_def_nemo-pisces.xml` (for biogeochemistry) and
- :file:`domain_def.xml` from the :file:`./cfgs/SHARED` directory.
Most users will not need to modify :file:`domain_def.xml` or :file:`field_def_nemo-???.xml` unless
they want to add new diagnostics to the NEMO code.
The definition of the output files is organized into separate :file:`file_definition.xml` files which
are included in the :file:`iodef.xml` file.
Modes
=====
Detached Mode
-------------
In detached mode the XIOS executable is executed on separate cores from the NEMO model.
This is the recommended method for using XIOS for realistic model runs.
To use this mode set ``using_server`` to ``true`` at the bottom of the :file:`iodef.xml` file:
.. code-block:: xml
<variable id="using_server" type="boolean">true</variable>
Make sure there is a copy of (or link to) your XIOS executable in the working directory, and
allocate processors to XIOS in your job submission script.
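The exact launch syntax depends on the MPI implementation and batch system; as an indicative sketch only (executable names and core counts are assumptions), an MPMD launch giving separate cores to the XIOS server could look like:

.. code-block:: console

   $ # 92 cores for NEMO, 4 cores for the XIOS server (detached mode)
   $ mpirun -np 92 ./nemo : -np 4 ./xios_server.exe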
Attached Mode
-------------
In attached mode XIOS runs on each of the cores used by NEMO.
This method is less efficient than the detached mode but can be more convenient for testing or
with small configurations.
To activate this mode simply set ``using_server`` to false in the :file:`iodef.xml` file
.. code-block:: xml
<variable id="using_server" type="boolean">false</variable>
and don't allocate any cores to XIOS.
.. note::
Due to the different domain decompositions between XIOS and NEMO,
if the total number of cores is larger than the number of grid points in the ``j`` direction then
the model run will fail.
Adding new diagnostics
======================
If you want to add a NEMO diagnostic to the NEMO code you will need to do the following:
1. Add any necessary code to calculate your new diagnostic in NEMO
2. Send the field to XIOS using ``CALL iom_put( 'field_id', variable )`` where
``field_id`` is a unique id for your new diagnostic and
``variable`` is the Fortran variable containing the data.
This should be called at every model timestep regardless of how often you want to output the field.
No time averaging should be done in the model code.
3. If it is computationally expensive to calculate your new diagnostic,
you should also use ``iom_use`` to determine whether it is requested in the current model run.
For example:
.. code-block:: fortran
IF( iom_use('field_id') ) THEN
   ! some expensive computation
   ! ...
   ! ...
   CALL iom_put( 'field_id', variable )
ENDIF
4. Add a variable definition to the :file:`field_def_nemo-???.xml` file.
5. Add the variable to the :file:`iodef.xml` or :file:`file_definition.xml` file (a sketch of both XML additions is given below).
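As a sketch of steps 4 and 5 (the field id ``mydiag`` and all of its attributes are purely illustrative; the id must match the one used in the ``iom_put`` call):

.. code-block:: xml

   <!-- field_def_nemo-oce.xml: declare the new field inside the relevant <field_group> -->
   <field id="mydiag" long_name="my new diagnostic" unit="W/m2" />

   <!-- file_definition.xml: request the field in one of the output files -->
   <file id="file15" name_suffix="_mydiag" description="extra diagnostics" >
      <field field_ref="mydiag" />
   </file>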
......@@ -189,12 +189,14 @@
<domain id="grid_F_inner" long_name="grid F inner"/>
<!-- zonal mean grid -->
<domain_group id="gznl">
<domain id="gznl" long_name="gznl"/>
<domain id="ptr" domain_ref="gznl" >
<zoom_domain id="ptr" ibegin="0000" jbegin="0" ni="1" nj="0000" />
</domain>
</domain_group>
<domain id="gznl" long_name="gznl"/>
<domain id="ptr" domain_ref="gznl" >
<zoom_domain id="ptr" ibegin="0000" jbegin="0" ni="1" nj="0000" />
</domain>
<domain id="znl_T" domain_ref="gznl" > <zoom_domain id="znl_T"/> </domain>
<domain id="znl_W" domain_ref="gznl" > <zoom_domain id="znl_W"/> </domain>
<!-- other grids -->
......