Skip to content

API reference

Here you can find the code reference for the two main components that users of PyStemmusScope might need. The first is the StemmusScope model class. The second is the save module, with which you can save the STEMMUS_SCOPE model output to a netCDF file.

PyStemmusScope:

PyStemmusScope wrapper around Stemmus_Scope model.

PyStemmusScope wrapper around Stemmus_Scope model.

For a detailed model description, look at this publication.

Configures the model and prepares forcing and soil data for the model run.

Parameters:

Name Type Description Default
config_file Union[str, Path]

Path to Stemmus_Scope configuration file. An example config_file can be found in tests/test_data in STEMMUS_SCOPE_Processing repository.

required
model_src_path Union[str, Path]

Path to Stemmus_Scope executable file or to a directory containing model source codes.

required
interpreter optional

Use Matlab or Octave. Only required if model_src_path is a path to model source codes.

None
Example

See notebooks/run_model_in_notebook.ipynb at the STEMMUS_SCOPE_Processing repository

Source code in PyStemmusScope/stemmus_scope.py
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
def __init__(
    self,
    config_file: Union[str, Path],
    model_src_path: Union[str, Path],
    interpreter: Optional[str] = None,
):
    """PyStemmusScope wrapper around Stemmus_Scope model.

    For a detailed model description, look at
    [this publication](https://gmd.copernicus.org/articles/14/1379/2021/).

    Configures the model and prepares forcing and soil data for the model run.

    Arguments:
        config_file: Path to Stemmus_Scope configuration file. An example
            config_file can be found in tests/test_data in [STEMMUS_SCOPE_Processing
            repository](https://github.com/EcoExtreML/STEMMUS_SCOPE_Processing).
        model_src_path: Path to Stemmus_Scope executable file or to a
            directory containing model source codes.
        interpreter (optional): Use `Matlab` or `Octave`. Only required if
            `model_src_path` is a path to model source codes.

    Example:
        See notebooks/run_model_in_notebook.ipynb at the [STEMMUS_SCOPE_Processing
        repository](https://github.com/EcoExtreML/STEMMUS_SCOPE_Processing)
    """
    # Normalize both user-supplied paths to absolute Path objects.
    config_path = utils.to_absolute_path(config_file)
    model_src = utils.to_absolute_path(model_src_path)

    # A compiled executable needs no interpreter; raw model source does.
    if _is_model_src_exe(model_src):
        self.exe_file: Optional[Path] = model_src
    else:
        self.exe_file = None
        _check_interpreter(interpreter)

    self.model_src = model_src
    self.interpreter = interpreter

    # Load the configuration template; setup() customizes it per run.
    self._config = config_io.read_config(config_path)

config: dict property

Return the configurations for this model.

run()

Run model using executable.

Returns:

Type Description
str

The model log.

Source code in PyStemmusScope/stemmus_scope.py
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
def run(self) -> str:
    """Run the STEMMUS_SCOPE model.

    Prefers the compiled executable (run via the Matlab Compiler Runtime)
    when one was detected at construction time; otherwise dispatches to the
    configured Matlab or Octave interpreter. The branches are mutually
    exclusive: previously, independent `if` statements could run the model
    twice (executable *and* interpreter set) or leave the result unbound.

    Returns:
        The model log.

    Raises:
        ValueError: If no executable is available and the interpreter is
            neither "Matlab" nor "Octave".
    """
    if self.exe_file:
        # run using MCR
        args = [f"{self.exe_file} {self.cfg_file}"]
        # set matlab log dir
        os.environ["MATLAB_LOG_DIR"] = str(self._config["InputPath"])
        return _run_sub_process(args, None)

    if self.interpreter == "Matlab":
        # set Matlab arguments
        path_to_config = f"'{self.cfg_file}'"
        eval_code = f"STEMMUS_SCOPE_exe({path_to_config});exit;"
        args = ["matlab", "-r", eval_code, "-nodisplay", "-nosplash", "-nodesktop"]

        # separate args don't work on linux!
        return _run_sub_process(
            args if utils.os_name() == "nt" else shlex.join(args), self.model_src
        )

    if self.interpreter == "Octave":
        # set Octave arguments
        # use subprocess instead of oct2py,
        # see issue STEMMUS_SCOPE_Processing/issues/46
        path_to_config = f"'{self.cfg_file}'"
        # fix for windows
        path_to_config = path_to_config.replace("\\", "/")
        eval_code = f"STEMMUS_SCOPE_exe({path_to_config});exit;"
        args = ["octave", "--eval", eval_code, "--no-gui", "--silent"]

        # separate args don't work on linux!
        return _run_sub_process(
            args if utils.os_name() == "nt" else shlex.join(args), self.model_src
        )

    # Previously this fell through and raised UnboundLocalError on `result`.
    raise ValueError(
        "Cannot run the model: no executable file was found and the "
        "interpreter is not 'Matlab' or 'Octave'."
    )

setup(WorkDir=None, Location=None, StartTime=None, EndTime=None)

Configure the model run.

  1. Creates config file and input/output directories based on the config template
  2. Prepare forcing and soil data

Parameters:

Name Type Description Default
WorkDir Optional[str]

path to a directory where input/output directories should be created.

None
Location Optional[str]

Location of the model run. Can be a site ("FI-Hyy") or lat/lon, e.g., "(52.0, 4.05)".

None
ForcingFileName

forcing file name. Forcing file should be in netcdf format.

required
StartTime Optional[str]

Start time of the model run. It must be in ISO format (e.g. 2007-01-01T00:00).

None
EndTime Optional[str]

End time of the model run. It must be in ISO format (e.g. 2007-01-01T00:00).

None

Returns:

Type Description
str

Path to the config file

Source code in PyStemmusScope/stemmus_scope.py
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
def setup(
    self,
    WorkDir: Optional[str] = None,
    Location: Optional[str] = None,
    StartTime: Optional[str] = None,
    EndTime: Optional[str] = None,
) -> str:
    """Configure the model run.

    1. Creates config file and input/output directories based on the config template
    2. Prepare forcing and soil data

    Args:
        WorkDir: path to a directory where input/output directories should be
            created.
        Location: Location of the model run. Can be a site ("FI-Hyy") or lat/lon,
            e.g., "(52.0, 4.05)".
        StartTime: Start time of the model run. It must be in
            ISO format (e.g. 2007-01-01T00:00).
        EndTime: End time of the model run. It must be in ISO format
            (e.g. 2007-01-01T00:00).

    Returns:
        Path to the config file
    """
    # Override config-template entries only for arguments that were provided.
    overrides = {
        "WorkDir": WorkDir,
        "Location": Location,
        "StartTime": StartTime,
        "EndTime": EndTime,
    }
    for key, value in overrides.items():
        if value:
            self._config[key] = value

    # validate config *before* directory creation
    config_io.validate_config(self._config)

    # create customized config file and input/output directories for model run
    _, _, self.cfg_file = config_io.create_io_dir(self._config)

    # re-read the freshly written config so subsequent steps see the
    # run-specific input/output paths
    self._config = config_io.read_config(self.cfg_file)

    forcing_io.prepare_forcing(self._config)
    soil_io.prepare_soil_data(self._config)
    soil_io.prepare_soil_init(self._config)

    return str(self.cfg_file)

PyStemmusScope.save:

PyStemmusScope.save

PyStemmusScope save module.

Module designed to create a netcdf file following the ALMA convention from csv files following the SCOPE format in the output directory.

The file required_netcf_variables.csv lists required variable names and their attributes based on the ALMA+CF convention table.

Note

See notebooks/run_model_in_notebook.ipynb in the STEMMUS_SCOPE_Processing repository.

to_netcdf(config_file, cf_filename)

Save csv files generated by STEMMUS_SCOPE to an ALMA-compliant netCDF file.

Parameters:

Name Type Description Default
config_file str

Path to the config file.

required
cf_filename str

Path to a csv file for ALMA conventions.

required

Returns:

Type Description
str

Path to the netCDF file under the output directory.

Source code in PyStemmusScope/save.py
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
def to_netcdf(config_file: str, cf_filename: str) -> str:
    """Save csv files generated by STEMMUS_SCOPE to an ALMA-compliant netCDF file.

    Args:
        config_file: Path to the config file.
        cf_filename: Path to a csv file describing the ALMA conventions
            (required variable names and attributes).

    Returns:
        Path to the netCDF file written to the output directory.

    Raises:
        ValueError: If the location format in the config is not recognized.
    """
    config = config_io.read_config(Path(config_file))
    loc, fmt = utils.check_location_fmt(config["Location"])

    # list of required forcing variables, Alma_short_name: forcing_io_name, # model_name
    var_names = {
        "RH": "rh",  # RH
        "SWdown_ec": "sw_down",  # Rin
        "LWdown_ec": "lw_down",  # Rli
        "Qair": "Qair",
        "Tair": "t_air_celcius",  # Ta
        "Psurf": "psurf_hpa",  # P
        "Wind": "wind_speed",  # u
        "Precip": "precip_conv",  # Pre
    }

    if fmt == "site":
        # read forcing file into a dict
        forcing_dict = forcing_io.read_forcing_data_plumber2(
            utils.get_forcing_file(config),
            config["StartTime"],
            config["EndTime"],
        )
    elif fmt == "latlon":
        forcing_dict = forcing_io.read_forcing_data_global(
            Path(config["ForcingPath"]),
            lat=loc[0],  # type: ignore
            lon=loc[1],  # type: ignore
            start_time=np.datetime64(config["StartTime"]),
            end_time=np.datetime64(config["EndTime"]),
        )
    else:
        # guard: previously an unknown format left `forcing_dict` unbound
        # and raised an opaque NameError below.
        raise ValueError(f"Unknown location format: {fmt}")

    # get time info
    time = forcing_dict["time"]

    # read convention file
    conventions = pd.read_csv(cf_filename)

    alma_short_names = conventions["short_name_alma"]
    data_list = []
    for alma_name in alma_short_names:
        df = conventions.loc[alma_short_names == alma_name].iloc[0]
        file_name = Path(config["OutputPath"]) / df["file_name_STEMMUS-SCOPE"]

        if alma_name in var_names:
            # forcing variable: copy it straight from the forcing data
            data_array = _select_forcing_variables(
                forcing_dict, var_names[alma_name], alma_name
            )
        elif alma_name in {"SoilTemp", "SoilMoist"}:
            # depth-resolved soil variables need their own reshaping
            data_array = _prepare_soil_data(file_name, alma_name, time)
        else:
            # all remaining variables come from the model's csv output
            data_array = _prepare_simulated_data(
                file_name, df["short_name_STEMMUS-SCOPE"], alma_name, time
            )

        # update attributes of array
        data_array.attrs = {
            "units": df["unit"],
            "long_name": df["long_name"],
            "standard_name": df["standard_name"],
            "STEMMUS-SCOPE_name": df["short_name_STEMMUS-SCOPE"],
            "definition": df["definition"],
        }

        # add to list
        data_list.append(data_array)

    # merge to a dataset
    dataset = xr.merge(data_list)

    # update dimensions
    dataset = _update_dataset_attrs_dims(dataset, forcing_dict)

    # for writing to netcdf, time attrs should be added
    # time attrs should be the same as plumber 2 forcing data
    # otherwise it cannot be uploaded to modelevaluation portal
    start_time = time.dt.strftime("%Y-%m-%d").values[0]
    time_encode = {
        "time": {"units": f"seconds since {start_time}", "calendar": "standard"}
    }
    # save to nc file
    nc_filename = (
        Path(config["OutputPath"])
        / f"{Path(config['OutputPath']).stem}_STEMMUS_SCOPE.nc"
    )
    dataset.to_netcdf(path=nc_filename, encoding=time_encode)

    return str(nc_filename)