library(tidyverse)
library(neon4cast)
library(lubridate)
library(rMR)
library(arrow)
Appendix A — Example Forecast Workflow
Here is an example of a complete workflow for generating a forecast submission to the Challenge. The example is for the aquatics theme and forecasts water temperature and oxygen. The water temperature forecast uses a linear regression between air temperature and water temperature to predict water temperature in the future. It then uses the predicted water temperature to predict water oxygen concentration by assuming that the oxygen is 100% saturated at the predicted temperature.
To generate the forecast we need to:
- Build a relationship between past air temperature and past water temperature.
- Apply the relationships from #1 to forecasted air temperature.
- Calculate oxygen concentration from the forecasted water temperature.
Therefore we need to:
- Download the historical water temperature data for the NEON sites (called “targets”).
- Download historical air temperature data for the NEON sites (we use the stacked NOAA GEFS weather product).
- Download the NOAA weather forecasts for the NEON sites.
- Create a linear regression model from the historical data for each NEON site.
- Apply the linear regression to the weather forecasts for each NEON site.
- Write the forecast output file.
- Submit the forecast to the Challenge.
Each of these steps is described below.
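Before walking through the full workflow, here is a minimal sketch of the two model steps using made-up numbers (not Challenge data, and a hypothetical 50 m elevation): the water temperature model is an ordinary lm() fit, and the oxygen step assumes 100% saturation via rMR::Eq.Ox.conc(), exactly the two functions used later.

# Toy illustration only: fabricated air/water temperatures
toy <- tibble::tibble(air_temperature = c(5, 10, 15, 20, 25),
                      temperature     = c(4, 8, 13, 18, 22))
toy_fit <- lm(temperature ~ air_temperature, data = toy)

# Predict water temperature for a new air temperature (12 C here)
pred_water_temp <- predict(toy_fit, tibble::tibble(air_temperature = 12))

# Oxygen concentration assuming 100% saturation at that water temperature
rMR::Eq.Ox.conc(pred_water_temp, elevation.m = 50,
                bar.press = NULL, bar.units = NULL,
                out.DO.meas = "mg/L", salinity = 0, salinity.units = "pp.thou")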
A.1 Step 0: Set up R environment and directories
We will be downloading NOAA forecasts from the Challenge S3 bucket and submitting our forecast back to an S3 bucket; therefore, the AWS endpoint information is needed (in the code below it is passed directly to arrow with anonymous access).
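As an optional precaution (our assumption, not a Challenge requirement): if your R session already has AWS credentials set for other work, they can interfere with anonymous access to the Challenge buckets, and you can unset them first.

# Optional: clear stray AWS credentials so anonymous S3 access is used
Sys.unsetenv(c("AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY"))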
Define the date that the forecast starts. For demonstration purposes, we set the date to 2022-02-15. In a real-time application, use forecast_date <- Sys.Date() or forecast_date <- Sys.Date() - lubridate::days(1) (in some cases the NOAA data for the current day is not available by the time you run your forecast).

forecast_date <- lubridate::as_date("2022-02-15")
noaa_date <- forecast_date  # date the NOAA driver forecast starts; used later in forecast_site()
A.2 Step 0: Define team name
<- "neon4cast-example" model_id
A.3 Download latest target data and site description data
These targets are updated when new data is available from NEON.
<- readr::read_csv("https://data.ecoforecast.org/neon4cast-targets/aquatics/aquatics-targets.csv.gz", guess_max = 1e6) target
A table is available with NEON site descriptions. The calculation of oxygen saturation requires the elevation of each site, which is included in the site description table.
<- readr::read_csv("https://raw.githubusercontent.com/eco4cast/neon4cast-targets/main/NEON_Field_Site_Metadata_20220412.csv") |>
site_data filter(aquatics == 1)
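To get a feel for the two tables before modeling, it can help to glance at their structure (an optional check, not part of the workflow):

# Optional sanity checks on the downloaded tables
dplyr::glimpse(target)        # includes datetime, site_id, variable, observation
dplyr::count(target, variable)  # which target variables are present
dplyr::select(site_data, field_site_id, field_mean_elevation_m)  # elevation used later for oxygen saturation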
A.4 Download past NOAA forecasts stacked together
To build the relationship between air and water temperature, we need historical air temperature data to associate with the historical water temperature data. We use a product that the Challenge organizers created by stacking day 1 NOAA weather forecasts (i.e., when the forecasts are most accurate) to generate an estimate of past weather. Here we download this “stacked” NOAA product for the set of NEON sites in the targets file.
df_past <- neon4cast::noaa_stage3()
Load a helper function that averages over the predicted 0 h horizon ensemble members to get “historic” daily values for each site.
noaa_mean_historical <- function(df_past, site, var) {
  df_past |>
    dplyr::filter(site_id == site,
                  variable == var) |>
    dplyr::rename(ensemble = parameter) |>
    dplyr::select(datetime, prediction, ensemble) |>
    dplyr::mutate(date = as_date(datetime)) |>
    dplyr::group_by(date) |>
    dplyr::summarize(air_temperature = mean(prediction, na.rm = TRUE),
                     .groups = "drop") |>
    dplyr::rename(datetime = date) |>
    dplyr::mutate(air_temperature = air_temperature - 273.15) |>
    dplyr::collect()
}
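For example (using “BARC”, Lake Barco, purely as an illustration of one aquatics site in the targets), a single call returns a daily mean air temperature in °C for that site:

# Example call: daily mean historical air temperature for one site
# (collects data from the remote bucket, so it can take a while)
noaa_mean_historical(df_past, "BARC", "air_temperature")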
A.5 Download NOAA future forecast
We need NOAA weather forecasts of the future. Fortunately, the Challenge organizers download and subset the weather forecasts for each NEON site. Here we download the weather forecast that starts on forecast_date (start_date = forecast_date) at midnight UTC (cycle = 0) for the set of sites in the targets file.
noaa_mean_forecast <- function(site, var, reference_date) {
  endpoint = "data.ecoforecast.org"
  bucket <- glue::glue("neon4cast-drivers/noaa/gefs-v12/stage1/0/{reference_date}")
  s3 <- arrow::s3_bucket(bucket, endpoint_override = endpoint, anonymous = TRUE)

  # stage1 air temp is Celsius
  arrow::open_dataset(s3) |>
    dplyr::filter(site_id == site,
                  datetime >= lubridate::as_datetime(forecast_date),
                  variable == var) |>
    dplyr::select(datetime, prediction, parameter) |>
    dplyr::mutate(datetime = as_date(datetime)) |>
    dplyr::group_by(datetime, parameter) |>
    dplyr::summarize(air_temperature = mean(prediction), .groups = "drop") |>
    dplyr::select(datetime, air_temperature, parameter) |>
    dplyr::rename(ensemble = parameter) |>
    dplyr::collect()
}
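For example (again using “BARC” only as an illustration), this returns a daily mean air temperature for each GEFS ensemble member, starting from the forecast date:

# Example call: daily ensemble air temperature forecast for one site
# ("TMP" is the GEFS air temperature variable used later in forecast_site())
noaa_mean_forecast("BARC", "TMP", noaa_date)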
We’ll skip any site that doesn’t have both temperature and oxygen observations.
sites <- target |> na.omit() |> distinct(site_id, variable) |>
  filter(variable %in% c("oxygen", "temperature")) |>
  count(site_id) |> filter(n == 2) |> pull(site_id)
A.6 Define the forecast model for a site
forecast_site <- function(site) {
  message(paste0("Running site: ", site))

  # Get site information for elevation
  site_info <- site_data |> dplyr::filter(field_site_id == site)

  # historical temperatures
  noaa_past_mean <- noaa_mean_historical(df_past, site, "air_temperature")

  # Merge in past NOAA data into the targets file, matching by date.
  site_target <- target |>
    dplyr::select(datetime, site_id, variable, observation) |>
    dplyr::filter(variable %in% c("temperature", "oxygen"),
                  site_id == site) |>
    tidyr::pivot_wider(names_from = "variable", values_from = "observation") |>
    dplyr::left_join(noaa_past_mean, by = c("datetime"))

  rm(noaa_past_mean) # save RAM

  # Fit linear model based on past data: water temperature = m * air temperature + b
  fit <- lm(temperature ~ air_temperature, data = site_target)

  # Get 30-day predicted temperature ensemble at the site
  noaa_future <- noaa_mean_forecast(site, "TMP", noaa_date)

  # use the linear model (predict.lm) to forecast water temperature for each ensemble member
  temperature <-
    noaa_future |>
    mutate(site_id = site,
           prediction = predict(fit, tibble(air_temperature)),
           variable = "temperature")

  # use forecasted water temperature to predict oxygen by assuming that oxygen is saturated.
  forecasted_oxygen <-
    rMR::Eq.Ox.conc(temperature$prediction,
                    elevation.m = site_info$field_mean_elevation_m,
                    bar.press = NULL,
                    bar.units = NULL,
                    out.DO.meas = "mg/L",
                    salinity = 0,
                    salinity.units = "pp.thou")

  # stick bits together
  oxygen <-
    noaa_future |>
    mutate(site_id = site,
           prediction = forecasted_oxygen,
           variable = "oxygen")

  forecast <- dplyr::bind_rows(temperature, oxygen)

  # Format results to EFI standard
  forecast <- forecast |>
    mutate(reference_datetime = forecast_date,
           family = "ensemble",
           model_id = model_id) |>
    rename(parameter = ensemble) |>
    select(model_id, datetime, reference_datetime,
           site_id, family, parameter, variable, prediction)
}
AND HERE WE GO! We’re ready to start forecasting
Test with a single site first!
forecast <- forecast_site(sites[1])
Visualize the ensemble predictions – what do you think?
forecast |>
  ggplot(aes(x = datetime, y = prediction, group = parameter)) +
  geom_line(alpha = 0.3) +
  facet_wrap(~variable, scales = "free")
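If you prefer a numeric check over the plot, a quick summary of the ensemble spread by day is easy to compute (an optional check, using only columns already in the forecast tibble):

# Optional: mean and 90% interval per day and variable across the ensemble
forecast |>
  group_by(datetime, variable) |>
  summarize(mean = mean(prediction),
            lower90 = quantile(prediction, 0.05),
            upper90 = quantile(prediction, 0.95),
            .groups = "drop")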
Run all sites – may be slow!
forecast <- map_dfr(sites, forecast_site)
The forecast output file name needs to follow the standard required by the Challenge. The csv.gz extension means that the file will be compressed.
file_date <- Sys.Date() #forecast$reference_datetime[1]
forecast_file <- paste0("aquatics", "-", file_date, "-", model_id, ".csv.gz")
Write csv to disk
write_csv(forecast, forecast_file)
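Before submitting, it is worth checking that the file matches the Challenge standard; the neon4cast package provides a validator for this (assuming your installed version includes it):

# Optional: validate the file against the EFI/Challenge standard before submitting
neon4cast::forecast_output_validator(forecast_file)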
A.7 Submit forecast!
neon4cast::submit(forecast_file = forecast_file, metadata = NULL, ask = FALSE)
You can check on the status of your submission using
neon4cast::check_submission(forecast_file)
On the day following submission, you can see the forecast on the dashboard at shiny.ecoforecast.org.
A.8 Complete registration and metadata
Complete the registration and the model overview that describes your model.
A.9 Example on GitHub
The example code above can be found on GitHub as a template repository. See the README for more information about using the template.