saveModel <- function()

in modelServR/R/saveModel.R [147:237]


saveModel <- function(model, modelStoreDir = Sys.getenv('MODEL_STORE', '/home/rstudio/seattle_flu/test_model_store'), storeRDS = TRUE) {
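  # basicConfig() and setLevel() are assumed to come from the 'logging' package
  # (which also provides loginfo() below); "FINEST" is its most verbose level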
  basicConfig()
  setLevel("FINEST")
  ts <- Sys.time()
  attr(ts, "tzone") <- 'UTC'
  ts <- paste0(as.character(ts), 'Z')
  
  # We always write to the local model store directory; the Python upload script
  # is then used to post trained models to production
  modelDBfilename <- paste(modelStoreDir, '/', 'modelDB.tsv', sep = '')

  # Ensure the model store directory exists
  dir.create(modelStoreDir, showWarnings = FALSE)
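  # Note: showWarnings = FALSE only silences the warning if the directory already
  # exists; parent directories are not created since recursive defaults to FALSE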

  # If the model is a latent_field type, save the latent field as its own (base) model and write out its CSV
  if (model$modelDefinition$type == 'latent_field') {
    modelQuery <- getModelQueryObjectFromModel(model, latent = TRUE)
    modelId <- getModelIdFromQuery(modelQuery)
    name <- getHumanReadableModelIdFromModel(model, latent = TRUE)
    
    filename <- modelId
    rdsFilename <- if (storeRDS) { paste(modelStoreDir, '/', filename, '.RDS', sep = '') } else { '' }
    
    newRow <- data.frame(
      filename = filename,
      name = name,
      queryJSON = as.character(jsonlite::toJSON(modelQuery)),
      type = 'inla_latent',
      created = ts,
      rds = rdsFilename
    )
    newRow$latent <- TRUE
    
    loginfo("Saving latent model")

    write.csv(
      model$latentField,
      paste(modelStoreDir, '/', filename, '.csv', sep = ''),
      row.names = FALSE,
      quote = FALSE
    )

    # write to our model db file
    write.table(
      newRow, file = modelDBfilename, sep = '\t', row.names = FALSE, col.names = !file.exists(modelDBfilename),
      quote = FALSE, append = file.exists(modelDBfilename)
    )
  }
  
  # All models produce a smoothed output, which is saved regardless of model type
  loginfo("Saving smooth model")
  modelQuery <- getModelQueryObjectFromModel(model, latent = FALSE)
  modelId <- getModelIdFromQuery(modelQuery)
  name <- getHumanReadableModelIdFromModel(model, latent = FALSE)
  
  filename <- modelId
  
  # The RDS is stored under the smooth model's filename, so it can later be loaded
  # via either the latent or the smooth entry if needed. The file is only written
  # when storeRDS is TRUE; otherwise rds is recorded as an empty string
  if (!exists('rdsFilename', inherits = FALSE)) {
    rdsFilename <- if (storeRDS) { paste(modelStoreDir, '/', filename, '.RDS', sep = '') } else { '' }
  }
  
  newRow <- data.frame(
    filename = filename,
    name = name,
    queryJSON = as.character(jsonlite::toJSON(modelQuery)),
    type = 'inla_observed',
    created = ts,
    rds = rdsFilename,
    latent = FALSE
  )
  
  loginfo("Saving observed model")
  
  write.csv(
    model$modeledData,
    paste(modelStoreDir, '/', filename, '.csv', sep = ''),
    row.names = FALSE,
    quote = FALSE
  )
  write.table(
    newRow, file = modelDBfilename, sep = '\t', row.names = FALSE, col.names = !file.exists(modelDBfilename),
    quote = FALSE, append = file.exists(modelDBfilename)
  )
  
  if (storeRDS) {
    loginfo("Saving RDS")
    outfile <- xzfile(rdsFilename, 'wb', compression = 9, encoding = 'utf8')
    saveRDS(model, file = outfile)
    close(outfile)
  }
}
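
A minimal usage sketch, assuming a fitted model object is available. The name
fittedModel below is illustrative, not part of the package; saveModel() expects it
to carry at least modelDefinition$type, modeledData, and, for latent_field models,
latentField.

storeDir <- tempdir()
saveModel(fittedModel, modelStoreDir = storeDir, storeRDS = FALSE)

# The model DB index written above can be read back as a data frame
modelDB <- read.delim(file.path(storeDir, 'modelDB.tsv'), stringsAsFactors = FALSE)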