From 46012866a49491d9bc4251d9a2b425d4e3b86c45 Mon Sep 17 00:00:00 2001
From: OlivierDehaene <23298448+OlivierDehaene@users.noreply.github.com>
Date: Fri, 21 Jun 2024 12:09:09 +0200
Subject: [PATCH] feat(candle): better cuda error

---
 backends/candle/src/lib.rs | 9 ++++++---
 1 file changed, 6 insertions(+), 3 deletions(-)

diff --git a/backends/candle/src/lib.rs b/backends/candle/src/lib.rs
index 241504d0..11c9731e 100644
--- a/backends/candle/src/lib.rs
+++ b/backends/candle/src/lib.rs
@@ -18,6 +18,7 @@ use crate::models::{
 use crate::models::{
     FlashBertModel, FlashDistilBertModel, FlashJinaBertModel, FlashNomicBertModel,
 };
+use anyhow::Context;
 use candle::{DType, Device};
 use candle_nn::VarBuilder;
 use models::BertConfig;
@@ -55,9 +56,11 @@ impl CandleBackend {
     ) -> Result<Self, BackendError> {
         // Load config
         let config: String = std::fs::read_to_string(model_path.join("config.json"))
-            .map_err(|err| BackendError::Start(err.to_string()))?;
+            .context("Unable to read config file")
+            .map_err(|err| BackendError::Start(format!("{err:?}")))?;
         let config: Config = serde_json::from_str(&config)
-            .map_err(|err| BackendError::Start(format!("Model is not supported: {}", err)))?;
+            .context("Model is not supported")
+            .map_err(|err| BackendError::Start(format!("{err:?}")))?;
 
         // Get candle device
         let device = if candle::utils::cuda_is_available() {
@@ -72,7 +75,7 @@ impl CandleBackend {
                 )))
             }
             Err(err) => {
-                tracing::warn!("Could not find a compatible CUDA device on host: {err}");
+                tracing::warn!("Could not find a compatible CUDA device on host: {err:?}");
                 tracing::warn!("Using CPU instead");
                 Ok(Device::Cpu)
             }