diff --git a/crates/llama-cpp-bindings/src/engine.cc b/crates/llama-cpp-bindings/src/engine.cc
index a314a6a..5b48090 100644
--- a/crates/llama-cpp-bindings/src/engine.cc
+++ b/crates/llama-cpp-bindings/src/engine.cc
@@ -246,9 +246,14 @@ class TextInferenceEngineImpl : public TextInferenceEngine {
       }
 
       if (request.multibyte_pending == 0) {
-        rust::String generated_text = is_eos ? "" : request.generated_text;
-        result.push_back({request.id, generated_text});
+        rust::String generated_text;
+        try {
+          generated_text = is_eos ? "" : request.generated_text;
+        } catch (const std::invalid_argument& e) {
+          fprintf(stderr, "%s:%d [%s] - ignoring non utf-8/utf-16 output\n", __FILE__, __LINE__, __func__);
+        }
+        result.push_back({request.id, generated_text});
         request.generated_text.clear();
       }
     }
 
diff --git a/crates/llama-cpp-bindings/src/lib.rs b/crates/llama-cpp-bindings/src/lib.rs
index 5bd33d1..b2b2a32 100644
--- a/crates/llama-cpp-bindings/src/lib.rs
+++ b/crates/llama-cpp-bindings/src/lib.rs
@@ -76,7 +76,9 @@ impl AsyncTextInferenceEngine {
 
         let result = match engine.as_mut().unwrap().step() {
             Ok(result) => result,
-            Err(err) => panic!("Failed to step: {}", err),
+            Err(err) => {
+                panic!("Failed to step: {}", err)
+            }
         };
 
         for ffi::StepOutput { request_id, text } in result {