2023-09-03 01:59:07 +00:00
|
|
|
#pragma once
|
|
|
|
|
|
|
|
|
|
#include "rust/cxx.h"
|
|
|
|
|
#include <memory>
|
|
|
|
|
|
|
|
|
|
namespace llama {
|
2023-10-31 22:16:09 +00:00
|
|
|
// One unit of per-request output returned by TextInferenceEngine::step().
// Forward-declared only — presumably defined in the Rust/cxx bridge
// (this header uses rust::Vec<StepOutput>); confirm against the bridge module.
struct StepOutput;
|
2023-09-03 01:59:07 +00:00
|
|
|
|
|
|
|
|
/// Abstract interface to a text inference engine, driven from Rust through
/// the cxx bridge (rust::Str / rust::Vec arguments). Implementations live
/// elsewhere; callers hold the engine behind a std::unique_ptr from
/// create_engine().
class TextInferenceEngine {
public:
// Virtual destructor so deleting through a TextInferenceEngine* destroys the
// concrete implementation correctly. Defined out-of-line (in the .cc file).
virtual ~TextInferenceEngine();

/// Register a new inference request.
/// @param request_id caller-chosen id, used later by stop_request() and to
///        correlate step() outputs — uniqueness is the caller's
///        responsibility (not enforced here).
/// @param text input prompt as a borrowed Rust string slice; implementations
///        must copy it if they retain it beyond this call.
/// @param max_input_length cap on how much of `text` is consumed.
///        NOTE(review): unit (tokens vs. bytes/chars) is not visible from
///        this header — confirm in the implementation.
virtual void add_request(uint32_t request_id, rust::Str text, size_t max_input_length) = 0;

/// Cancel the in-flight request previously added with `request_id`.
virtual void stop_request(uint32_t request_id) = 0;

/// Advance inference by one step across active requests and return the
/// outputs produced during that step (exact batching/ordering semantics are
/// implementation-defined; not visible from this header).
virtual rust::Vec<StepOutput> step() = 0;
};
|
|
|
|
|
|
2023-10-25 22:40:11 +00:00
|
|
|
/// Factory: construct a concrete TextInferenceEngine.
/// @param use_gpu    request GPU-backed inference from the implementation.
/// @param model_path path to the model, as a borrowed Rust string slice.
/// @return sole owner of the newly created engine.
/// [[nodiscard]]: discarding the returned unique_ptr would immediately
/// destroy the engine, which is always a caller bug — make it a warning.
[[nodiscard]] std::unique_ptr<TextInferenceEngine> create_engine(bool use_gpu, rust::Str model_path);
|
2023-09-03 01:59:07 +00:00
|
|
|
} // namespace
|