File tree: 2 files changed, +9 −0 lines changed
lines changed Original file line number Diff line number Diff line change @@ -15937,6 +15937,14 @@ bool llama_supports_mlock(void) {
1593715937 return llama_mlock::SUPPORTED;
1593815938}
1593915939
// Report whether this build of llama.cpp was compiled with the RPC
// backend enabled (GGML_USE_RPC). Compile-time constant; no runtime probing.
bool llama_supports_rpc(void) {
#ifdef GGML_USE_RPC
    return true;
#else
    return false;
#endif
}
15947+
1594015948bool llama_supports_gpu_offload(void) {
1594115949#if defined(GGML_USE_CUDA) || defined(GGML_USE_METAL) || defined(GGML_USE_VULKAN) || \
1594215950 defined(GGML_USE_SYCL) || defined(GGML_USE_KOMPUTE) || defined(GGML_USE_RPC)
Original file line number Diff line number Diff line change @@ -430,6 +430,7 @@ extern "C" {
430430
431431 LLAMA_API bool llama_supports_mmap (void );
432432 LLAMA_API bool llama_supports_mlock (void );
433+ LLAMA_API bool llama_supports_rpc (void );
433434 LLAMA_API bool llama_supports_gpu_offload (void );
434435
435436 LLAMA_API const struct llama_model * llama_get_model (const struct llama_context * ctx);
You can’t perform that action at this time.
0 commit comments