|
ailia_llm
1.4.0.0
|
Classes | |
| class | AILIAChatMessage |
| class | AILIALLMMediaData |
| Media data structure for multimodal processing. More... | |
| class | AILIALLMMultimodalChatMessage |
| Multimodal chat message with media attachments. More... | |
Public Member Functions | |
| static int | ailiaLLMCreate (ref IntPtr llm) |
| Creates a LLM instance. More... | |
| static int | ailiaLLMOpenModelFile (IntPtr llm, string path, uint n_ctx) |
| Open model file. More... | |
| static int | ailiaLLMSetSamplingParams (IntPtr llm, uint top_k, float top_p, float temp, uint dist) |
| Set the sampling parameter. More... | |
| static int | ailiaLLMSetPrompt (IntPtr llm, IntPtr messages, uint messages_len) |
| Set the prompt. More... | |
| static int | ailiaLLMGenerate (IntPtr llm, ref uint done) |
| Perform generate. More... | |
| static int | ailiaLLMGetDeltaTextSize (IntPtr llm, ref uint len) |
| Gets the size of text. (Includes the null terminator) More... | |
| static int | ailiaLLMGetDeltaText (IntPtr llm, IntPtr text, uint len) |
| Gets the decoded text. More... | |
| static int | ailiaLLMGetTokenCount (IntPtr llm, ref uint cnt, IntPtr text) |
| Gets the count of token. More... | |
| static int | ailiaLLMGetPromptTokenCount (IntPtr llm, ref uint cnt) |
| Gets the count of prompt token. More... | |
| static int | ailiaLLMGetGeneratedTokenCount (IntPtr llm, ref uint cnt) |
| Gets the count of generated token. More... | |
| static int | ailiaLLMGetContextSize (IntPtr llm, ref uint context_size) |
| Gets the size of context. More... | |
| static int | ailiaLLMOpenMultimodalProjectorFile (IntPtr llm, string mmproj_path) |
| Load multimodal projector file. More... | |
| static int | ailiaLLMGetMultimodalCapabilities (IntPtr llm, ref uint vision_support, ref uint audio_support) |
| Check if multimodal features are supported. More... | |
| static int | ailiaLLMSetMultimodalPrompt (IntPtr llm, IntPtr messages, uint messages_len) |
| Set multimodal prompt. More... | |
| static int | ailiaLLMGetBackendCount (ref uint env_count) |
| Gets the number of available computational environments (CPU, GPU). More... | |
| static int | ailiaLLMGetBackendName (ref IntPtr env, uint env_idx) |
| Gets the list of computational environments. More... | |
| static void | ailiaLLMDestroy (IntPtr llm) |
| It destroys the LLM instance. More... | |
Static Public Attributes | |
| const String | LIBRARY_NAME ="ailia_llm" |
| const int | AILIA_LLM_STATUS_SUCCESS = (0) |
| const int | AILIA_LLM_STATUS_INVALID_ARGUMENT = (-1) |
| const int | AILIA_LLM_STATUS_ERROR_FILE_API = (-2) |
| const int | AILIA_LLM_STATUS_INVALID_VERSION = (-3) |
| const int | AILIA_LLM_STATUS_BROKEN = (-4) |
| const int | AILIA_LLM_STATUS_MEMORY_INSUFFICIENT = (-5) |
| const int | AILIA_LLM_STATUS_THREAD_ERROR = (-6) |
| const int | AILIA_LLM_STATUS_INVALID_STATE = (-7) |
| const int | AILIA_LLM_STATUS_CONTEXT_FULL = (-8) |
| const int | AILIA_LLM_STATUS_ERROR_BUFFER_API = (-9) |
| const int | AILIA_LLM_STATUS_UNIMPLEMENTED = (-15) |
| const int | AILIA_LLM_STATUS_OTHER_ERROR = (-128) |
| static int ailiaLLM.AiliaLLM.ailiaLLMCreate | ( | ref IntPtr | llm | ) |
Creates a LLM instance.
| llm | A pointer to the LLM instance pointer |
Creates a LLM instance.
| static void ailiaLLM.AiliaLLM.ailiaLLMDestroy | ( | IntPtr | llm | ) |
It destroys the LLM instance.
| llm | A LLM instance pointer |
| static int ailiaLLM.AiliaLLM.ailiaLLMGenerate | ( | IntPtr | llm, |
| ref uint | done | ||
| ) |
Perform generate.
| llm | A LLM instance pointer |
| done | Generation complete? |
The decoded result is obtained through the ailiaLLMGetDeltaText API. Each call to ailiaLLMGenerate decodes one token at a time. The value of done is 0 or 1. If done is 1, the generation is complete.
| static int ailiaLLM.AiliaLLM.ailiaLLMGetBackendCount | ( | ref uint | env_count | ) |
Gets the number of available computational environments (CPU, GPU).
| env_count | The storage location of the number of computational environment information |
| static int ailiaLLM.AiliaLLM.ailiaLLMGetBackendName | ( | ref IntPtr | env, |
| uint | env_idx | ||
| ) |
Gets the list of computational environments.
| env | The storage location of the computational environment information (valid until the LLM instance is destroyed) |
| env_idx | The index of the computational environment information (0 to ailiaLLMGetBackendCount() -1) |
| static int ailiaLLM.AiliaLLM.ailiaLLMGetContextSize | ( | IntPtr | llm, |
| ref uint | context_size | ||
| ) |
Gets the size of context.
| llm | A LLM instance pointer |
| context_size | The length of context |
| static int ailiaLLM.AiliaLLM.ailiaLLMGetDeltaText | ( | IntPtr | llm, |
| IntPtr | text, | ||
| uint | len | ||
| ) |
Gets the decoded text.
| llm | A LLM instance pointer |
| text | Text(UTF8) |
| len | Buffer size |
If ailiaLLMGenerate() is not run at all, the function returns AILIA_LLM_STATUS_INVALID_STATE .
| static int ailiaLLM.AiliaLLM.ailiaLLMGetDeltaTextSize | ( | IntPtr | llm, |
| ref uint | len | ||
| ) |
Gets the size of text. (Includes the null terminator)
| llm | A LLM instance pointer |
| len | The length of text |
| static int ailiaLLM.AiliaLLM.ailiaLLMGetGeneratedTokenCount | ( | IntPtr | llm, |
| ref uint | cnt | ||
| ) |
Gets the count of generated token.
| llm | A LLM instance pointer |
| cnt | The count of generated token |
It can be called after calling ailiaLLMGenerate.
| static int ailiaLLM.AiliaLLM.ailiaLLMGetMultimodalCapabilities | ( | IntPtr | llm, |
| ref uint | vision_support, | ||
| ref uint | audio_support | ||
| ) |
Check if multimodal features are supported.
| llm | A LLM instance pointer |
| vision_support | Whether image processing is supported |
| audio_support | Whether audio processing is supported |
Can be called after ailiaLLMOpenMultimodalProjectorFile.
| static int ailiaLLM.AiliaLLM.ailiaLLMGetPromptTokenCount | ( | IntPtr | llm, |
| ref uint | cnt | ||
| ) |
Gets the count of prompt token.
| llm | A LLM instance pointer |
| cnt | The count of prompt token |
It can be called after calling ailiaLLMSetPrompt.
| static int ailiaLLM.AiliaLLM.ailiaLLMGetTokenCount | ( | IntPtr | llm, |
| ref uint | cnt, | ||
| IntPtr | text | ||
| ) |
Gets the count of token.
| llm | A LLM instance pointer |
| cnt | The count of token |
| text | Text(UTF8) |
| static int ailiaLLM.AiliaLLM.ailiaLLMOpenModelFile | ( | IntPtr | llm, |
| string | path, | ||
| uint | n_ctx | ||
| ) |
Open model file.
| llm | A LLM instance pointer |
| path | Path for GGUF |
| n_ctx | Context length for model (0 is model default) |
Open a model file for GGUF.
| static int ailiaLLM.AiliaLLM.ailiaLLMOpenMultimodalProjectorFile | ( | IntPtr | llm, |
| string | mmproj_path | ||
| ) |
Load multimodal projector file.
| llm | A LLM instance pointer |
| mmproj_path | Path to the MMPROJ file (GGUF format) |
To use multimodal features, you must first load the text model with ailiaLLMOpenModelFile, then load the multimodal projector with this function.
| static int ailiaLLM.AiliaLLM.ailiaLLMSetMultimodalPrompt | ( | IntPtr | llm, |
| IntPtr | messages, | ||
| uint | messages_len | ||
| ) |
Set multimodal prompt.
| llm | A LLM instance pointer |
| messages | Array of multimodal messages |
| messages_len | Number of messages |
Set multimodal prompt. Include <__media__> placeholders in message content, and set corresponding media data in media_data. Example: "Describe this image: <__media__>"
| static int ailiaLLM.AiliaLLM.ailiaLLMSetPrompt | ( | IntPtr | llm, |
| IntPtr | messages, | ||
| uint | messages_len | ||
| ) |
Set the prompt.
| llm | A LLM instance pointer |
| messages | Array of messages |
| messages_len | Number of messages |
Set the prompt to query the LLM. Please include ChatHistory in the message as well.
| static int ailiaLLM.AiliaLLM.ailiaLLMSetSamplingParams | ( | IntPtr | llm, |
| uint | top_k, | ||
| float | top_p, | ||
| float | temp, | ||
| uint | dist | ||
| ) |
Set the sampling parameter.
| llm | A LLM instance pointer |
| top_k | Sampling probability value's top number, default 40 |
| top_p | Sampling probability value range, default 0.9 (0.9 to 1.0) |
| temp | Temperature parameter, default 0.4 |
| dist | Seed, default 1234 |
Set LLM sampling parameters. Must be run before ailiaLLMSetPrompt.
|
static |
|
static |
|
static |
|
static |
|
static |
|
static |
|
static |
|
static |
|
static |
|
static |
|
static |
|
static |
|
static |