ailia_llm  1.4.0.0
Classes | Public Member Functions | Static Public Attributes | List of all members
ailiaLLM.AiliaLLM Class Reference

Classes

class  AILIAChatMessage
 
class  AILIALLMMediaData
 Media data structure for multimodal processing. More...
 
class  AILIALLMMultimodalChatMessage
 Multimodal chat message with media attachments. More...
 

Public Member Functions

static int ailiaLLMCreate (ref IntPtr llm)
 Creates a LLM instance. More...
 
static int ailiaLLMOpenModelFile (IntPtr llm, string path, uint n_ctx)
 Open model file. More...
 
static int ailiaLLMSetSamplingParams (IntPtr llm, uint top_k, float top_p, float temp, uint dist)
 Set the sampling parameter. More...
 
static int ailiaLLMSetPrompt (IntPtr llm, IntPtr messages, uint messages_len)
 Set the prompt. More...
 
static int ailiaLLMGenerate (IntPtr llm, ref uint done)
 Perform generate. More...
 
static int ailiaLLMGetDeltaTextSize (IntPtr llm, ref uint len)
 Gets the size of text. (Include null) More...
 
static int ailiaLLMGetDeltaText (IntPtr llm, IntPtr text, uint len)
 Gets the decoded text. More...
 
static int ailiaLLMGetTokenCount (IntPtr llm, ref uint cnt, IntPtr text)
 Gets the count of token. More...
 
static int ailiaLLMGetPromptTokenCount (IntPtr llm, ref uint cnt)
 Gets the count of prompt token. More...
 
static int ailiaLLMGetGeneratedTokenCount (IntPtr llm, ref uint cnt)
 Gets the count of generated token. More...
 
static int ailiaLLMGetContextSize (IntPtr llm, ref uint context_size)
 Gets the size of context. More...
 
static int ailiaLLMOpenMultimodalProjectorFile (IntPtr llm, string mmproj_path)
 Load multimodal projector file. More...
 
static int ailiaLLMGetMultimodalCapabilities (IntPtr llm, ref uint vision_support, ref uint audio_support)
 Check if multimodal features are supported. More...
 
static int ailiaLLMSetMultimodalPrompt (IntPtr llm, IntPtr messages, uint messages_len)
 Set multimodal prompt. More...
 
static int ailiaLLMGetBackendCount (ref uint env_count)
 Gets the number of available computational environments (CPU, GPU). More...
 
static int ailiaLLMGetBackendName (ref IntPtr env, uint env_idx)
 Gets the list of computational environments. More...
 
static void ailiaLLMDestroy (IntPtr llm)
 It destroys the LLM instance. More...
 

Static Public Attributes

const String LIBRARY_NAME ="ailia_llm"
 
const int AILIA_LLM_STATUS_SUCCESS = (0)
 
const int AILIA_LLM_STATUS_INVALID_ARGUMENT = (-1)
 
const int AILIA_LLM_STATUS_ERROR_FILE_API = (-2)
 
const int AILIA_LLM_STATUS_INVALID_VERSION = (-3)
 
const int AILIA_LLM_STATUS_BROKEN = (-4)
 
const int AILIA_LLM_STATUS_MEMORY_INSUFFICIENT = (-5)
 
const int AILIA_LLM_STATUS_THREAD_ERROR = (-6)
 
const int AILIA_LLM_STATUS_INVALID_STATE = (-7)
 
const int AILIA_LLM_STATUS_CONTEXT_FULL = (-8)
 
const int AILIA_LLM_STATUS_ERROR_BUFFER_API = (-9)
 
const int AILIA_LLM_STATUS_UNIMPLEMENTED = (-15)
 
const int AILIA_LLM_STATUS_OTHER_ERROR = (-128)
 

Member Function Documentation

◆ ailiaLLMCreate()

static int ailiaLLM.AiliaLLM.ailiaLLMCreate ( ref IntPtr  llm)

Creates a LLM instance.

Parameters
llmA pointer to the LLM instance pointer
Returns
If this function is successful, it returns AILIA_LLM_STATUS_SUCCESS , or an error code otherwise.

Creates a LLM instance.

◆ ailiaLLMDestroy()

static void ailiaLLM.AiliaLLM.ailiaLLMDestroy ( IntPtr  llm)

It destroys the LLM instance.

Parameters
llmA LLM instance pointer

◆ ailiaLLMGenerate()

static int ailiaLLM.AiliaLLM.ailiaLLMGenerate ( IntPtr  llm,
ref uint  done 
)

Perform generate.

Parameters
llmA LLM instance pointer
doneGeneration complete?
Returns
If this function is successful, it returns AILIA_LLM_STATUS_SUCCESS , or an error code otherwise.

The decoded result is obtained through the ailiaLLMGetDeltaText API. Each call to ailiaLLMGenerate decodes one token at a time. The value of done is 0 or 1. If done is 1, the generation is complete.

◆ ailiaLLMGetBackendCount()

static int ailiaLLM.AiliaLLM.ailiaLLMGetBackendCount ( ref uint  env_count)

Gets the number of available computational environments (CPU, GPU).

Parameters
env_countThe storage location of the number of computational environment information
Returns
If this function is successful, it returns AILIA_LLM_STATUS_SUCCESS , or an error code otherwise.

◆ ailiaLLMGetBackendName()

static int ailiaLLM.AiliaLLM.ailiaLLMGetBackendName ( ref IntPtr  env,
uint  env_idx 
)

Gets the list of computational environments.

Parameters
envThe storage location of the computational environment information (valid until the AILIANetwork instance is destroyed)
env_idxThe index of the computational environment information (0 to ailiaLLMGetBackendCount() -1)
Returns
If this function is successful, it returns AILIA_LLM_STATUS_SUCCESS , or an error code otherwise.

◆ ailiaLLMGetContextSize()

static int ailiaLLM.AiliaLLM.ailiaLLMGetContextSize ( IntPtr  llm,
ref uint  context_size 
)

Gets the size of context.

Parameters
llmA LLM instance pointer
context_sizeThe length of context
Returns
If this function is successful, it returns AILIA_LLM_STATUS_SUCCESS , or an error code otherwise.

◆ ailiaLLMGetDeltaText()

static int ailiaLLM.AiliaLLM.ailiaLLMGetDeltaText ( IntPtr  llm,
IntPtr  text,
uint  len 
)

Gets the decoded text.

Parameters
llmA LLM instance pointer
textText(UTF8)
lenBuffer size
Returns
If this function is successful, it returns AILIA_LLM_STATUS_SUCCESS , or an error code otherwise.

If ailiaLLMGenerate() is not run at all, the function returns AILIA_LLM_STATUS_INVALID_STATE .

◆ ailiaLLMGetDeltaTextSize()

static int ailiaLLM.AiliaLLM.ailiaLLMGetDeltaTextSize ( IntPtr  llm,
ref uint  len 
)

Gets the size of text. (Include null)

Parameters
llmA LLM instance pointer
lenThe length of text
Returns
If this function is successful, it returns AILIA_LLM_STATUS_SUCCESS , or an error code otherwise.

◆ ailiaLLMGetGeneratedTokenCount()

static int ailiaLLM.AiliaLLM.ailiaLLMGetGeneratedTokenCount ( IntPtr  llm,
ref uint  cnt 
)

Gets the count of generated token.

Parameters
llmA LLM instance pointer
cntThe count of generated token
Returns
If this function is successful, it returns AILIA_LLM_STATUS_SUCCESS , or an error code otherwise.

It can be called after calling ailiaLLMGenerate.

◆ ailiaLLMGetMultimodalCapabilities()

static int ailiaLLM.AiliaLLM.ailiaLLMGetMultimodalCapabilities ( IntPtr  llm,
ref uint  vision_support,
ref uint  audio_support 
)

Check if multimodal features are supported.

Parameters
llmA LLM instance pointer
vision_supportWhether image processing is supported
audio_supportWhether audio processing is supported
Returns
If this function is successful, it returns AILIA_LLM_STATUS_SUCCESS , or an error code otherwise.

Can be called after ailiaLLMOpenMultimodalProjectorFile.

◆ ailiaLLMGetPromptTokenCount()

static int ailiaLLM.AiliaLLM.ailiaLLMGetPromptTokenCount ( IntPtr  llm,
ref uint  cnt 
)

Gets the count of prompt token.

Parameters
llmA LLM instance pointer
cntThe count of prompt token
Returns
If this function is successful, it returns AILIA_LLM_STATUS_SUCCESS , or an error code otherwise.

It can be called after calling ailiaLLMSetPrompt.

◆ ailiaLLMGetTokenCount()

static int ailiaLLM.AiliaLLM.ailiaLLMGetTokenCount ( IntPtr  llm,
ref uint  cnt,
IntPtr  text 
)

Gets the count of token.

Parameters
llmA LLM instance pointer
cntThe count of token
textText(UTF8)
Returns
If this function is successful, it returns AILIA_LLM_STATUS_SUCCESS , or an error code otherwise.

◆ ailiaLLMOpenModelFile()

static int ailiaLLM.AiliaLLM.ailiaLLMOpenModelFile ( IntPtr  llm,
string  path,
uint  n_ctx 
)

Open model file.

Parameters
llmA LLM instance pointer
pathPath for GGUF
n_ctxContext length for model (0 is model default)
Returns
If this function is successful, it returns AILIA_LLM_STATUS_SUCCESS , or an error code otherwise.

Open a model file for GGUF.

◆ ailiaLLMOpenMultimodalProjectorFile()

static int ailiaLLM.AiliaLLM.ailiaLLMOpenMultimodalProjectorFile ( IntPtr  llm,
string  mmproj_path 
)

Load multimodal projector file.

Parameters
llmA LLM instance pointer
mmproj_pathPath to the MMPROJ file (GGUF format)
Returns
If this function is successful, it returns AILIA_LLM_STATUS_SUCCESS , or an error code otherwise.

To use multimodal features, you must first load the text model with ailiaLLMOpenModelFile, then load the multimodal projector with this function.

◆ ailiaLLMSetMultimodalPrompt()

static int ailiaLLM.AiliaLLM.ailiaLLMSetMultimodalPrompt ( IntPtr  llm,
IntPtr  messages,
uint  messages_len 
)

Set multimodal prompt.

Parameters
llmA LLM instance pointer
messagesArray of multimodal messages
messages_lenNumber of messages
Returns
If this function is successful, it returns AILIA_LLM_STATUS_SUCCESS , or an error code otherwise.

Set multimodal prompt. Include <__media__> placeholders in message content, and set corresponding media data in media_data. Example: "Describe this image: <__media__>"

◆ ailiaLLMSetPrompt()

static int ailiaLLM.AiliaLLM.ailiaLLMSetPrompt ( IntPtr  llm,
IntPtr  messages,
uint  messages_len 
)

Set the prompt.

Parameters
llmA LLM instance pointer
messagesArray of messages
messages_lenNumber of messages
Returns
If this function is successful, it returns AILIA_LLM_STATUS_SUCCESS , or an error code otherwise.

Set the prompt to query the LLM. Please include ChatHistory in the message as well.

◆ ailiaLLMSetSamplingParams()

static int ailiaLLM.AiliaLLM.ailiaLLMSetSamplingParams ( IntPtr  llm,
uint  top_k,
float  top_p,
float  temp,
uint  dist 
)

Set the sampling parameter.

Parameters
llmA LLM instance pointer
top_kSampling probability value's top number, default 40
top_pSampling probability value range, default 0.9 (0.9 to 1.0)
tempTemperature parameter, default 0.4
distSeed, default 1234
Returns
If this function is successful, it returns AILIA_LLM_STATUS_SUCCESS , or an error code otherwise.

Set LLM sampling parameters. Must be run before ailiaLLMSetPrompt.

Member Data Documentation

◆ AILIA_LLM_STATUS_BROKEN

const int ailiaLLM.AiliaLLM.AILIA_LLM_STATUS_BROKEN = (-4)
static

◆ AILIA_LLM_STATUS_CONTEXT_FULL

const int ailiaLLM.AiliaLLM.AILIA_LLM_STATUS_CONTEXT_FULL = (-8)
static

◆ AILIA_LLM_STATUS_ERROR_BUFFER_API

const int ailiaLLM.AiliaLLM.AILIA_LLM_STATUS_ERROR_BUFFER_API = (-9)
static

◆ AILIA_LLM_STATUS_ERROR_FILE_API

const int ailiaLLM.AiliaLLM.AILIA_LLM_STATUS_ERROR_FILE_API = (-2)
static

◆ AILIA_LLM_STATUS_INVALID_ARGUMENT

const int ailiaLLM.AiliaLLM.AILIA_LLM_STATUS_INVALID_ARGUMENT = (-1)
static

◆ AILIA_LLM_STATUS_INVALID_STATE

const int ailiaLLM.AiliaLLM.AILIA_LLM_STATUS_INVALID_STATE = (-7)
static

◆ AILIA_LLM_STATUS_INVALID_VERSION

const int ailiaLLM.AiliaLLM.AILIA_LLM_STATUS_INVALID_VERSION = (-3)
static

◆ AILIA_LLM_STATUS_MEMORY_INSUFFICIENT

const int ailiaLLM.AiliaLLM.AILIA_LLM_STATUS_MEMORY_INSUFFICIENT = (-5)
static

◆ AILIA_LLM_STATUS_OTHER_ERROR

const int ailiaLLM.AiliaLLM.AILIA_LLM_STATUS_OTHER_ERROR = (-128)
static

◆ AILIA_LLM_STATUS_SUCCESS

const int ailiaLLM.AiliaLLM.AILIA_LLM_STATUS_SUCCESS = (0)
static

◆ AILIA_LLM_STATUS_THREAD_ERROR

const int ailiaLLM.AiliaLLM.AILIA_LLM_STATUS_THREAD_ERROR = (-6)
static

◆ AILIA_LLM_STATUS_UNIMPLEMENTED

const int ailiaLLM.AiliaLLM.AILIA_LLM_STATUS_UNIMPLEMENTED = (-15)
static

◆ LIBRARY_NAME

const String ailiaLLM.AiliaLLM.LIBRARY_NAME ="ailia_llm"
static

The documentation for this class was generated from the following file: