ailia_llm  1.4.0.0
Public Member Functions | Protected Member Functions | List of all members
ailiaLLM.AiliaLLMModel Class Reference
Inheritance diagram for ailiaLLM.AiliaLLMModel:
Inheritance graph
[legend]
Collaboration diagram for ailiaLLM.AiliaLLMModel:
Collaboration graph
[legend]

Public Member Functions

bool Create ()
 Create an instance. More...
 
bool Open (string model_path, uint n_ctx=0)
 Open a model. More...
 
bool OpenMultimodalProjector (string mmproj_path)
 Open a multimodal projector file. More...
 
bool GetMultimodalCapabilities (ref bool vision_support, ref bool audio_support)
 Check if multimodal features are supported. More...
 
virtual void Close ()
 Destroys instance. More...
 
virtual void Dispose ()
 Release resources. More...
 
bool SetSamplingParam (uint top_k, float top_p, float temp, uint dist)
 Set the sampling parameter. More...
 
bool SetPrompt (List< AiliaLLMChatMessage > messages)
 Set prompt messages. More...
 
bool SetMultimodalPrompt (List< AiliaLLMMultimodalChatMessage > messages)
 Set multimodal prompt messages. More...
 
bool Generate (ref bool done)
 Perform generation. More...
 
string GetDeltaText ()
 Get the generated delta text. More...
 
bool ContextFull ()
 Check if the context length limit has been reached. More...
 
uint PromptTokenCount ()
 Gets the number of prompt tokens. More...
 
uint GeneratedTokenCount ()
 Gets the number of tokens generated. More...
 

Protected Member Functions

virtual void Dispose (bool disposing)
 

Member Function Documentation

◆ Close()

virtual void ailiaLLM.AiliaLLMModel.Close ( )
inlinevirtual

Destroys instance.

Destroys and initializes the instance.

184  {
185  if (net != IntPtr.Zero){
186  AiliaLLM.ailiaLLMDestroy(net);
187  net = IntPtr.Zero;
188  }
189  }

◆ ContextFull()

bool ailiaLLM.AiliaLLMModel.ContextFull ( )
inline

Check if the context length limit has been reached.

Returns
True if the limit is reached, false otherwise.
587  {
588  return context_full;
589  }

◆ Create()

bool ailiaLLM.AiliaLLMModel.Create ( )
inline

Create an instance.

Returns
If this function is successful, it returns true , or false otherwise.
56  {
57  if (net != IntPtr.Zero){
58  Close();
59  }
60 
61  int status = AiliaLLM.ailiaLLMCreate(ref net);
62  if (status != 0){
63  if (logging)
64  {
65  Debug.Log("ailiaLLMCreate failed " + status);
66  }
67  return false;
68  }
69 
70  return true;
71  }
virtual void Close()
Destroys instance.
Definition: AiliaLLMModel.cs:183

◆ Dispose() [1/2]

virtual void ailiaLLM.AiliaLLMModel.Dispose ( )
inlinevirtual

Release resources.

199  {
200  Dispose(true);
201  }
virtual void Dispose()
Release resources.
Definition: AiliaLLMModel.cs:198

◆ Dispose() [2/2]

virtual void ailiaLLM.AiliaLLMModel.Dispose ( bool  disposing)
inlineprotectedvirtual
204  {
205  if (disposing){
206  // release managed resource
207  }
208  Close(); // release unmanaged resource
209  }

◆ Generate()

bool ailiaLLM.AiliaLLMModel.Generate ( ref bool  done)
inline

Perform generation.

Parameters
doneSet to true when generation has finished
Returns
If this function is successful, it returns true , or false otherwise.
504  {
505  uint done_uint = 0;
506  int status = AiliaLLM.ailiaLLMGenerate(net, ref done_uint);
507  context_full = false;
508  done = (done_uint == 1);
509  if (status != 0){
510  if (logging)
511  {
512  Debug.Log("ailiaLLMGenerate failed " + status);
513  }
514  if (status == AiliaLLM.AILIA_LLM_STATUS_CONTEXT_FULL){
515  context_full = true;
516  }
517  return false;
518  }
519  return true;
520  }

◆ GeneratedTokenCount()

uint ailiaLLM.AiliaLLMModel.GeneratedTokenCount ( )
inline

Gets the number of tokens generated.

Returns
Number of tokens generated. 0 if failed.
629  {
630  if (net == IntPtr.Zero){
631  return 0;
632  }
633  uint count = 0;
634  int status = AiliaLLM.ailiaLLMGetGeneratedTokenCount(net, ref count);
635  if (status != 0){
636  if (logging)
637  {
638  Debug.Log("ailiaLLMGetGeneratedTokenCount failed " + status);
639  }
640  return 0;
641  }
642  return count;
643  }

◆ GetDeltaText()

string ailiaLLM.AiliaLLMModel.GetDeltaText ( )
inline

Get the generated delta text.

Returns
It returns the text generated since the previous call.
534  {
535  uint len = 0;
536  int status = AiliaLLM.ailiaLLMGetDeltaTextSize(net, ref len);
537  if (status != 0){
538  if (logging)
539  {
540  Debug.Log("ailiaLLMGetDeltaTextSize failed " + status);
541  }
542  return "";
543  }
544  byte[] text = new byte [len];
545  GCHandle handle = GCHandle.Alloc(text, GCHandleType.Pinned);
546  IntPtr output = handle.AddrOfPinnedObject();
547  status = AiliaLLM.ailiaLLMGetDeltaText(net, output, len);
548  handle.Free();
549  if (status != 0){
550  if (logging)
551  {
552  Debug.Log("ailiaLLMGetDeltaText failed " + status);
553  }
554  return "";
555  }
556 
557  byte[] new_buf = new byte [buf.Length + len - 1];
558  for (int i = 0; i < buf.Length; i++){
559  new_buf[i] = buf[i];
560  }
561  for (int i = 0; i < len - 1; i++){ // NULLの削除
562  new_buf[buf.Length + i] = text[i];
563  }
564  buf = new_buf;
565 
566  string decoded_text = System.Text.Encoding.UTF8.GetString(buf); // Unicode Decode Errorは発生しない
567  string delta_text = "";
568  if (decoded_text.Length > before_text.Length){
569  delta_text = decoded_text.Substring(before_text.Length);
570  }
571  before_text = decoded_text;
572  return delta_text;
573  }

◆ GetMultimodalCapabilities()

bool ailiaLLM.AiliaLLMModel.GetMultimodalCapabilities ( ref bool  vision_support,
ref bool  audio_support 
)
inline

Check if multimodal features are supported.

Parameters
vision_supportWhether image processing is supported
audio_supportWhether audio processing is supported
Returns
If this function is successful, it returns true , or false otherwise.
152  {
153  uint vision_uint = 0;
154  uint audio_uint = 0;
155  int status = AiliaLLM.ailiaLLMGetMultimodalCapabilities(net, ref vision_uint, ref audio_uint);
156  if (status != 0){
157  if (logging)
158  {
159  Debug.Log("ailiaLLMGetMultimodalCapabilities failed " + status);
160  }
161  return false;
162  }
163  vision_support = (vision_uint == 1);
164  audio_support = (audio_uint == 1);
165  return true;
166  }

◆ Open()

bool ailiaLLM.AiliaLLMModel.Open ( string  model_path,
uint  n_ctx = 0 
)
inline

Open a model.

Parameters
model_pathPath for model
n_ctxContext length for model (0 is model default)
Returns
If this function is successful, it returns true , or false otherwise.
88  {
89  if (net == IntPtr.Zero){
90  return false;
91  }
92 
93  int status = 0;
94 
95  status = AiliaLLM.ailiaLLMOpenModelFile(net, model_path, n_ctx);
96  if (status != 0){
97  if (logging)
98  {
99  Debug.Log("ailiaLLMOpenModelFile failed " + status);
100  }
101  return false;
102  }
103 
104  return true;
105  }

◆ OpenMultimodalProjector()

bool ailiaLLM.AiliaLLMModel.OpenMultimodalProjector ( string  mmproj_path)
inline

Open a multimodal projector file.

Parameters
mmproj_pathPath for MMPROJ file
Returns
If this function is successful, it returns true , or false otherwise.
120  {
121  if (net == IntPtr.Zero){
122  return false;
123  }
124 
125  int status = AiliaLLM.ailiaLLMOpenMultimodalProjectorFile(net, mmproj_path);
126  if (status != 0){
127  if (logging)
128  {
129  Debug.Log("ailiaLLMOpenMultimodalProjectorFile failed " + status);
130  }
131  return false;
132  }
133 
134  return true;
135  }

◆ PromptTokenCount()

uint ailiaLLM.AiliaLLMModel.PromptTokenCount ( )
inline

Gets the number of prompt tokens.

Returns
Number of prompt tokens. 0 if failed.
602  {
603  if (net == IntPtr.Zero){
604  return 0;
605  }
606  uint count = 0;
607  int status = AiliaLLM.ailiaLLMGetPromptTokenCount(net, ref count);
608  if (status != 0){
609  if (logging)
610  {
611  Debug.Log("ailiaLLMGetPromptTokenCount failed " + status);
612  }
613  return 0;
614  }
615  return count;
616  }

◆ SetMultimodalPrompt()

bool ailiaLLM.AiliaLLMModel.SetMultimodalPrompt ( List< AiliaLLMMultimodalChatMessage messages)
inline

Set multimodal prompt messages.

Parameters
messagesMultimodal prompt messages
Returns
If this function is successful, it returns true , or false otherwise.
345  {
346  List<GCHandle> handle_list = new List<GCHandle>();
347  int len = messages.Count;
348  byte[][] role_text_list = new byte [len][];
349  byte[][] content_text_list = new byte [len][];
350  AiliaLLM.AILIALLMMultimodalChatMessage [] message_list = new AiliaLLM.AILIALLMMultimodalChatMessage[len];
351 
352  // Prepare media data arrays
353  List<byte[][]> media_type_lists = new List<byte[][]>();
354  List<byte[][]> media_path_lists = new List<byte[][]>();
355  List<AiliaLLM.AILIALLMMediaData[]> media_data_arrays = new List<AiliaLLM.AILIALLMMediaData[]>();
356 
357  for (int i = 0; i < len; i++){
358  AiliaLLM.AILIALLMMultimodalChatMessage message = new AiliaLLM.AILIALLMMultimodalChatMessage();
359 
360  // Set role and content
361  role_text_list[i] = System.Text.Encoding.UTF8.GetBytes(messages[i].role+"\u0000");
362  GCHandle role_handle = GCHandle.Alloc(role_text_list[i], GCHandleType.Pinned);
363  IntPtr role_input = role_handle.AddrOfPinnedObject();
364 
365  content_text_list[i] = System.Text.Encoding.UTF8.GetBytes(messages[i].content+"\u0000");
366  GCHandle content_handle = GCHandle.Alloc(content_text_list[i], GCHandleType.Pinned);
367  IntPtr content_input = content_handle.AddrOfPinnedObject();
368 
369  message.role = role_input;
370  message.content = content_input;
371 
372  handle_list.Add(role_handle);
373  handle_list.Add(content_handle);
374 
375  // Handle media data
376  if (messages[i].media_data != null && messages[i].media_data.Count > 0){
377  int media_count = messages[i].media_data.Count;
378  message.media_count = (uint)media_count;
379 
380  byte[][] media_type_list = new byte[media_count][];
381  byte[][] media_path_list = new byte[media_count][];
382  AiliaLLM.AILIALLMMediaData[] media_array = new AiliaLLM.AILIALLMMediaData[media_count];
383 
384  for (int j = 0; j < media_count; j++){
385  AiliaLLM.AILIALLMMediaData media = new AiliaLLM.AILIALLMMediaData();
386 
387  // Media type
388  media_type_list[j] = System.Text.Encoding.UTF8.GetBytes(messages[i].media_data[j].media_type+"\u0000");
389  GCHandle type_handle = GCHandle.Alloc(media_type_list[j], GCHandleType.Pinned);
390  media.media_type = type_handle.AddrOfPinnedObject();
391  handle_list.Add(type_handle);
392 
393  // File path
394  if (!string.IsNullOrEmpty(messages[i].media_data[j].file_path)){
395  media_path_list[j] = System.Text.Encoding.UTF8.GetBytes(messages[i].media_data[j].file_path+"\u0000");
396  GCHandle path_handle = GCHandle.Alloc(media_path_list[j], GCHandleType.Pinned);
397  media.file_path = path_handle.AddrOfPinnedObject();
398  handle_list.Add(path_handle);
399  } else {
400  media.file_path = IntPtr.Zero;
401  }
402 
403  // Raw data
404  if (messages[i].media_data[j].data != null && messages[i].media_data[j].data.Length > 0){
405  GCHandle data_handle = GCHandle.Alloc(messages[i].media_data[j].data, GCHandleType.Pinned);
406  media.data = data_handle.AddrOfPinnedObject();
407  media.data_size = (uint)messages[i].media_data[j].data.Length;
408  handle_list.Add(data_handle);
409  } else {
410  media.data = IntPtr.Zero;
411  media.data_size = 0;
412  }
413 
414  media.width = messages[i].media_data[j].width;
415  media.height = messages[i].media_data[j].height;
416 
417  media_array[j] = media;
418  }
419 
420  media_type_lists.Add(media_type_list);
421  media_path_lists.Add(media_path_list);
422  media_data_arrays.Add(media_array);
423 
424  // Allocate and set media_data pointer
425  int media_size = Marshal.SizeOf(typeof(AiliaLLM.AILIALLMMediaData)) * media_count;
426  IntPtr media_ptr = Marshal.AllocHGlobal(media_size);
427 
428  for (int j = 0; j < media_count; j++){
429  IntPtr offset = new IntPtr(media_ptr.ToInt64() + j * Marshal.SizeOf(typeof(AiliaLLM.AILIALLMMediaData)));
430  Marshal.StructureToPtr(media_array[j], offset, false);
431  }
432 
433  message.media_data = media_ptr;
434  } else {
435  message.media_count = 0;
436  message.media_data = IntPtr.Zero;
437  }
438 
439  message_list[i] = message;
440  }
441 
442  int size = Marshal.SizeOf(typeof(AiliaLLM.AILIALLMMultimodalChatMessage)) * message_list.Length;
443  IntPtr ptr = Marshal.AllocHGlobal(size);
444 
445  int status = 0;
446 
447  try
448  {
449  for (int i = 0; i < message_list.Length; i++)
450  {
451  IntPtr offset = new IntPtr(ptr.ToInt64() + i * Marshal.SizeOf(typeof(AiliaLLM.AILIALLMMultimodalChatMessage)));
452  Marshal.StructureToPtr(message_list[i], offset, false);
453  }
454 
455  status = AiliaLLM.ailiaLLMSetMultimodalPrompt(net, ptr, (uint)len);
456  }
457  finally
458  {
459  // Free allocated memory
460  Marshal.FreeHGlobal(ptr);
461  for (int i = 0; i < message_list.Length; i++){
462  if (message_list[i].media_data != IntPtr.Zero){
463  Marshal.FreeHGlobal(message_list[i].media_data);
464  }
465  }
466  }
467 
468  for (int i = 0; i < handle_list.Count; i++){
469  handle_list[i].Free();
470  }
471 
472  context_full = false;
473  buf = new byte[0];
474  before_text = "";
475 
476  if (status != 0){
477  if (logging)
478  {
479  Debug.Log("ailiaLLMSetMultimodalPrompt failed " + status);
480  }
481  if (status == AiliaLLM.AILIA_LLM_STATUS_CONTEXT_FULL){
482  context_full = true;
483  }
484  return false;
485  }
486 
487  return true;
488  }

◆ SetPrompt()

bool ailiaLLM.AiliaLLMModel.SetPrompt ( List< AiliaLLMChatMessage messages)
inline

Set prompt messages.

Parameters
messagesPrompt messages
Returns
If this function is successful, it returns true , or false otherwise.
264  {
265  List<GCHandle> handle_list = new List<GCHandle>();
266  int len = messages.Count;
267  byte[][] role_text_list = new byte [len][];
268  byte[][] conntent_text_list = new byte [len][];
269  AiliaLLM.AILIAChatMessage [] message_list = new AiliaLLM.AILIAChatMessage[len];
270  for (int i = 0; i< len; i++){
271  AiliaLLM.AILIAChatMessage message = new AiliaLLM.AILIAChatMessage();
272 
273  role_text_list[i] = System.Text.Encoding.UTF8.GetBytes(messages[i].role+"\u0000");
274  GCHandle role_handle = GCHandle.Alloc(role_text_list[i], GCHandleType.Pinned);
275  IntPtr role_input = role_handle.AddrOfPinnedObject();
276 
277  conntent_text_list[i] = System.Text.Encoding.UTF8.GetBytes(messages[i].content+"\u0000");
278  GCHandle content_handle = GCHandle.Alloc(conntent_text_list[i], GCHandleType.Pinned);
279  IntPtr content_input = content_handle.AddrOfPinnedObject();
280 
281  message.role = role_input;
282  message.content = content_input;
283  message_list[i] = message;
284 
285  handle_list.Add(role_handle);
286  handle_list.Add(content_handle);
287  }
288 
289  int size = Marshal.SizeOf(typeof(AiliaLLM.AILIAChatMessage)) * message_list.Length;
290  IntPtr ptr = Marshal.AllocHGlobal(size);
291 
292  int status = 0;
293 
294  try
295  {
296  for (int i = 0; i < message_list.Length; i++)
297  {
298  IntPtr offset = new IntPtr(ptr.ToInt64() + i * Marshal.SizeOf(typeof(AiliaLLM.AILIAChatMessage)));
299  Marshal.StructureToPtr(message_list[i], offset, false);
300  }
301 
302  status = AiliaLLM.ailiaLLMSetPrompt(net, ptr, (uint)len);
303  }
304  finally
305  {
306  Marshal.FreeHGlobal(ptr);
307  }
308 
309  for (int i = 0; i < handle_list.Count; i++){
310  handle_list[i].Free();
311  }
312 
313  context_full = false;
314  buf = new byte[0];
315  before_text = "";
316 
317  if (status != 0){
318  if (logging)
319  {
320  Debug.Log("ailiaLLMSetPrompt failed " + status);
321  }
322  if (status == AiliaLLM.AILIA_LLM_STATUS_CONTEXT_FULL){
323  context_full = true;
324  }
325  return false;
326  }
327 
328  return true;
329  }

◆ SetSamplingParam()

bool ailiaLLM.AiliaLLMModel.SetSamplingParam ( uint  top_k,
float  top_p,
float  temp,
uint  dist 
)
inline

Set the sampling parameter.

Parameters
top_kSampling probability value's top number, default 40
top_pSampling probability value range, default 0.9 (0.9 to 1.0)
tempTemperature parameter, default 0.4
distSeed, default 1234
Returns
If this function is successful, it returns true , or false otherwise.
238  {
239  int status = AiliaLLM.ailiaLLMSetSamplingParams(net, top_k, top_p, temp, dist);
240  if (status != 0){
241  if (logging)
242  {
243  Debug.Log("ailiaLLMSetSamplingParams failed " + status);
244  }
245  return false;
246  }
247  return true;
248  }

The documentation for this class was generated from the following file: