2using ElevenLabs.TextToSpeech;
4using Newtonsoft.Json.Linq;
8using System.Collections.Generic;
10using System.Threading.Tasks;
25 public OpenAI.Models.Model
model = OpenAI.Models.Model.GPT5;
27 public List<string>
include =
new List<string>();
28 public List<OpenAI.Tool>
tools =
new List<Tool>();
36 tools.Add(
new WebSearchPreviewTool(SearchContextSize.Low));
37 include.Add(
"web_search_call.action.sources");
43 include.Add(
"file_search_call.results");
45 Debug.LogWarning($
"Actor {config.GetPersona()} has file search enabled but an empty file search vector store ID was provided!");
53 public ElevenLabs.Models.Model
fastModel = ElevenLabs.Models.Model.FlashV2_5;
73 private List<IResponseItem>
conversation =
new List<IResponseItem>();
76 public string Persona {
private set;
get; }
99 conversation.Add(
new Message(OpenAI.Role.System, message));
105 conversation.Add(
new Message(OpenAI.Role.User, message));
116 conversation.Add(
new Message(OpenAI.Role.Assistant, message));
130 CreateResponseRequest request =
new CreateResponseRequest(
137 OpenAI.Responses.Response response = await
openAIApi.ResponsesEndpoint.CreateModelResponseAsync(request, cancellationToken: cancellationToken);
141 for (
int i = 0; i < response.Output.Count; ++i) {
142 IResponseItem responseItem = response.Output[i];
143 switch (responseItem) {
144 case OpenAI.Responses.Message message:
146 actorResponse.message = message.ToString();
147 actorResponse.audioClip = await
GetAudioClipAsync(message.ToString(), cancellationToken);
149 case OpenAI.Responses.ReasoningItem reasoningItem:
151 List<string> reasonings =
new List<string>();
152 foreach (OpenAI.Responses.ReasoningSummary reasoningSummary in reasoningItem.Summary) {
153 actorResponse.
reasonings.Add(reasoningSummary.Text);
156 case OpenAI.Responses.WebSearchToolCall webSearchToolCall:
157 Debug.Log(
"Actor " +
Persona +
" Searched Web");
159 case OpenAI.Responses.FileSearchToolCall fileSearchToolCall:
160 Debug.Log(
"Actor " +
Persona +
" Searched Files");
162 case OpenAI.Responses.FunctionToolCall functionToolCall:
163 Debug.Log(
"Actor " +
Persona +
" Function Call: " + functionToolCall.Name +
", Arguments: " + functionToolCall.Arguments.ToString());
166 string output =
string.Empty;
167 if (functionToolCall.Name ==
"set_emotion") {
168 output =
ParseEmotion(functionToolCall.Arguments.ToString());
169 actorResponse.emotion = output.ToEmotion();
174 conversation.Add(
new FunctionToolCallOutput(functionToolCall, output));
177 Debug.LogWarning(
"Actor.GetResponse: Unhandled " + responseItem.GetType().Name +
" Received");
181 return actorResponse;
182 }
catch (Exception e) {
189 private async Task<AudioClip>
GetAudioClipAsync(
string text, CancellationToken cancellationToken) {
192 TextToSpeechRequest request =
new TextToSpeechRequest(
196 outputFormat: OutputFormat.PCM_24000);
197 ElevenLabs.VoiceClip voiceClip = await
elevenLabsApi.TextToSpeechEndpoint.TextToSpeechAsync(request, cancellationToken);
198 return voiceClip.AudioClip;
199 }
catch (Exception e) {
206 return System.Text.RegularExpressions.Regex.IsMatch(text,
@"[\[\]]");
214 List<string> emotions =
new List<string>();
215 for (
int i = 0; i < (int)
Emotion.Num; ++i) {
217 emotions.Add(emotion.ToString());
225 description =
"The emotion of your reply.",
226 @
enum = emotions.ToArray()
229 required =
new[] {
"emotion" }
231 string parameters = JsonConvert.SerializeObject(args, Formatting.Indented);
232 return new OpenAI.Function(
"set_emotion",
"Set the emotion of your current reply. The selected emotion is returned.", JToken.Parse(parameters));
236 JToken parsedArgs = JToken.Parse(args);
237 if (parsedArgs ==
null || parsedArgs[
"emotion"] ==
null) {
return string.Empty; }
238 string emotion = parsedArgs[
"emotion"].ToString();
ActorConfig Override()
Overrides any actor config value that is set in ActorOverrides.ini.
bool IsAnyFeatureEnabled(Feature features)
bool AreAllFeaturesEnabled(Feature features)
Feature
Flags to enable or disable actor features.
string GetOpenAIVectorStoreID()
string GetElevenLabsVoiceID()
ElevenLabs.Models.Model expressionModel
ElevenLabsSettings(ActorConfig config)
ElevenLabs.Models.Model fastModel
ReasoningEffort reasoningEffort
OpenAI.Models.Model model
List< OpenAI.Tool > tools
OpenAISettings(ActorConfig config)
List< string > reasonings
The actors are OpenAI Response models which chat with the user.
OpenAI.Function BuildSetEmotionTool()
bool ContainsAudioTags(string text)
void AddSystemMesssage(string message)
async Task< AudioClip > GetAudioClipAsync(string text, CancellationToken cancellationToken)
string ParseEmotion(string args)
async Task< Actor.Response > InsertResponse(string message, Emotion emotion, CancellationToken cancellationToken)
ElevenLabsSettings elevenLabsSettings
ElevenLabsClient elevenLabsApi
OpenAISettings openAISettings
void AddUserMessage(string message)
Actor(ActorConfig config)
List< IResponseItem > conversation
The conversation history from this actor's point of view.
async Task< Actor.Response > GetResponse(CancellationToken cancellationToken)
Helper class to load the authentication file and retrieve API keys.
static ElevenLabsAuthentication GetElevenLabsAuthentication()
static OpenAIAuthentication GetOpenAIAuthentication()
Persona
Personas the actors will role-play.