Llama API not working
<?php

class LlamaAI {
    private string $hostname;
    private string $domainPath;
    private string $apiToken;
    private array $headers;
    private array $queue = [];

    public function __construct(string $apiToken, string $hostname = 'https://api.llama-api.com', string $domainPath = '/chat/completions') {
        $this->hostname = $hostname;
        $this->domainPath = $domainPath;
        $this->apiToken = $apiToken;
        $this->headers = ['Authorization: Bearer ' . $this->apiToken];
    }

    private function makeRequest(array $apiRequestJson): array {
        $url = $this->hostname . $this->domainPath;
        $payload = json_encode($apiRequestJson);

        $ch = curl_init($url);
        curl_setopt($ch, CURLOPT_RETURNTRANSFER, true);
        curl_setopt($ch, CURLOPT_POST, true);
        curl_setopt($ch, CURLOPT_POSTFIELDS, $payload);
        curl_setopt($ch, CURLOPT_HTTPHEADER, array_merge($this->headers, ['Content-Type: application/json']));
        // Disabling peer verification sidesteps local certificate problems but is insecure; re-enable it in production.
        curl_setopt($ch, CURLOPT_SSL_VERIFYPEER, false);

        $response = curl_exec($ch);
        if (curl_errno($ch)) {
            $error = curl_error($ch);
            curl_close($ch);
            throw new Exception("Error while making request: " . $error);
        }

        $httpCode = curl_getinfo($ch, CURLINFO_HTTP_CODE);
        curl_close($ch);

        return ['status' => $httpCode, 'data' => json_decode($response, true)];
    }

    private function _runStreamForJupyter(array $apiRequestJson): void {
        $response = $this->makeRequest($apiRequestJson);
        if ($response['status'] !== 200) {
            throw new Exception("POST " . $response['status'] . " " . ($response['data']['detail'] ?? ''));
        }

        // PHP's curl does not stream the response the way axios does; a real
        // implementation would read chunks as they arrive via curl's callbacks.
        // As a simplification, this treats the fully buffered response body as
        // an array of chunks and pushes them onto the queue.
        if (is_array($response['data'])) {
            $this->queue = array_merge($this->queue, $response['data']);
        } else {
            // Handle cases where the streamed response format is different.
            $this->queue[] = $response['data'];
        }
    }

    public function getSequences(): Generator {
        while (!empty($this->queue)) {
            yield array_shift($this->queue);
            usleep(100 * 1000); // Simulate a 100ms delay between chunks.
        }
    }

    public function runStream(array $apiRequestJson): void {
        $this->_runStreamForJupyter($apiRequestJson);
        // The caller consumes the queued chunks by iterating over getSequences().
    }

    public function runSync(array $apiRequestJson): ?array {
        $response = $this->makeRequest($apiRequestJson);
        if ($response['status'] !== 200) {
            throw new Exception("POST " . $response['status'] . " " . ($response['data']['detail'] ?? ''));
        }
        return $response['data'];
    }

    public function run(array $apiRequestJson) {
        if (isset($apiRequestJson['stream']) && $apiRequestJson['stream']) {
            return $this->runStream($apiRequestJson);
        } else {
            return $this->runSync($apiRequestJson);
        }
    }
}

// Usage
$llama = new LlamaAI('212bd36e-33bd-44e2-954b-2e8fe2ec3141');

// Synchronous request.
$requestData = [
    'messages' => [['role' => 'user', 'content' => 'Hello']],
    'stream' => false,
];

try {
    $result = $llama->run($requestData);
    print_r($result);
} catch (Exception $e) {
    echo "Error: " . $e->getMessage() . "\n";
}

// "Streaming" request: chunks are buffered by runStream() and replayed from the queue.
$streamingRequestData = [
    'messages' => [['role' => 'user', 'content' => 'Tell me a story']],
    'stream' => true,
];

try {
    $llama->run($streamingRequestData);
    foreach ($llama->getSequences() as $sequence) {
        print_r($sequence);
    }
} catch (Exception $e) {
    echo "Error: " . $e->getMessage() . "\n";
}
?>
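
The comments in _runStreamForJupyter note that real streaming would require reading the response chunk by chunk through curl's callbacks rather than buffering the whole body. The snippet below is a minimal sketch of that approach using CURLOPT_WRITEFUNCTION. It assumes the endpoint emits OpenAI-style server-sent events ("data: {...}" blocks separated by blank lines); the helper name streamChatCompletion and the choices/delta/content field path are illustrative assumptions, not documented Llama API behaviour.

<?php

// Sketch of a true streaming request using curl's write callback.
// Assumption: the endpoint streams server-sent events ("data: {...}" blocks
// separated by blank lines); adjust the parsing if the API uses another format.
function streamChatCompletion(string $apiToken, array $apiRequestJson, callable $onChunk): void {
    $url = 'https://api.llama-api.com/chat/completions';
    $buffer = '';

    $ch = curl_init($url);
    curl_setopt($ch, CURLOPT_POST, true);
    curl_setopt($ch, CURLOPT_POSTFIELDS, json_encode($apiRequestJson));
    curl_setopt($ch, CURLOPT_HTTPHEADER, [
        'Authorization: Bearer ' . $apiToken,
        'Content-Type: application/json',
    ]);
    // Hand each network chunk to this callback as it arrives instead of
    // buffering the whole response body in memory.
    curl_setopt($ch, CURLOPT_WRITEFUNCTION, function ($ch, $data) use (&$buffer, $onChunk): int {
        $buffer .= $data;
        // Split on blank-line event boundaries; keep any partial event in the buffer.
        while (($pos = strpos($buffer, "\n\n")) !== false) {
            $event = substr($buffer, 0, $pos);
            $buffer = substr($buffer, $pos + 2);
            if (strncmp($event, 'data: ', 6) === 0) {
                $json = trim(substr($event, 6));
                if ($json !== '' && $json !== '[DONE]') {
                    $decoded = json_decode($json, true);
                    if ($decoded !== null) {
                        $onChunk($decoded);
                    }
                }
            }
        }
        return strlen($data); // Tell curl how many bytes were consumed.
    });

    if (curl_exec($ch) === false) {
        $error = curl_error($ch);
        curl_close($ch);
        throw new Exception("Error while streaming request: " . $error);
    }
    $httpCode = curl_getinfo($ch, CURLINFO_HTTP_CODE);
    curl_close($ch);
    if ($httpCode !== 200) {
        throw new Exception("POST " . $httpCode);
    }
}

// Example: print each content delta as it arrives (the field path is assumed).
streamChatCompletion('YOUR_API_TOKEN', [
    'messages' => [['role' => 'user', 'content' => 'Tell me a story']],
    'stream' => true,
], function (array $chunk): void {
    echo $chunk['choices'][0]['delta']['content'] ?? '';
});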