CreateCompletionRequest.php
<?php

namespace JordanDalton\AnthropicSdkPhp\Requests;

use Saloon\Contracts\Body\HasBody;
use Saloon\Enums\Method;
use Saloon\Http\Request;
use Saloon\Traits\Body\HasJsonBody;

class CreateCompletionRequest extends Request implements HasBody
{
    use HasJsonBody;

    protected Method $method = Method::POST;

    /**
     * @param string $prompt The prompt that you want Claude to complete.
     * @param string $model The model that will complete your prompt.
     * @param int $max_tokens_to_sample The maximum number of tokens to generate before stopping.
     * @param float $temperature Amount of randomness injected into the response.
     * @param array<string> $stop_sequences Sequences that will cause the model to stop generating completion text.
     * @param float $top_p Use nucleus sampling.
     * @param int $top_k Only sample from the top K options for each subsequent token.
     * @param array $metadata An object describing metadata about the request.
     * @param bool $stream Whether to incrementally stream the response using server-sent events.
     */
    public function __construct(
        public readonly string $prompt = "\n\nHuman:Say Hi\n\nAssistant:",
        public readonly string $model = 'claude-2',
        public readonly int $max_tokens_to_sample = 256,
        public readonly float $temperature = 1.0,
        public readonly array $stop_sequences = [],
        public readonly float $top_p = 0.7,
        public readonly int $top_k = 5,
        public array $metadata = [],
        public readonly bool $stream = false
    ) {
        //
    }

    public function resolveEndpoint(): string
    {
        return 'complete';
    }

    public function defaultBody(): array
    {
        // Attach a user identifier to the request metadata when the caller has not provided one.
        // $metadata is intentionally not readonly so it can be filled in here.
        if (! isset($this->metadata['user_id'])) {
            $this->metadata['user_id'] = uniqid();
        }

        return [
            'model' => $this->model,
            'prompt' => $this->prompt,
            'max_tokens_to_sample' => $this->max_tokens_to_sample,
            'temperature' => $this->temperature,
            'top_p' => $this->top_p,
            'top_k' => $this->top_k,
            'stop_sequences' => $this->stop_sequences,
            'metadata' => $this->metadata,
            'stream' => $this->stream,
        ];
    }
}
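
A minimal usage sketch follows, assuming the package exposes a Saloon connector (named AnthropicConnector here purely for illustration; its real class name and constructor may differ). It shows the request being built with named arguments, sent through the connector, and the completion text read from the JSON response.

<?php

use JordanDalton\AnthropicSdkPhp\Requests\CreateCompletionRequest;

// "AnthropicConnector" and its apiKey argument are assumptions for this sketch,
// not a confirmed part of the package's API.
$connector = new AnthropicConnector(apiKey: getenv('ANTHROPIC_API_KEY'));

// The legacy text-completions endpoint expects prompts in the
// "\n\nHuman: ...\n\nAssistant:" turn format.
$request = new CreateCompletionRequest(
    prompt: "\n\nHuman: Write a haiku about PHP.\n\nAssistant:",
    model: 'claude-2',
    max_tokens_to_sample: 256,
    temperature: 0.7,
);

// Saloon connectors send Request objects and return a Response.
$response = $connector->send($request);

// The generated text is returned under the "completion" key of the JSON body.
$completion = $response->json('completion');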