# Docs for v2 can be found by changing the above selector ^
import os

from together import Together

# Authenticate with the API key taken from the TOGETHER_API_KEY
# environment variable.
client = Together(api_key=os.environ.get("TOGETHER_API_KEY"))

# Submit a batch job for the previously uploaded input file, targeting
# the chat-completions endpoint.
batch = client.batches.create_batch("file_id", endpoint="/v1/chat/completions")
print(batch.id)

{
"job": {
"id": "01234567-8901-2345-6789-012345678901",
"user_id": "user_789xyz012",
"input_file_id": "file-input123abc456def",
"file_size_bytes": 1048576,
"status": "IN_PROGRESS",
"job_deadline": "2024-01-15T15:30:00Z",
"created_at": "2024-01-15T14:30:00Z",
"endpoint": "/v1/chat/completions",
"progress": 75,
"model_id": "meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo",
"output_file_id": "file-output789xyz012ghi",
"error_file_id": "file-errors456def789jkl",
"error": "<string>",
"completed_at": "2024-01-15T15:45:30Z"
},
"warning": "<string>"
}

Create a new batch job with the given input file and endpoint
# Docs for v2 can be found by changing the above selector ^
import os

from together import Together

# Build the client; the API key is read from the TOGETHER_API_KEY
# environment variable.
client = Together(api_key=os.environ.get("TOGETHER_API_KEY"))

# Create the batch job from the uploaded input file, routed to the
# chat-completions endpoint.
batch = client.batches.create_batch("file_id", endpoint="/v1/chat/completions")
print(batch.id)

{
"job": {
"id": "01234567-8901-2345-6789-012345678901",
"user_id": "user_789xyz012",
"input_file_id": "file-input123abc456def",
"file_size_bytes": 1048576,
"status": "IN_PROGRESS",
"job_deadline": "2024-01-15T15:30:00Z",
"created_at": "2024-01-15T14:30:00Z",
"endpoint": "/v1/chat/completions",
"progress": 75,
"model_id": "meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo",
"output_file_id": "file-output789xyz012ghi",
"error_file_id": "file-errors456def789jkl",
"error": "<string>",
"completed_at": "2024-01-15T15:45:30Z"
},
"warning": "<string>"
}

Bearer authentication header of the form Bearer <token>, where <token> is your auth token.
The endpoint to use for batch processing
"/v1/chat/completions"
ID of the uploaded input file containing batch requests
"file-abc123def456ghi789"
Time window for batch completion (optional)
"24h"
Priority for batch processing (optional)
1
Model to use for processing batch requests
"meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo"
Job created (potentially with warnings)
Show child attributes
"01234567-8901-2345-6789-012345678901"
"user_789xyz012"
"file-input123abc456def"
Size of input file in bytes
1048576
Current status of the batch job
Available options: VALIDATING, IN_PROGRESS, COMPLETED, FAILED, EXPIRED, CANCELLED
Example: "IN_PROGRESS"
"2024-01-15T15:30:00Z"
"2024-01-15T14:30:00Z"
"/v1/chat/completions"
Completion progress (0 to 100)
75
Model used for processing requests
"meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo"
"file-output789xyz012ghi"
"file-errors456def789jkl"
"2024-01-15T15:45:30Z"
Was this page helpful?