curl --request POST \
--url https://api.fireworks.ai/v1/accounts/{account_id}/supervisedFineTuningJobs/{supervised_fine_tuning_job_id}:resume \
--header 'Authorization: Bearer <token>' \
--header 'Content-Type: application/json' \
--data '{}'{
"dataset": "<string>",
"name": "<string>",
"displayName": "<string>",
"createTime": "2023-11-07T05:31:56Z",
"completedTime": "2023-11-07T05:31:56Z",
"awsS3Config": {
"credentialsSecret": "<string>",
"iamRoleArn": "<string>"
},
"azureBlobStorageConfig": {
"credentialsSecret": "<string>",
"managedIdentityClientId": "<string>",
"tenantId": "<string>"
},
"state": "JOB_STATE_UNSPECIFIED",
"status": {
"code": "OK",
"message": "<string>"
},
"createdBy": "<string>",
"outputModel": "<string>",
"baseModel": "<string>",
"warmStartFrom": "<string>",
"jinjaTemplate": "<string>",
"earlyStop": true,
"epochs": 123,
"learningRate": 123,
"maxContextLength": 123,
"loraRank": 123,
"wandbConfig": {
"enabled": true,
"apiKey": "<string>",
"project": "<string>",
"entity": "<string>",
"runId": "<string>",
"url": "<string>"
},
"evaluationDataset": "<string>",
"isTurbo": true,
"evalAutoCarveout": true,
"updateTime": "2023-11-07T05:31:56Z",
"nodes": 123,
"batchSize": 123,
"mtpEnabled": true,
"mtpNumDraftTokens": 123,
"mtpFreezeBaseModel": true,
"jobProgress": {
"percent": 123,
"epoch": 123,
"totalInputRequests": 123,
"totalProcessedRequests": 123,
"successfullyProcessedRequests": 123,
"failedRequests": 123,
"outputRows": 123,
"inputTokens": 123,
"outputTokens": 123,
"cachedInputTokenCount": 123
},
"metricsFileSignedUrl": "<string>",
"trainerLogsSignedUrl": "<string>",
"gradientAccumulationSteps": 123,
"learningRateWarmupSteps": 123,
"batchSizeSamples": 123,
"estimatedCost": {
"currencyCode": "<string>",
"units": "<string>",
"nanos": 123
},
"optimizerWeightDecay": 123,
"purpose": "PURPOSE_UNSPECIFIED"
}curl --request POST \
--url https://api.fireworks.ai/v1/accounts/{account_id}/supervisedFineTuningJobs/{supervised_fine_tuning_job_id}:resume \
--header 'Authorization: Bearer <token>' \
--header 'Content-Type: application/json' \
--data '{}'{
"dataset": "<string>",
"name": "<string>",
"displayName": "<string>",
"createTime": "2023-11-07T05:31:56Z",
"completedTime": "2023-11-07T05:31:56Z",
"awsS3Config": {
"credentialsSecret": "<string>",
"iamRoleArn": "<string>"
},
"azureBlobStorageConfig": {
"credentialsSecret": "<string>",
"managedIdentityClientId": "<string>",
"tenantId": "<string>"
},
"state": "JOB_STATE_UNSPECIFIED",
"status": {
"code": "OK",
"message": "<string>"
},
"createdBy": "<string>",
"outputModel": "<string>",
"baseModel": "<string>",
"warmStartFrom": "<string>",
"jinjaTemplate": "<string>",
"earlyStop": true,
"epochs": 123,
"learningRate": 123,
"maxContextLength": 123,
"loraRank": 123,
"wandbConfig": {
"enabled": true,
"apiKey": "<string>",
"project": "<string>",
"entity": "<string>",
"runId": "<string>",
"url": "<string>"
},
"evaluationDataset": "<string>",
"isTurbo": true,
"evalAutoCarveout": true,
"updateTime": "2023-11-07T05:31:56Z",
"nodes": 123,
"batchSize": 123,
"mtpEnabled": true,
"mtpNumDraftTokens": 123,
"mtpFreezeBaseModel": true,
"jobProgress": {
"percent": 123,
"epoch": 123,
"totalInputRequests": 123,
"totalProcessedRequests": 123,
"successfullyProcessedRequests": 123,
"failedRequests": 123,
"outputRows": 123,
"inputTokens": 123,
"outputTokens": 123,
"cachedInputTokenCount": 123
},
"metricsFileSignedUrl": "<string>",
"trainerLogsSignedUrl": "<string>",
"gradientAccumulationSteps": 123,
"learningRateWarmupSteps": 123,
"batchSizeSamples": 123,
"estimatedCost": {
"currencyCode": "<string>",
"units": "<string>",
"nanos": 123
},
"optimizerWeightDecay": 123,
"purpose": "PURPOSE_UNSPECIFIED"
}

Bearer authentication using your Fireworks API key. Format: Bearer <API_KEY>
The Account Id
The Supervised Fine-tuning Job Id
The body is of type object.
A successful response.
The name of the dataset used for training.
The AWS configuration for S3 dataset access.
Show child attributes
The Azure configuration for Azure Blob Storage dataset access.
Show child attributes
JobState represents the state an asynchronous job can be in.
Allowed values: JOB_STATE_UNSPECIFIED, JOB_STATE_CREATING, JOB_STATE_RUNNING, JOB_STATE_COMPLETED, JOB_STATE_FAILED, JOB_STATE_CANCELLED, JOB_STATE_DELETING, JOB_STATE_WRITING_RESULTS, JOB_STATE_VALIDATING, JOB_STATE_DELETING_CLEANING_UP, JOB_STATE_PENDING, JOB_STATE_EXPIRED, JOB_STATE_RE_QUEUEING, JOB_STATE_CREATING_INPUT_DATASET, JOB_STATE_IDLE, JOB_STATE_CANCELLING, JOB_STATE_EARLY_STOPPED, JOB_STATE_PAUSED, JOB_STATE_DELETED
Show child attributes
The email address of the user who initiated this fine-tuning job.
The model ID to be assigned to the resulting fine-tuned model. If not specified, the job ID will be used.
The name of the base model to be fine-tuned Only one of 'base_model' or 'warm_start_from' should be specified.
The PEFT addon model in Fireworks format to be fine-tuned from Only one of 'base_model' or 'warm_start_from' should be specified.
Whether to stop training early if the validation loss does not improve.
The number of epochs to train for.
The learning rate used for training.
The maximum context length to use with the model.
The rank of the LoRA layers.
The Weights & Biases team/user account for logging training progress.
Show child attributes
The name of a separate dataset to use for evaluation.
Whether to run the fine-tuning job in turbo mode.
Whether to auto-carve the dataset for eval.
The update time for the supervised fine-tuning job.
Deprecated: multi-node scheduling is now handled by the cookbook orchestrator in V2 workflows. This field is ignored for V2 jobs and will be removed in a future release.
Deprecated: MTP is not supported in V2 training. These fields are retained for V1 Helm-based SFT backward compatibility only.
Deprecated: see mtp_enabled.
Deprecated: see mtp_enabled.
Job progress.
Show child attributes
The signed URL for the trainer logs file (stdout/stderr). Only populated if the account has trainer log reading enabled.
The number of samples per gradient batch.
The estimated cost of the job.
Show child attributes
Weight decay (L2 regularization) for optimizer.
Scheduling purpose for this job.
Allowed values: PURPOSE_UNSPECIFIED, PURPOSE_PILOT
Was this page helpful?