Get Pipeline Data Source Status
client.Pipelines.DataSources.GetStatus(ctx, dataSourceID, query) (*ManagedIngestionStatusResponse, error)
GET /api/v1/pipelines/{pipeline_id}/data-sources/{data_source_id}/status
Get Pipeline Data Source Status
package main
import (
"context"
"fmt"
"github.com/stainless-sdks/llamacloud-prod-go"
"github.com/stainless-sdks/llamacloud-prod-go/option"
)
// main demonstrates fetching the managed-ingestion status of a pipeline
// data source and printing the job ID from the response.
func main() {
	// Construct an API client authenticated with an API key.
	client := llamacloudprod.NewClient(
		option.WithAPIKey("My API Key"),
	)

	// Look up the status for one data source within a pipeline; both the
	// data-source ID (positional) and the pipeline ID (params) are required.
	res, err := client.Pipelines.DataSources.GetStatus(
		context.TODO(),
		"182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
		llamacloudprod.PipelineDataSourceGetStatusParams{
			PipelineID: "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
		},
	)
	if err != nil {
		panic(err.Error())
	}

	fmt.Printf("%+v\n", res.JobID)
}
{
"status": "NOT_STARTED",
"deployment_date": "2019-12-27T18:11:19.117Z",
"effective_at": "2019-12-27T18:11:19.117Z",
"error": [
{
"job_id": "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
"message": "message",
"step": "MANAGED_INGESTION"
}
],
"job_id": "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e"
}

Returns Examples
{
"status": "NOT_STARTED",
"deployment_date": "2019-12-27T18:11:19.117Z",
"effective_at": "2019-12-27T18:11:19.117Z",
"error": [
{
"job_id": "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
"message": "message",
"step": "MANAGED_INGESTION"
}
],
"job_id": "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e"
}