List Pipeline Document Chunks
client.pipelines.documents.getChunks(documentID: string, params: DocumentGetChunksParams { pipeline_id }, options?: RequestOptions): DocumentGetChunksResponse { class_name, embedding, end_char_idx, 11 more }
GET /api/v1/pipelines/{pipeline_id}/documents/{document_id}/chunks
List Pipeline Document Chunks
import LlamaCloud from '@llamaindex/llama-cloud';

// Instantiate the SDK client. Passing the key explicitly is optional here:
// the constructor already falls back to LLAMA_CLOUD_API_KEY by default.
const client = new LlamaCloud({ apiKey: process.env['LLAMA_CLOUD_API_KEY'] });

// Fetch every chunk (text node) of one document inside a pipeline.
const textNodes = await client.pipelines.documents.getChunks('document_id', {
  pipeline_id: '182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e',
});
console.log(textNodes);
[
{
"class_name": "class_name",
"embedding": [
0
],
"end_char_idx": 0,
"excluded_embed_metadata_keys": [
"string"
],
"excluded_llm_metadata_keys": [
"string"
],
"extra_info": {
"foo": "bar"
},
"id_": "id_",
"metadata_seperator": "metadata_seperator",
"metadata_template": "metadata_template",
"mimetype": "mimetype",
"relationships": {
"foo": {
"node_id": "node_id",
"class_name": "class_name",
"hash": "hash",
"metadata": {
"foo": "bar"
},
"node_type": "1"
}
},
"start_char_idx": 0,
"text": "text",
"text_template": "text_template"
}
]

Returns Examples
[
{
"class_name": "class_name",
"embedding": [
0
],
"end_char_idx": 0,
"excluded_embed_metadata_keys": [
"string"
],
"excluded_llm_metadata_keys": [
"string"
],
"extra_info": {
"foo": "bar"
},
"id_": "id_",
"metadata_seperator": "metadata_seperator",
"metadata_template": "metadata_template",
"mimetype": "mimetype",
"relationships": {
"foo": {
"node_id": "node_id",
"class_name": "class_name",
"hash": "hash",
"metadata": {
"foo": "bar"
},
"node_type": "1"
}
},
"start_char_idx": 0,
"text": "text",
"text_template": "text_template"
}
]