diff --git a/src/monocle_apptrace/wrap_common.py b/src/monocle_apptrace/wrap_common.py
index 743bcdd..4a3e84b 100644
--- a/src/monocle_apptrace/wrap_common.py
+++ b/src/monocle_apptrace/wrap_common.py
@@ -20,6 +20,7 @@
 TYPE = "type"
 PROVIDER = "provider_name"
 EMBEDDING_MODEL = "embedding_model"
+VECTOR_STORE = 'vector_store'
 
 
 WORKFLOW_TYPE_MAP = {
@@ -68,7 +69,7 @@ def pre_task_processing(to_wrap, instance, args, span):
     #capture the tags attribute of the instance if present, else ignore
     try:
         update_tags(instance, span)
-        update_attributes(instance, span)
+        update_vectorstore_attributes(to_wrap, instance, span)
     except AttributeError:
         pass
     update_span_with_context_input(to_wrap=to_wrap, wrapped_args=args, span=span)
@@ -136,6 +137,8 @@ def llm_wrapper(tracer: Tracer, to_wrap, wrapped, instance, args, kwargs):
     else:
         name = f"langchain.task.{instance.__class__.__name__}"
     with tracer.start_as_current_span(name) as span:
+        if 'haystack.components.retrievers' in to_wrap['package'] and 'haystack.retriever' in span.name:
+            update_vectorstore_attributes(to_wrap, instance, span)
         update_llm_endpoint(curr_span= span, instance=instance)
 
         return_value = wrapped(*args, **kwargs)
@@ -151,19 +154,12 @@ def update_llm_endpoint(curr_span: Span, instance):
     if 'temperature' in instance.__dict__:
         temp_val = instance.__dict__.get("temperature")
         curr_span.set_attribute("temperature", temp_val)
-    if 'document_store' in instance.__dict__:
-        type = "vector_store"
-        document_store = instance.__dict__.get("document_store").__class__.__name__
-        embedding_model = get_embedding_model()
-        curr_span.set_attribute(TYPE, type)
-        curr_span.set_attribute(PROVIDER, document_store)
-        curr_span.set_attribute(EMBEDDING_MODEL, embedding_model)
     # handling for model name
-    model_name = resolve_from_alias(instance.__dict__ , ["model","model_name"])
+    model_name = resolve_from_alias(instance.__dict__ , ["model","model_name"])
     curr_span.set_attribute("model_name", model_name)
     set_provider_name(curr_span, instance)
     # handling AzureOpenAI deployment
-    deployment_name = resolve_from_alias(instance.__dict__ , [ "engine", "azure_deployment",
+    deployment_name = resolve_from_alias(instance.__dict__ , [ "engine", "azure_deployment",
                                                                "deployment_name", "deployment_id", "deployment"])
     curr_span.set_attribute("az_openai_deployment", deployment_name)
     # handling the inference endpoint
@@ -278,26 +274,33 @@ def update_tags(instance, span):
         pass
 
 
-def update_attributes(instance, span):
+def update_vectorstore_attributes(to_wrap, instance, span):
     """
     Updates the telemetry span attributes for vector store retrieval tasks.
     """
     try:
         # Check if the span is for a vector store retriever task from langchain
-        if span.name == 'langchain.task.VectorStoreRetriever':
+
+        if to_wrap['package'] == 'langchain_core.retrievers':
             # Extract embedding model and provider from instance tags
             embedding_model = instance.tags[0]
             provider = instance.tags[1]
             # Update span attributes with type, provider, and embedding model
-            span._attributes.update({TYPE: 'vector_store'})
+            span._attributes.update({TYPE: VECTOR_STORE})
             span._attributes.update({PROVIDER: provider})
             span._attributes.update({EMBEDDING_MODEL: embedding_model})
-        elif span.name == 'llamaindex.query':
+        elif to_wrap['package'] == 'llama_index.core.base.base_query_engine':
             model_name = instance.retriever._embed_model.model_name
             vector_store_name = type(instance.retriever._vector_store).__name__
-            span._attributes.update({TYPE: 'vector_store'})
+            span._attributes.update({TYPE: VECTOR_STORE})
             span._attributes.update({PROVIDER: vector_store_name})
             span._attributes.update({EMBEDDING_MODEL: model_name})
+        elif 'document_store' in instance.__dict__ and 'haystack.components.retrievers' in to_wrap['package']:
+            document_store = instance.__dict__.get("document_store").__class__.__name__
+            embedding_model = get_embedding_model()
+            span._attributes.update({TYPE: VECTOR_STORE})
+            span._attributes.update({PROVIDER: document_store})
+            span._attributes.update({EMBEDDING_MODEL: embedding_model})
     except:
         pass
\ No newline at end of file
diff --git a/tests/haystack_sample.py b/tests/haystack_sample.py
index fc0ceee..25514c1 100644
--- a/tests/haystack_sample.py
+++ b/tests/haystack_sample.py
@@ -101,17 +101,17 @@ def haystack_app():
 
 
 haystack_app()
-# {
+#{
 #     "name": "haystack.retriever",
 #     "context": {
-#         "trace_id": "0xee46d79f09814a638517cd05f26ab21f",
-#         "span_id": "0x79a2f252baf2b721",
+#         "trace_id": "0x1db120b68e3a759882ac457b07af344f",
+#         "span_id": "0x88a0290f25ae392a",
 #         "trace_state": "[]"
 #     },
 #     "kind": "SpanKind.INTERNAL",
-#     "parent_id": "0xad97601a1ba8388d",
-#     "start_time": "2024-09-13T11:58:06.064937Z",
-#     "end_time": "2024-09-13T11:58:06.074810Z",
+#     "parent_id": "0x346eb05a7dd44a99",
+#     "start_time": "2024-05-23T03:57:52.482232Z",
+#     "end_time": "2024-05-23T03:57:52.496394Z",
 #     "status": {
 #         "status_code": "UNSET"
 #     },
@@ -129,27 +129,26 @@ def haystack_app():
 #         "schema_url": ""
 #     }
 # }
-# The Colossus of Rhodes depicted the Greek sun-god Helios and was approximately 33 meters (108 feet) tall. The head likely had curly hair with spikes of bronze or silver flame radiating, similar to the images found on contemporary Rhodian coins. The statue's exact appearance is unknown, but it was described as having a standard rendering of the time.
# { # "name": "haystack.openai", # "context": { -# "trace_id": "0xee46d79f09814a638517cd05f26ab21f", -# "span_id": "0xf57731cd5cfe3f8e", +# "trace_id": "0x1db120b68e3a759882ac457b07af344f", +# "span_id": "0xc90f3343240ee785", # "trace_state": "[]" # }, # "kind": "SpanKind.INTERNAL", -# "parent_id": "0xad97601a1ba8388d", -# "start_time": "2024-09-13T11:58:06.075424Z", -# "end_time": "2024-09-13T11:58:08.299323Z", +# "parent_id": "0x346eb05a7dd44a99", +# "start_time": "2024-05-23T03:57:52.497002Z", +# "end_time": "2024-05-23T03:57:54.793035Z", # "status": { # "status_code": "UNSET" # }, # "attributes": { -# "llm_input": "\n Given the following information, answer the question.\n\n Context:\n \n Within it, too, are to be seen large masses of rock, by the weight of which the artist steadied it while erecting it.[22][23]\nDestruction of the remains[edit]\nThe ultimate fate of the remains of the statue is uncertain. Rhodes has two serious earthquakes per century, owing to its location on the seismically unstable Hellenic Arc. Pausanias tells us, writing ca. 174, how the city was so devastated by an earthquake that the Sibyl oracle foretelling its destruction was considered fulfilled.[24] This means the statue could not have survived for long if it was ever repaired. By the 4th century Rhodes was Christianized, meaning any further maintenance or rebuilding, if there ever was any before, on an ancient pagan statue is unlikely. The metal would have likely been used for coins and maybe also tools by the time of the Arab wars, especially during earlier conflicts such as the Sassanian wars.[9]\nThe onset of Islamic naval incursions against the Byzantine empire gave rise to a dramatic account of what became of the Colossus. \n \n Construction[edit]\nTimeline and map of the Seven Wonders of the Ancient World, including the Colossus of Rhodes\nConstruction began in 292\u00a0BC. Ancient accounts, which differ to some degree, describe the structure as being built with iron tie bars to which brass plates were fixed to form the skin. The interior of the structure, which stood on a 15-metre-high (49-foot) white marble pedestal near the Rhodes harbour entrance, was then filled with stone blocks as construction progressed.[14] Other sources place the Colossus on a breakwater in the harbour. According to most contemporary descriptions, the statue itself was about 70 cubits, or 32 metres (105 feet) tall.[15] Much of the iron and bronze was reforged from the various weapons Demetrius's army left behind, and the abandoned second siege tower may have been used for scaffolding around the lower levels during construction.\n\n \n The Colossus of Rhodes (Ancient Greek: \u1f41 \u039a\u03bf\u03bb\u03bf\u03c3\u03c3\u1f78\u03c2 \u1fec\u03cc\u03b4\u03b9\u03bf\u03c2, romanized:\u00a0ho Koloss\u00f2s Rh\u00f3dios Greek: \u039a\u03bf\u03bb\u03bf\u03c3\u03c3\u03cc\u03c2 \u03c4\u03b7\u03c2 \u03a1\u03cc\u03b4\u03bf\u03c5, romanized:\u00a0Koloss\u00f3s tes Rh\u00f3dou)[a] was a statue of the Greek sun-god Helios, erected in the city of Rhodes, on the Greek island of the same name, by Chares of Lindos in 280\u00a0BC. 
One of the Seven Wonders of the Ancient World, it was constructed to celebrate the successful defence of Rhodes city against an attack by Demetrius Poliorcetes, who had besieged it for a year with a large army and navy.\nAccording to most contemporary descriptions, the Colossus stood approximately 70 cubits, or 33 metres (108 feet) high \u2013 approximately the height of the modern Statue of Liberty from feet to crown \u2013 making it the tallest statue in the ancient world.[2] It collapsed during the earthquake of 226 BC, although parts of it were preserved. In accordance with a certain oracle, the Rhodians did not build it again.[3] John Malalas wrote that Hadrian in his reign re-erected the Colossus,[4] but he was mistaken.[5] According to the Suda, the Rhodians were called Colossaeans (\u039a\u03bf\u03bb\u03bf\u03c3\u03c3\u03b1\u03b5\u1fd6\u03c2), because they erected the statue on the island.\n \n Silver tetradrachm of Rhodes showing Helios and a rose (205\u2013190 BC, 13.48 g)\nWhile scholars do not know what the statue looked like, they do have a good idea of what the head and face looked like, as it was of a standard rendering at the time. The head would have had curly hair with evenly spaced spikes of bronze or silver flame radiating, similar to the images found on contemporary Rhodian coins.[29]\n\nPossible locations[edit]\nThe old harbour entrance from inner embankment. The Fortress of St Nicholas is on right\nWhile scholars generally agree that anecdotal depictions of the Colossus straddling the harbour's entry point have no historic or scientific basis,[29] the monument's actual location remains a matter of debate. As mentioned above the statue is thought locally to have stood where two pillars now stand at the Mandraki port entrance.\nThe floor of the Fortress of St Nicholas, near the harbour entrance, contains a circle of sandstone blocks of unknown origin or purpose. \n \n To you, O Sun, the people of Dorian Rhodes set up this bronze statue reaching to Olympus, when they had pacified the waves of war and crowned their city with the spoils taken from the enemy. Not only over the seas but also on land did they kindle the lovely torch of freedom and independence. For to the descendants of Herakles belongs dominion over sea and land.\nCollapse (226\u00a0BC)[edit]\nArtist's conception from the Grolier Society's 1911 Book of Knowledge\nFurther information: 226 BC Rhodes earthquake\nThe statue stood for 54 years until a 226\u00a0BC earthquake caused significant damage to large portions of Rhodes, including the harbour and commercial buildings, which were destroyed.[19] The statue snapped at the knees and fell over onto land. 
Ptolemy III offered to pay for the reconstruction of the statue, but the Oracle of Delphi made the Rhodians fear that they had offended Helios, and they declined to rebuild it.[citation needed]\n\nFallen state (226\u00a0BC to 653\u00a0AD)[edit]\nThe remains lay on the ground for over 800 years, and even broken, they were so impressive that many travelled to see them.\n\n \n [6]\nIn 653, an Arab force under Muslim general Muawiyah I conquered Rhodes, and according to the Chronicle of Theophanes the Confessor,[7] the statue was completely destroyed and the remains sold;[8] this account may be unreliable.[9]\nSince 2008, a series of as-yet-unrealized proposals to build a new Colossus at Rhodes Harbour have been announced, although the actual location of the original monument remains in dispute.[10][11]\n\nSiege of Rhodes[edit]\nMain article: Siege of Rhodes (305\u2013304\u00a0BC)\nIn the early fourth century BC, Rhodes, allied with Ptolemy I of Egypt, prevented a mass invasion staged by their common enemy, Antigonus I Monophthalmus.\nIn 304\u00a0BC a relief force of ships sent by Ptolemy arrived, and Demetrius (son of Antigonus) and his army abandoned the siege, leaving behind most of their siege equipment. To celebrate their victory, the Rhodians sold the equipment left behind for 300 talents[12] and decided to use the money to build a colossal statue of their patron god, Helios. Construction was left to the direction of Chares, a native of Lindos in Rhodes, who had been involved with large-scale statues before. His teacher, the sculptor Lysippos, had constructed a 22-metre-high (72-foot)[13] bronze statue of Zeus at Tarentum.\n\n\n \n Seeking to outdo their Athenian rivals, the Eleans employed sculptor Phidias, who had previously made the massive statue of Athena Parthenos in the Parthenon.[2]\nThe statue occupied half the width of the aisle of the temple built to house it. The geographer Strabo noted early in the 1st century BC that the statue gave \"the impression that if Zeus arose and stood erect he would unroof the temple.\"[3] The Zeus was a chryselephantine sculpture, made with ivory and gold panels on a wooden substructure. No copy in marble or bronze has survived, though there are recognizable but only approximate versions on coins of nearby Elis and on Roman coins and engraved gems.[4]\nThe 2nd-century AD geographer and traveler Pausanias left a detailed description: the statue was crowned with a sculpted wreath of olive sprays and wore a gilded robe made from glass and carved with animals and lilies. Its right hand held a small chryselephantine statue of crowned Nike, goddess of victory; its left a scepter inlaid with many metals, supporting an eagle. The throne featured painted figures and wrought images and was decorated with gold, precious stones, ebony, and ivory.\n \n [5] Zeus' golden sandals rested upon a footstool decorated with an Amazonomachy in relief. The passage underneath the throne was restricted by painted screens.[6]\nPausanias also recounts that the statue was kept constantly coated with olive oil to counter the harmful effect on the ivory caused by the \"marshiness\" of the Altis grove. 
The floor in front of the image was paved with black tiles and surrounded by a raised rim of marble to contain the oil.[7] This reservoir acted as a reflecting pool which doubled the apparent height of the statue.[8]\nAccording to the Roman historian Livy, the Roman general Aemilius Paullus (the victor over Macedon) saw the statue and \"was moved to his soul, as if he had seen the god in person\",[9] while the 1st-century\u00a0AD Greek orator Dio Chrysostom declared that a single glimpse of the statue would make a man forget all his earthly troubles.\n \n Also, the fallen statue would have blocked the harbour, and since the ancient Rhodians did not have the ability to remove the fallen statue from the harbour, it would not have remained visible on land for the next 800 years, as discussed above. Even neglecting these objections, the statue was made of bronze, and engineering analyses indicate that it could not have been built with its legs apart without collapsing under its own weight.[29]\nMany researchers have considered alternative positions for the statue which would have made it more feasible for actual construction by the ancients.[29][30] There is also no evidence that the statue held a torch aloft; the records simply say that after completion, the Rhodians kindled the \"torch of freedom\". A relief in a nearby temple shows Helios standing with one hand shielding his eyes, similar to the way a person shields their eyes when looking toward the sun, and it is quite possible that the colossus was constructed in the same pose.\n\n\n \n The Statue of Zeus at Olympia was a giant seated figure, about 12.4\u00a0m (41\u00a0ft) tall,[1] made by the Greek sculptor Phidias around 435 BC at the sanctuary of Olympia, Greece, and erected in the Temple of Zeus there. Zeus is the sky and thunder god in ancient Greek religion, who rules as king of the gods of Mount Olympus.\nThe statue was a chryselephantine sculpture of ivory plates and gold panels on a wooden framework. Zeus sat on a painted cedarwood throne ornamented with ebony, ivory, gold, and precious stones. It was one of the Seven Wonders of the Ancient World.\nThe statue was lost and destroyed before the end of the 5th century AD, with conflicting accounts of the date and circumstances. Details of its form are known only from ancient Greek descriptions and representations on coins.\n\nCoin from Elis district in southern Greece illustrating the Olympian Zeus statue (Nordisk familjebok)\n\nHistory[edit]\nThe statue of Zeus was commissioned by the Eleans, custodians of the Olympic Games, in the latter half of the fifth century BC for their newly constructed Temple of Zeus. \n \n\n Question: What does Rhodes Statue look like?\n Answer:\n ", +# "llm_input": "\n Given the following information, answer the question.\n\n Context:\n \n Within it, too, are to be seen large masses of rock, by the weight of which the artist steadied it while erecting it.[22][23]\nDestruction of the remains[edit]\nThe ultimate fate of the remains of the statue is uncertain. Rhodes has two serious earthquakes per century, owing to its location on the seismically unstable Hellenic Arc. 
\n \n\n Question: What does Rhodes Statue look like?\n Answer:\n ", # "model_name": "gpt-3.5-turbo", -# "completion_tokens": 74, +# "completion_tokens": 90, # "prompt_tokens": 2464, -# "total_tokens": 2538 +# "total_tokens": 2554 # }, # "events": [], # "links": [], @@ -163,14 +162,14 @@ def haystack_app(): # { # "name": "haystack_pipeline.workflow", # "context": { -# "trace_id": "0xee46d79f09814a638517cd05f26ab21f", -# "span_id": "0xad97601a1ba8388d", +# "trace_id": "0x1db120b68e3a759882ac457b07af344f", +# "span_id": "0x346eb05a7dd44a99", # "trace_state": "[]" # }, # "kind": "SpanKind.INTERNAL", # "parent_id": null, -# "start_time": "2024-09-13T11:58:06.053639Z", -# "end_time": "2024-09-13T11:58:08.299411Z", +# "start_time": "2024-05-23T03:57:52.395585Z", +# "end_time": "2024-05-23T03:57:54.793340Z", # "status": { # "status_code": "UNSET" # }, @@ -178,7 +177,7 @@ def haystack_app(): # "input": "What does Rhodes Statue look like?", # "workflow_name": "haystack_app_1", # "workflow_type": "workflow.haystack", -# "output": "The Colossus of Rhodes depicted the Greek sun-god Helios and was approximately 33 meters (108 feet) tall. The head likely had curly hair with spikes of bronze or silver flame radiating, similar to the images found on contemporary Rhodian coins. The statue's exact appearance is unknown, but it was described as having a standard rendering of the time." +# "output": "The Rhodes Statue, also known as the Colossus of Rhodes, depicted the Greek sun-god Helios. It was a bronze statue standing approximately 33 meters (108 feet) tall with a standard rendering of the head and face, featuring curly hair with evenly spaced spikes of bronze or silver flame radiating. The actual appearance of the rest of the statue remains unknown, but it was considered the tallest statue in the ancient world." 
# }, # "events": [], # "links": [], diff --git a/tests/langchain_chat_sample.py b/tests/langchain_chat_sample.py index bd74bda..84103c3 100644 --- a/tests/langchain_chat_sample.py +++ b/tests/langchain_chat_sample.py @@ -103,77 +103,17 @@ def format_docs(docs): -# { -# "name": "haystack.tracing.auto_enable", -# "context": { -# "trace_id": "0xa6129cbc4adb0e601ad9f0569a591613", -# "span_id": "0x4fb64308d7261b4a", -# "trace_state": "[]" -# }, -# "kind": "SpanKind.INTERNAL", -# "parent_id": null, -# "start_time": "2024-09-13T11:53:46.324551Z", -# "end_time": "2024-09-13T11:53:46.325633Z", -# "status": { -# "status_code": "ERROR", -# "description": "ImportError: cannot import name 'Span' from partially initialized module 'haystack.tracing' (most likely due to a circular import) (/home/beehyv/Documents/monocle/venv/lib/python3.10/site-packages/haystack/tracing/__init__.py)" -# }, -# "attributes": {}, -# "events": [ -# { -# "name": "exception", -# "timestamp": "2024-09-13T11:53:46.325617Z", -# "attributes": { -# "exception.type": "ImportError", -# "exception.message": "cannot import name 'Span' from partially initialized module 'haystack.tracing' (most likely due to a circular import) (/home/beehyv/Documents/monocle/venv/lib/python3.10/site-packages/haystack/tracing/__init__.py)", -# "exception.stacktrace": "Traceback (most recent call last):\n File \"/home/beehyv/Documents/monocle/venv/lib/python3.10/site-packages/opentelemetry/trace/__init__.py\", line 590, in use_span\n yield span\n File \"/home/beehyv/Documents/monocle/venv/lib/python3.10/site-packages/opentelemetry/sdk/trace/__init__.py\", line 1108, in start_as_current_span\n yield span\n File \"/home/beehyv/Documents/monocle/venv/lib/python3.10/site-packages/haystack/tracing/tracer.py\", line 207, in _auto_configured_opentelemetry_tracer\n from haystack.tracing.opentelemetry import OpenTelemetryTracer\n File \"/home/beehyv/Documents/monocle/venv/lib/python3.10/site-packages/haystack/tracing/opentelemetry.py\", line 9, in \n from haystack.tracing import Span, Tracer\nImportError: cannot import name 'Span' from partially initialized module 'haystack.tracing' (most likely due to a circular import) (/home/beehyv/Documents/monocle/venv/lib/python3.10/site-packages/haystack/tracing/__init__.py)\n", -# "exception.escaped": "False" -# } -# } -# ], -# "links": [], -# "resource": { -# "attributes": { -# "service.name": "langchain_app_1" -# }, -# "schema_url": "" -# } -# } -# { -# "name": "haystack.tracing.auto_enable", -# "context": { -# "trace_id": "0xb98697469cb0b72734a2db8e3e7c8d90", -# "span_id": "0x65f5cb09e1240da0", -# "trace_state": "[]" -# }, -# "kind": "SpanKind.INTERNAL", -# "parent_id": null, -# "start_time": "2024-09-13T11:53:47.765068Z", -# "end_time": "2024-09-13T11:53:47.765132Z", -# "status": { -# "status_code": "UNSET" -# }, -# "attributes": {}, -# "events": [], -# "links": [], -# "resource": { -# "attributes": { -# "service.name": "langchain_app_1" -# }, -# "schema_url": "" -# } -# } # { # "name": "langchain.task.VectorStoreRetriever", # "context": { -# "trace_id": "0xf490e6be306375354c564c94075ed8df", -# "span_id": "0xb6a2ad7ab055eaa8", +# "trace_id": "0xca3159edb8ac4ba9fd87ba54aa5df4aa", +# "span_id": "0x036011bfdfdcb90a", # "trace_state": "[]" # }, # "kind": "SpanKind.INTERNAL", -# "parent_id": "0x5a377a89c97b361f", -# "start_time": "2024-09-13T11:53:54.945261Z", -# "end_time": "2024-09-13T11:53:55.622886Z", +# "parent_id": "0x7afa7a66a2adfb4a", +# "start_time": "2024-06-10T04:38:55.693625Z", +# "end_time": 
"2024-06-10T04:38:56.241083Z", # "status": { # "status_code": "UNSET" # }, @@ -190,7 +130,7 @@ def format_docs(docs): # "events": [ # { # "name": "context_input", -# "timestamp": "2024-09-13T11:53:54.945300Z", +# "timestamp": "2024-09-16T09:48:53.462202Z", # "attributes": { # "question": "What is Task Decomposition?" # } @@ -203,18 +143,18 @@ def format_docs(docs): # }, # "schema_url": "" # } -# } +# }, # { # "name": "langchain.workflow", # "context": { -# "trace_id": "0xf490e6be306375354c564c94075ed8df", -# "span_id": "0x5a377a89c97b361f", +# "trace_id": "0xca3159edb8ac4ba9fd87ba54aa5df4aa", +# "span_id": "0x7afa7a66a2adfb4a", # "trace_state": "[]" # }, # "kind": "SpanKind.INTERNAL", -# "parent_id": "0xd5e3bd8abd3bd3b4", -# "start_time": "2024-09-13T11:53:54.943933Z", -# "end_time": "2024-09-13T11:53:55.622959Z", +# "parent_id": "0x536c28587fc639a8", +# "start_time": "2024-06-10T04:38:55.692022Z", +# "end_time": "2024-06-10T04:38:56.241167Z", # "status": { # "status_code": "UNSET" # }, @@ -229,18 +169,18 @@ def format_docs(docs): # }, # "schema_url": "" # } -# } +# }, # { # "name": "langchain.workflow", # "context": { -# "trace_id": "0xf490e6be306375354c564c94075ed8df", -# "span_id": "0xd5e3bd8abd3bd3b4", +# "trace_id": "0xca3159edb8ac4ba9fd87ba54aa5df4aa", +# "span_id": "0x536c28587fc639a8", # "trace_state": "[]" # }, # "kind": "SpanKind.INTERNAL", -# "parent_id": "0x7577b1ddc83509cf", -# "start_time": "2024-09-13T11:53:54.938056Z", -# "end_time": "2024-09-13T11:53:55.623621Z", +# "parent_id": "0xf38e594bba842099", +# "start_time": "2024-06-10T04:38:55.686227Z", +# "end_time": "2024-06-10T04:38:56.241965Z", # "status": { # "status_code": "UNSET" # }, @@ -255,18 +195,18 @@ def format_docs(docs): # }, # "schema_url": "" # } -# } +# }, # { # "name": "langchain.workflow", # "context": { -# "trace_id": "0xf490e6be306375354c564c94075ed8df", -# "span_id": "0x31a2623e96243933", +# "trace_id": "0xca3159edb8ac4ba9fd87ba54aa5df4aa", +# "span_id": "0x54fa0fc40129d7c8", # "trace_state": "[]" # }, # "kind": "SpanKind.INTERNAL", -# "parent_id": "0xce9266e22f7c05f3", -# "start_time": "2024-09-13T11:53:55.652987Z", -# "end_time": "2024-09-13T11:53:55.654604Z", +# "parent_id": "0xfd62c1c2c9d666ed", +# "start_time": "2024-06-10T04:38:56.268526Z", +# "end_time": "2024-06-10T04:38:56.270750Z", # "status": { # "status_code": "UNSET" # }, @@ -281,18 +221,18 @@ def format_docs(docs): # }, # "schema_url": "" # } -# } +# }, # { # "name": "langchain.task.ChatPromptTemplate", # "context": { -# "trace_id": "0xf490e6be306375354c564c94075ed8df", -# "span_id": "0x4aa5fd5c16119390", +# "trace_id": "0xca3159edb8ac4ba9fd87ba54aa5df4aa", +# "span_id": "0xcc431732937f7052", # "trace_state": "[]" # }, # "kind": "SpanKind.INTERNAL", -# "parent_id": "0xce9266e22f7c05f3", -# "start_time": "2024-09-13T11:53:55.654711Z", -# "end_time": "2024-09-13T11:53:55.655612Z", +# "parent_id": "0xfd62c1c2c9d666ed", +# "start_time": "2024-06-10T04:38:56.270832Z", +# "end_time": "2024-06-10T04:38:56.271675Z", # "status": { # "status_code": "UNSET" # }, @@ -307,29 +247,27 @@ def format_docs(docs): # }, # "schema_url": "" # } -# } +# }, # { # "name": "langchain.task.ChatOpenAI", # "context": { -# "trace_id": "0xf490e6be306375354c564c94075ed8df", -# "span_id": "0xb1dade5154633d8a", +# "trace_id": "0xca3159edb8ac4ba9fd87ba54aa5df4aa", +# "span_id": "0x55453deb49cda82d", # "trace_state": "[]" # }, # "kind": "SpanKind.INTERNAL", -# "parent_id": "0xce9266e22f7c05f3", -# "start_time": "2024-09-13T11:53:55.655688Z", -# "end_time": 
"2024-09-13T11:53:57.997859Z", +# "parent_id": "0xfd62c1c2c9d666ed", +# "start_time": "2024-06-10T04:38:56.271747Z", +# "end_time": "2024-06-10T04:38:57.914210Z", # "status": { # "status_code": "UNSET" # }, # "attributes": { # "session.session_id": "0x4fa6d91d1f2a4bdbb7a1287d90ec4a16", -# "temperature": 0.7, -# "model_name": "gpt-3.5-turbo-0125", -# "provider_name": "api.openai.com", -# "completion_tokens": 62, +# "server_url": "http://triton22.eastus.cloudapp.azure.com:8000/v2/models/flan_t5_783m/versions/1/infer", +# "completion_tokens": 57, # "prompt_tokens": 580, -# "total_tokens": 642 +# "total_tokens": 637 # }, # "events": [], # "links": [], @@ -339,18 +277,18 @@ def format_docs(docs): # }, # "schema_url": "" # } -# } +# }, # { # "name": "langchain.task.StrOutputParser", # "context": { -# "trace_id": "0xf490e6be306375354c564c94075ed8df", -# "span_id": "0x61ed8cef55304a27", +# "trace_id": "0xca3159edb8ac4ba9fd87ba54aa5df4aa", +# "span_id": "0x32539134995fccec", # "trace_state": "[]" # }, # "kind": "SpanKind.INTERNAL", -# "parent_id": "0xce9266e22f7c05f3", -# "start_time": "2024-09-13T11:53:57.998110Z", -# "end_time": "2024-09-13T11:53:57.998636Z", +# "parent_id": "0xfd62c1c2c9d666ed", +# "start_time": "2024-06-10T04:38:57.914369Z", +# "end_time": "2024-06-10T04:38:57.914929Z", # "status": { # "status_code": "UNSET" # }, @@ -365,18 +303,18 @@ def format_docs(docs): # }, # "schema_url": "" # } -# } +# }, # { # "name": "langchain.workflow", # "context": { -# "trace_id": "0xf490e6be306375354c564c94075ed8df", -# "span_id": "0xce9266e22f7c05f3", +# "trace_id": "0xca3159edb8ac4ba9fd87ba54aa5df4aa", +# "span_id": "0xfd62c1c2c9d666ed", # "trace_state": "[]" # }, # "kind": "SpanKind.INTERNAL", -# "parent_id": "0x7ad3c9118ba28f72", -# "start_time": "2024-09-13T11:53:55.646757Z", -# "end_time": "2024-09-13T11:53:57.998664Z", +# "parent_id": "0xfcc716b485539b93", +# "start_time": "2024-06-10T04:38:56.261349Z", +# "end_time": "2024-06-10T04:38:57.914961Z", # "status": { # "status_code": "UNSET" # }, @@ -391,18 +329,18 @@ def format_docs(docs): # }, # "schema_url": "" # } -# } +# }, # { # "name": "langchain.workflow", # "context": { -# "trace_id": "0xf490e6be306375354c564c94075ed8df", -# "span_id": "0x7ad3c9118ba28f72", +# "trace_id": "0xca3159edb8ac4ba9fd87ba54aa5df4aa", +# "span_id": "0xfcc716b485539b93", # "trace_state": "[]" # }, # "kind": "SpanKind.INTERNAL", -# "parent_id": "0x7577b1ddc83509cf", -# "start_time": "2024-09-13T11:53:55.640859Z", -# "end_time": "2024-09-13T11:53:57.998842Z", +# "parent_id": "0xf38e594bba842099", +# "start_time": "2024-06-10T04:38:56.253582Z", +# "end_time": "2024-06-10T04:38:57.915145Z", # "status": { # "status_code": "UNSET" # }, @@ -417,36 +355,29 @@ def format_docs(docs): # }, # "schema_url": "" # } -# } +# }, # { # "name": "langchain.workflow", # "context": { -# "trace_id": "0xf490e6be306375354c564c94075ed8df", -# "span_id": "0x7577b1ddc83509cf", +# "trace_id": "0xca3159edb8ac4ba9fd87ba54aa5df4aa", +# "span_id": "0xf38e594bba842099", # "trace_state": "[]" # }, # "kind": "SpanKind.INTERNAL", -# "parent_id": null, -# "start_time": "2024-09-13T11:53:54.892772Z", -# "end_time": "2024-09-13T11:53:57.998937Z", +# "parent_id": "None", +# "start_time": "2024-06-10T04:38:55.640160Z", +# "end_time": "2024-06-10T04:38:57.915229Z", # "status": { # "status_code": "UNSET" # }, # "attributes": { # "session.session_id": "0x4fa6d91d1f2a4bdbb7a1287d90ec4a16", +# "workflow_input": "What is Task Decomposition?", # "workflow_name": "langchain_app_1", +# "workflow_output": "Task 
decomposition is a technique used to break down complex tasks into smaller and more manageable steps. This process helps agents or models handle intricate tasks by dividing them into simpler subtasks. Various methods, such as Chain of Thought and Tree of Thoughts, can be employed to decompose tasks effectively.", # "workflow_type": "workflow.langchain" # }, -# "events": [ -# { -# "name": "input", -# "timestamp": "2024-09-13T11:53:54.892847Z", -# "attributes": { -# "input": "What is Task Decomposition?", -# "chat_history": [] -# } -# } -# ], +# "events": [], # "links": [], # "resource": { # "attributes": { @@ -454,18 +385,18 @@ def format_docs(docs): # }, # "schema_url": "" # } -# } +# }, # { # "name": "langchain.task.ChatPromptTemplate", # "context": { -# "trace_id": "0x460ddbe0096e740e5fce324188e8b783", -# "span_id": "0xd6c40115270fbe7a", +# "trace_id": "0xfcb89e0c5f4aba8a1377664f6dee7661", +# "span_id": "0xa3ae254e712e3f90", # "trace_state": "[]" # }, # "kind": "SpanKind.INTERNAL", -# "parent_id": "0x342dd0dfd45770e4", -# "start_time": "2024-09-13T11:53:58.028033Z", -# "end_time": "2024-09-13T11:53:58.028906Z", +# "parent_id": "0xa9b366f5c4fb2eda", +# "start_time": "2024-06-10T04:38:57.941590Z", +# "end_time": "2024-06-10T04:38:57.942342Z", # "status": { # "status_code": "UNSET" # }, @@ -480,29 +411,27 @@ def format_docs(docs): # }, # "schema_url": "" # } -# } +# }, # { # "name": "langchain.task.ChatOpenAI", # "context": { -# "trace_id": "0x460ddbe0096e740e5fce324188e8b783", -# "span_id": "0xf772bf22bc9316c6", +# "trace_id": "0xfcb89e0c5f4aba8a1377664f6dee7661", +# "span_id": "0x419b04f8a3eb4883", # "trace_state": "[]" # }, # "kind": "SpanKind.INTERNAL", -# "parent_id": "0x342dd0dfd45770e4", -# "start_time": "2024-09-13T11:53:58.028981Z", -# "end_time": "2024-09-13T11:53:59.002471Z", +# "parent_id": "0xa9b366f5c4fb2eda", +# "start_time": "2024-06-10T04:38:57.942406Z", +# "end_time": "2024-06-10T04:38:59.211431Z", # "status": { # "status_code": "UNSET" # }, # "attributes": { # "session.session_id": "0x4fa6d91d1f2a4bdbb7a1287d90ec4a16", -# "temperature": 0.7, -# "model_name": "gpt-3.5-turbo-0125", -# "provider_name": "api.openai.com", -# "completion_tokens": 9, -# "prompt_tokens": 145, -# "total_tokens": 154 +# "server_url": "http://triton22.eastus.cloudapp.azure.com:8000/v2/models/flan_t5_783m/versions/1/infer", +# "completion_tokens": 10, +# "prompt_tokens": 140, +# "total_tokens": 150 # }, # "events": [], # "links": [], @@ -512,18 +441,18 @@ def format_docs(docs): # }, # "schema_url": "" # } -# } +# }, # { # "name": "langchain.task.StrOutputParser", # "context": { -# "trace_id": "0x460ddbe0096e740e5fce324188e8b783", -# "span_id": "0xded31022b03dc5c6", +# "trace_id": "0xfcb89e0c5f4aba8a1377664f6dee7661", +# "span_id": "0xaaa3a958fb1da0e9", # "trace_state": "[]" # }, # "kind": "SpanKind.INTERNAL", -# "parent_id": "0x342dd0dfd45770e4", -# "start_time": "2024-09-13T11:53:59.002761Z", -# "end_time": "2024-09-13T11:53:59.003855Z", +# "parent_id": "0xa9b366f5c4fb2eda", +# "start_time": "2024-06-10T04:38:59.211922Z", +# "end_time": "2024-06-10T04:38:59.213538Z", # "status": { # "status_code": "UNSET" # }, @@ -538,18 +467,18 @@ def format_docs(docs): # }, # "schema_url": "" # } -# } +# }, # { # "name": "langchain.task.VectorStoreRetriever", # "context": { -# "trace_id": "0x460ddbe0096e740e5fce324188e8b783", -# "span_id": "0x9110963dc0694b6d", +# "trace_id": "0xfcb89e0c5f4aba8a1377664f6dee7661", +# "span_id": "0x3e8142ee7d8d4927", # "trace_state": "[]" # }, # "kind": "SpanKind.INTERNAL", -# 
"parent_id": "0x342dd0dfd45770e4", -# "start_time": "2024-09-13T11:53:59.004127Z", -# "end_time": "2024-09-13T11:53:59.578593Z", +# "parent_id": "0xa9b366f5c4fb2eda", +# "start_time": "2024-06-10T04:38:59.213754Z", +# "end_time": "2024-06-10T04:38:59.699996Z", # "status": { # "status_code": "UNSET" # }, @@ -566,7 +495,7 @@ def format_docs(docs): # "events": [ # { # "name": "context_input", -# "timestamp": "2024-09-13T11:53:59.004235Z", +# "timestamp": "2024-09-16T09:48:56.731819Z", # "attributes": { # "question": "What are some typical methods for task decomposition?" # } @@ -579,18 +508,18 @@ def format_docs(docs): # }, # "schema_url": "" # } -# } +# }, # { # "name": "langchain.workflow", # "context": { -# "trace_id": "0x460ddbe0096e740e5fce324188e8b783", -# "span_id": "0x342dd0dfd45770e4", +# "trace_id": "0xfcb89e0c5f4aba8a1377664f6dee7661", +# "span_id": "0xa9b366f5c4fb2eda", # "trace_state": "[]" # }, # "kind": "SpanKind.INTERNAL", -# "parent_id": "0x376010a99723e8f6", -# "start_time": "2024-09-13T11:53:58.026890Z", -# "end_time": "2024-09-13T11:53:59.578649Z", +# "parent_id": "0xefdcdb61e167f73a", +# "start_time": "2024-06-10T04:38:57.940414Z", +# "end_time": "2024-06-10T04:38:59.700076Z", # "status": { # "status_code": "UNSET" # }, @@ -605,18 +534,18 @@ def format_docs(docs): # }, # "schema_url": "" # } -# } +# }, # { # "name": "langchain.workflow", # "context": { -# "trace_id": "0x460ddbe0096e740e5fce324188e8b783", -# "span_id": "0x376010a99723e8f6", +# "trace_id": "0xfcb89e0c5f4aba8a1377664f6dee7661", +# "span_id": "0xefdcdb61e167f73a", # "trace_state": "[]" # }, # "kind": "SpanKind.INTERNAL", -# "parent_id": "0x6271cda8f5cf74cc", -# "start_time": "2024-09-13T11:53:58.020961Z", -# "end_time": "2024-09-13T11:53:59.579314Z", +# "parent_id": "0xffdc0a0d41b85218", +# "start_time": "2024-06-10T04:38:57.934140Z", +# "end_time": "2024-06-10T04:38:59.700674Z", # "status": { # "status_code": "UNSET" # }, @@ -631,18 +560,18 @@ def format_docs(docs): # }, # "schema_url": "" # } -# } +# }, # { # "name": "langchain.workflow", # "context": { -# "trace_id": "0x460ddbe0096e740e5fce324188e8b783", -# "span_id": "0x3abe3e6f01faf7d3", +# "trace_id": "0xfcb89e0c5f4aba8a1377664f6dee7661", +# "span_id": "0xa0b015ed781ad960", # "trace_state": "[]" # }, # "kind": "SpanKind.INTERNAL", -# "parent_id": "0xe78159af24754221", -# "start_time": "2024-09-13T11:53:59.603308Z", -# "end_time": "2024-09-13T11:53:59.604992Z", +# "parent_id": "0x3711b72dfa932d3e", +# "start_time": "2024-06-10T04:38:59.726886Z", +# "end_time": "2024-06-10T04:38:59.729179Z", # "status": { # "status_code": "UNSET" # }, @@ -657,18 +586,18 @@ def format_docs(docs): # }, # "schema_url": "" # } -# } +# }, # { # "name": "langchain.task.ChatPromptTemplate", # "context": { -# "trace_id": "0x460ddbe0096e740e5fce324188e8b783", -# "span_id": "0x932b5b9249809e07", +# "trace_id": "0xfcb89e0c5f4aba8a1377664f6dee7661", +# "span_id": "0x0768296ba09b7230", # "trace_state": "[]" # }, # "kind": "SpanKind.INTERNAL", -# "parent_id": "0xe78159af24754221", -# "start_time": "2024-09-13T11:53:59.605136Z", -# "end_time": "2024-09-13T11:53:59.606022Z", +# "parent_id": "0x3711b72dfa932d3e", +# "start_time": "2024-06-10T04:38:59.729256Z", +# "end_time": "2024-06-10T04:38:59.730086Z", # "status": { # "status_code": "UNSET" # }, @@ -683,30 +612,27 @@ def format_docs(docs): # }, # "schema_url": "" # } -# } -# Common ways of task decomposition include using techniques like Chain of Thought (CoT) and Tree of Thoughts, which break down tasks into smaller steps for easier 
execution. Task decomposition can also be done through simple prompting using language models, task-specific instructions tailored to the specific task, or by incorporating human inputs to guide the decomposition process. +# }, # { # "name": "langchain.task.ChatOpenAI", # "context": { -# "trace_id": "0x460ddbe0096e740e5fce324188e8b783", -# "span_id": "0x95bb04f2be7f0ef0", +# "trace_id": "0xfcb89e0c5f4aba8a1377664f6dee7661", +# "span_id": "0xa32f64207539d7a8", # "trace_state": "[]" # }, # "kind": "SpanKind.INTERNAL", -# "parent_id": "0xe78159af24754221", -# "start_time": "2024-09-13T11:53:59.606117Z", -# "end_time": "2024-09-13T11:54:01.052883Z", +# "parent_id": "0x3711b72dfa932d3e", +# "start_time": "2024-06-10T04:38:59.730152Z", +# "end_time": "2024-06-10T04:39:01.261308Z", # "status": { # "status_code": "UNSET" # }, # "attributes": { # "session.session_id": "0x4fa6d91d1f2a4bdbb7a1287d90ec4a16", -# "temperature": 0.7, -# "model_name": "gpt-3.5-turbo-0125", -# "provider_name": "api.openai.com", -# "completion_tokens": 65, -# "prompt_tokens": 658, -# "total_tokens": 723 +# "server_url": "http://triton22.eastus.cloudapp.azure.com:8000/v2/models/flan_t5_783m/versions/1/infer", +# "completion_tokens": 63, +# "prompt_tokens": 619, +# "total_tokens": 682 # }, # "events": [], # "links": [], @@ -716,18 +642,18 @@ def format_docs(docs): # }, # "schema_url": "" # } -# } +# }, # { # "name": "langchain.task.StrOutputParser", # "context": { -# "trace_id": "0x460ddbe0096e740e5fce324188e8b783", -# "span_id": "0xf08d575527e91af2", +# "trace_id": "0xfcb89e0c5f4aba8a1377664f6dee7661", +# "span_id": "0xb664f045c3716fa3", # "trace_state": "[]" # }, # "kind": "SpanKind.INTERNAL", -# "parent_id": "0xe78159af24754221", -# "start_time": "2024-09-13T11:54:01.053197Z", -# "end_time": "2024-09-13T11:54:01.054195Z", +# "parent_id": "0x3711b72dfa932d3e", +# "start_time": "2024-06-10T04:39:01.261566Z", +# "end_time": "2024-06-10T04:39:01.262450Z", # "status": { # "status_code": "UNSET" # }, @@ -742,18 +668,18 @@ def format_docs(docs): # }, # "schema_url": "" # } -# } +# }, # { # "name": "langchain.workflow", # "context": { -# "trace_id": "0x460ddbe0096e740e5fce324188e8b783", -# "span_id": "0xe78159af24754221", +# "trace_id": "0xfcb89e0c5f4aba8a1377664f6dee7661", +# "span_id": "0x3711b72dfa932d3e", # "trace_state": "[]" # }, # "kind": "SpanKind.INTERNAL", -# "parent_id": "0x29c21e517e9a247a", -# "start_time": "2024-09-13T11:53:59.597366Z", -# "end_time": "2024-09-13T11:54:01.054250Z", +# "parent_id": "0x0a6e7fac9826a16c", +# "start_time": "2024-06-10T04:38:59.719843Z", +# "end_time": "2024-06-10T04:39:01.262503Z", # "status": { # "status_code": "UNSET" # }, @@ -768,61 +694,53 @@ def format_docs(docs): # }, # "schema_url": "" # } -# } +# }, # { # "name": "langchain.workflow", # "context": { -# "trace_id": "0x460ddbe0096e740e5fce324188e8b783", -# "span_id": "0x29c21e517e9a247a", +# "trace_id": "0xfcb89e0c5f4aba8a1377664f6dee7661", +# "span_id": "0x0a6e7fac9826a16c", # "trace_state": "[]" # }, # "kind": "SpanKind.INTERNAL", -# "parent_id": "0x6271cda8f5cf74cc", -# "start_time": "2024-09-13T11:53:59.592121Z", -# "end_time": "2024-09-13T11:54:01.054516Z", +# "parent_id": "0xffdc0a0d41b85218", +# "start_time": "2024-06-10T04:38:59.712013Z", +# "end_time": "2024-06-10T04:39:01.262831Z", # "status": { # "status_code": "UNSET" # }, # "attributes": { # "session.session_id": "0x4fa6d91d1f2a4bdbb7a1287d90ec4a16" # }, -# "events": [], -# "links": [], # "resource": { # "attributes": { # "service.name": "langchain_app_1" # }, # 
"schema_url": "" # } -# } +# }, # { # "name": "langchain.workflow", # "context": { -# "trace_id": "0x460ddbe0096e740e5fce324188e8b783", -# "span_id": "0x6271cda8f5cf74cc", +# "trace_id": "0xfcb89e0c5f4aba8a1377664f6dee7661", +# "span_id": "0xffdc0a0d41b85218", # "trace_state": "[]" # }, # "kind": "SpanKind.INTERNAL", -# "parent_id": null, -# "start_time": "2024-09-13T11:53:57.999167Z", -# "end_time": "2024-09-13T11:54:01.054623Z", +# "parent_id": "None", +# "start_time": "2024-06-10T04:38:57.915422Z", +# "end_time": "2024-06-10T04:39:01.262926Z", # "status": { # "status_code": "UNSET" # }, # "attributes": { # "session.session_id": "0x4fa6d91d1f2a4bdbb7a1287d90ec4a16", +# "workflow_input": "What are common ways of doing it?", # "workflow_name": "langchain_app_1", +# "workflow_output": "Task decomposition can be achieved through methods such as using Language Model (LLM) prompting with specific instructions like \"Steps for XYZ\" or \"What are the subgoals for achieving XYZ?\", providing task-specific instructions, or incorporating human inputs. These approaches help in breaking down tasks into smaller components for easier handling and execution.", # "workflow_type": "workflow.langchain" # }, -# "events": [ -# { -# "name": "input", -# "timestamp": "2024-09-13T11:53:57.999329Z", -# "attributes": { -# "input": "What are common ways of doing it?" -# } -# } -# ], +# "events": [], # "links": [], # "resource": { # "attributes": { diff --git a/tests/llama_index_sample.py b/tests/llama_index_sample.py index 021fe3c..dcbc256 100644 --- a/tests/llama_index_sample.py +++ b/tests/llama_index_sample.py @@ -44,217 +44,81 @@ print(response) # { -# "name": "haystack.tracing.auto_enable", -# "context": { -# "trace_id": "0xf199d425ff9455d2fa18da30508e8120", -# "span_id": "0xced2da35b439e06c", -# "trace_state": "[]" -# }, -# "kind": "SpanKind.INTERNAL", -# "parent_id": null, -# "start_time": "2024-09-13T11:37:37.143875Z", -# "end_time": "2024-09-13T11:37:37.144983Z", -# "status": { -# "status_code": "ERROR", -# "description": "ImportError: cannot import name 'Span' from partially initialized module 'haystack.tracing' (most likely due to a circular import) (/home/beehyv/Documents/monocle/venv/lib/python3.10/site-packages/haystack/tracing/__init__.py)" -# }, -# "attributes": {}, -# "events": [ +# "trace_id": "0xbd54e5d0edcd96634fa8a02c25c27519", +# "start_time": "2024-04-15T23:27:54.806477Z", +# "end_time": "2024-04-15T23:27:57.182261Z", +# "duration_ms": "2376", +# "spans": [ # { -# "name": "exception", -# "timestamp": "2024-09-13T11:37:37.144968Z", +# "span_name": "llamaindex.retrieve", +# "start_time": "2024-04-15T23:27:54.806773Z", +# "end_time": "2024-04-15T23:27:55.732604Z", +# "duration_ms": "926", +# "span_id": "0x030cf03872d4a092", +# "trace_id": "0xbd54e5d0edcd96634fa8a02c25c27519", +# "parent_id": "0xb4b14a8f14e7e770", # "attributes": { -# "exception.type": "ImportError", -# "exception.message": "cannot import name 'Span' from partially initialized module 'haystack.tracing' (most likely due to a circular import) (/home/beehyv/Documents/monocle/venv/lib/python3.10/site-packages/haystack/tracing/__init__.py)", -# "exception.stacktrace": "Traceback (most recent call last):\n File \"/home/beehyv/Documents/monocle/venv/lib/python3.10/site-packages/opentelemetry/trace/__init__.py\", line 590, in use_span\n yield span\n File \"/home/beehyv/Documents/monocle/venv/lib/python3.10/site-packages/opentelemetry/sdk/trace/__init__.py\", line 1108, in start_as_current_span\n yield span\n File 
\"/home/beehyv/Documents/monocle/venv/lib/python3.10/site-packages/haystack/tracing/tracer.py\", line 207, in _auto_configured_opentelemetry_tracer\n from haystack.tracing.opentelemetry import OpenTelemetryTracer\n File \"/home/beehyv/Documents/monocle/venv/lib/python3.10/site-packages/haystack/tracing/opentelemetry.py\", line 9, in \n from haystack.tracing import Span, Tracer\nImportError: cannot import name 'Span' from partially initialized module 'haystack.tracing' (most likely due to a circular import) (/home/beehyv/Documents/monocle/venv/lib/python3.10/site-packages/haystack/tracing/__init__.py)\n", -# "exception.escaped": "False" -# } -# } -# ], -# "links": [], -# "resource": { -# "attributes": { -# "service.name": "llama_index_1" -# }, -# "schema_url": "" -# } -# } -# { -# "name": "haystack.tracing.auto_enable", -# "context": { -# "trace_id": "0x9be2be7e2994b5f923d17defaed1c00e", -# "span_id": "0xc790b64df4a91ff6", -# "trace_state": "[]" -# }, -# "kind": "SpanKind.INTERNAL", -# "parent_id": null, -# "start_time": "2024-09-13T11:37:38.692198Z", -# "end_time": "2024-09-13T11:37:38.692245Z", -# "status": { -# "status_code": "UNSET" -# }, -# "attributes": {}, -# "events": [], -# "links": [], -# "resource": { -# "attributes": { -# "service.name": "llama_index_1" -# }, -# "schema_url": "" -# } -# } -# { -# "name": "llamaindex.retrieve", -# "context": { -# "trace_id": "0x79de813d316bd63f40767fec67560d0f", -# "span_id": "0xd9661af547dfcfc1", -# "trace_state": "[]" -# }, -# "kind": "SpanKind.INTERNAL", -# "parent_id": "0xa9e16efccceb9368", -# "start_time": "2024-09-13T11:37:40.848528Z", -# "end_time": "2024-09-13T11:37:41.760007Z", -# "status": { -# "status_code": "UNSET" -# }, -# "attributes": {}, -# "events": [ -# { -# "name": "context_input", -# "timestamp": "2024-09-13T11:37:40.848554Z", -# "attributes": { -# "question": "What did the author do growing up?" -# } -# }, -# { -# "name": "context_output", -# "timestamp": "2024-09-13T11:37:41.759986Z", -# "attributes": { -# "response": "this is some sample text" -# } -# } -# ], -# "links": [], -# "resource": { -# "attributes": { -# "service.name": "llama_index_1" -# }, -# "schema_url": "" -# } -# } -# The context does not provide information about what the author did while growing up. 
-# { -# "name": "llamaindex.openai", -# "context": { -# "trace_id": "0x79de813d316bd63f40767fec67560d0f", -# "span_id": "0xf17975e22cc1f920", -# "trace_state": "[]" -# }, -# "kind": "SpanKind.INTERNAL", -# "parent_id": "0x99c6a02cfdfddb24", -# "start_time": "2024-09-13T11:37:41.762514Z", -# "end_time": "2024-09-13T11:37:43.806178Z", -# "status": { -# "status_code": "UNSET" -# }, -# "attributes": { -# "temperature": 0.1, -# "model_name": "gpt-4", -# "provider_name": "api.openai.com", -# "inference_endpoint": "https://api.openai.com/v1", -# "completion_tokens": 15, -# "prompt_tokens": 142, -# "total_tokens": 157 -# }, -# "events": [], -# "links": [], -# "resource": { -# "attributes": { -# "service.name": "llama_index_1" +# }, +# "events": [] # }, -# "schema_url": "" -# } -# } -# { -# "name": "llamaindex.openai", -# "context": { -# "trace_id": "0x79de813d316bd63f40767fec67560d0f", -# "span_id": "0x99c6a02cfdfddb24", -# "trace_state": "[]" -# }, -# "kind": "SpanKind.INTERNAL", -# "parent_id": "0xa9e16efccceb9368", -# "start_time": "2024-09-13T11:37:41.762330Z", -# "end_time": "2024-09-13T11:37:43.806247Z", -# "status": { -# "status_code": "UNSET" -# }, -# "attributes": { -# "temperature": 0.1, -# "model_name": "gpt-4", -# "provider_name": "api.openai.com", -# "inference_endpoint": "https://api.openai.com/v1", -# "completion_tokens": 15, -# "prompt_tokens": 142, -# "total_tokens": 157 -# }, -# "events": [], -# "links": [], -# "resource": { -# "attributes": { -# "service.name": "llama_index_1" -# }, -# "schema_url": "" -# } -# } -# { -# "name": "llamaindex.query", -# "context": { -# "trace_id": "0x79de813d316bd63f40767fec67560d0f", -# "span_id": "0xa9e16efccceb9368", -# "trace_state": "[]" -# }, -# "kind": "SpanKind.INTERNAL", -# "parent_id": null, -# "start_time": "2024-09-13T11:37:40.848077Z", -# "end_time": "2024-09-13T11:37:43.806928Z", -# "status": { -# "status_code": "UNSET" -# }, -# "attributes": { -# "tags": [ -# "text-embedding-3-large", -# "ChromaVectorStore" -# ], -# "type": "vector_store", -# "provider_name": "ChromaVectorStore", -# "embedding_model": "text-embedding-3-large", -# "workflow_name": "llama_index_1", -# "workflow_type": "workflow.llamaindex" -# }, -# "events": [ # { -# "name": "input", -# "timestamp": "2024-09-13T11:37:40.848123Z", +# "span_name": "llamaindex.openai", +# "start_time": "2024-04-15T23:27:55.740299Z", +# "end_time": "2024-04-15T23:27:57.181992Z", +# "duration_ms": "1442", +# "span_id": "0x225fbfb58481e58c", +# "trace_id": "0xbd54e5d0edcd96634fa8a02c25c27519", +# "parent_id": "0xb4b14a8f14e7e770", # "attributes": { -# "question": "What did the author do growing up?" -# } +# "model_name": "gpt-3.5-turbo-0125", +# "provider_name": "openai.com", +# }, +# "events": [] # }, # { -# "name": "output", -# "timestamp": "2024-09-13T11:37:43.806899Z", +# "span_name": "llamaindex.query", +# "start_time": "2024-04-15T23:27:54.806477Z", +# "end_time": "2024-04-15T23:27:57.182261Z", +# "duration_ms": "2376", +# "span_id": "0xb4b14a8f14e7e770", +# "trace_id": "0xbd54e5d0edcd96634fa8a02c25c27519", +# "parent_id": "None", # "attributes": { -# "response": "The context does not provide information about what the author did while growing up." 
-# } +# "tags": [ +# "text-embedding-3-large", +# "ChromaVectorStore" +# ], +# "type": "vector_store", +# "provider_name": "ChromaVectorStore", +# "embedding_model": "text-embedding-3-large", +# "workflow_name": "llama_index_1", +# "workflow_type": "workflow.llamaindex" +# }, +# "events": [ +# { +# "name": "input", +# "timestamp": "2024-09-16T10:05:44.687175Z", +# "attributes": { +# "question": "What did the author do growing up?" +# } +# }, +# { +# "name": "output", +# "timestamp": "2024-09-16T10:05:47.345643Z", +# "attributes": { +# "response": "The context does not provide information about what the author did while growing up." +# } +# } +# ], +# "links": [], +# "resource": { +# "attributes": { +# "service.name": "llama_index_1" +# }, +# "schema_url": "" +# } # } -# ], -# "links": [], -# "resource": { -# "attributes": { -# "service.name": "llama_index_1" -# }, -# "schema_url": "" -# } +# ] # }
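
Note on the refactor in wrap_common.py above: update_llm_endpoint no longer sets vector-store attributes; that logic now lives in update_vectorstore_attributes, which dispatches on to_wrap['package'] for LangChain, LlamaIndex, and Haystack retrievers and writes the type, provider_name, and embedding_model span attributes. Below is a minimal, self-contained sketch of that dispatch using stand-in objects; the SimpleNamespace instance, the plain dict standing in for span attributes, and the placeholder embedding-model string are illustrative assumptions, not part of the library.

from types import SimpleNamespace

TYPE = "type"
PROVIDER = "provider_name"
EMBEDDING_MODEL = "embedding_model"
VECTOR_STORE = "vector_store"

def update_vectorstore_attributes_sketch(to_wrap, instance, span_attributes):
    # Sketch only: mirrors the three branches of update_vectorstore_attributes() in the diff,
    # but takes a plain dict instead of a live OpenTelemetry span.
    if to_wrap["package"] == "langchain_core.retrievers":
        # LangChain: embedding model and provider are read from instance.tags.
        span_attributes.update({TYPE: VECTOR_STORE,
                                PROVIDER: instance.tags[1],
                                EMBEDDING_MODEL: instance.tags[0]})
    elif to_wrap["package"] == "llama_index.core.base.base_query_engine":
        # LlamaIndex: walk the query engine's retriever for the embed model and vector store.
        span_attributes.update({TYPE: VECTOR_STORE,
                                PROVIDER: type(instance.retriever._vector_store).__name__,
                                EMBEDDING_MODEL: instance.retriever._embed_model.model_name})
    elif "haystack.components.retrievers" in to_wrap["package"] and "document_store" in vars(instance):
        # Haystack: provider is the document store class; the real code resolves the
        # embedding model via get_embedding_model(), a placeholder string is used here.
        span_attributes.update({TYPE: VECTOR_STORE,
                                PROVIDER: instance.document_store.__class__.__name__,
                                EMBEDDING_MODEL: "<configured embedding model>"})

# Example with a stand-in LangChain retriever carrying [embedding_model, provider] tags:
attrs = {}
retriever = SimpleNamespace(tags=["text-embedding-3-large", "Chroma"])
update_vectorstore_attributes_sketch({"package": "langchain_core.retrievers"}, retriever, attrs)
print(attrs)
# {'type': 'vector_store', 'provider_name': 'Chroma', 'embedding_model': 'text-embedding-3-large'}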