diff --git a/CHANGELOG.md b/CHANGELOG.md
index 1b29c928..bd9b9953 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -3,6 +3,12 @@
 All notable changes to this project will be documented in this file.
 See [Conventional Commits](https://conventionalcommits.org) for commit guidelines.
 
+## [0.6.1](https://github.com/traceloop/openllmetry-js/compare/v0.6.0...v0.6.1) (2024-04-22)
+
+### Bug Fixes
+
+- handle exceptions ([#214](https://github.com/traceloop/openllmetry-js/issues/214)) ([65f9be4](https://github.com/traceloop/openllmetry-js/commit/65f9be4fdcaa40f5bfd6c1fe3edc60910b4af894))
+
 # [0.6.0](https://github.com/traceloop/openllmetry-js/compare/v0.5.29...v0.6.0) (2024-04-05)
 
 ### Bug Fixes
diff --git a/lerna.json b/lerna.json
index 4f9c5074..1fb75207 100644
--- a/lerna.json
+++ b/lerna.json
@@ -1,6 +1,6 @@
 {
   "$schema": "node_modules/lerna/schemas/lerna-schema.json",
-  "version": "0.6.0",
+  "version": "0.6.1",
   "packages": ["packages/*"],
   "useNx": true
 }
diff --git a/package-lock.json b/package-lock.json
index 6110f9b1..fafceb61 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -15,7 +15,7 @@
         "@commitlint/cli": "^18.4.4",
         "@commitlint/config-conventional": "^18.4.4",
         "@jscutlery/semver": "^3.4.1",
-        "@nx/eslint-plugin": "^17.2.8",
+        "@nx/eslint-plugin": "^18.2.3",
         "@nx/js": "^17.2.8",
         "@nx/linter": "^17.2.8",
         "@rollup/plugin-json": "^6.1.0",
@@ -4770,12 +4770,12 @@
       }
     },
     "node_modules/@nrwl/eslint-plugin-nx": {
-      "version": "17.3.2",
-      "resolved": "https://registry.npmjs.org/@nrwl/eslint-plugin-nx/-/eslint-plugin-nx-17.3.2.tgz",
-      "integrity": "sha512-KfZtT+breRD7D8dy+YLIdKD7S9t4aqtEQLpRQCnJrCALYdqIRuql9rC2J69RUosozgrk55C0LERF0/kJVPe6Gg==",
+      "version": "18.2.3",
+      "resolved": "https://registry.npmjs.org/@nrwl/eslint-plugin-nx/-/eslint-plugin-nx-18.2.3.tgz",
+      "integrity": "sha512-hwIqxp2A0G250tFzEDmVbhwhtldoB7858848AME99Nt9Ij27ThzYaLIG+TYicmb+Rq2FqG9G/6wS89eg1aAg2Q==",
       "dev": true,
       "dependencies": {
-        "@nx/eslint-plugin": "17.3.2"
+        "@nx/eslint-plugin": "18.2.3"
       }
     },
     "node_modules/@nrwl/js": {
@@ -4887,16 +4887,16 @@
       }
     },
     "node_modules/@nx/eslint-plugin": {
-      "version": "17.3.2",
-      "resolved": "https://registry.npmjs.org/@nx/eslint-plugin/-/eslint-plugin-17.3.2.tgz",
-      "integrity": "sha512-szNXnMr54SH3uQjsTgSb/ySomhbqF0nJnca1yoC7XJG8+jlQLTs8EiyqjdQ9CVo+KTxsb9ilDtAZXRNCHEyGlw==",
+      "version": "18.2.3",
+      "resolved": "https://registry.npmjs.org/@nx/eslint-plugin/-/eslint-plugin-18.2.3.tgz",
+      "integrity": "sha512-vOHkzHNpDLLd5RMrL/8/sAdqBqMcf2FrSJWug6W4cC0x8hzUpNwnfEn+i4ZCV/QxduQH4/UP96AbOm7rzwoAdg==",
       "dev": true,
       "dependencies": {
-        "@nrwl/eslint-plugin-nx": "17.3.2",
-        "@nx/devkit": "17.3.2",
-        "@nx/js": "17.3.2",
-        "@typescript-eslint/type-utils": "^6.13.2",
-        "@typescript-eslint/utils": "^6.13.2",
+        "@nrwl/eslint-plugin-nx": "18.2.3",
+        "@nx/devkit": "18.2.3",
+        "@nx/js": "18.2.3",
+        "@typescript-eslint/type-utils": "^7.3.0",
+        "@typescript-eslint/utils": "^7.3.0",
         "chalk": "^4.1.0",
         "confusing-browser-globals": "^1.0.9",
         "jsonc-eslint-parser": "^2.1.0",
@@ -4904,7 +4904,7 @@
         "tslib": "^2.3.0"
       },
       "peerDependencies": {
-        "@typescript-eslint/parser": "^6.13.2",
+        "@typescript-eslint/parser": "^6.13.2 || ^7.0.0",
         "eslint-config-prettier": "^9.0.0"
       },
       "peerDependenciesMeta": {
@@ -4914,21 +4914,52 @@
       }
     },
     "node_modules/@nx/eslint-plugin/node_modules/@nrwl/devkit": {
-      "version": "17.3.2",
-      "resolved": "https://registry.npmjs.org/@nrwl/devkit/-/devkit-17.3.2.tgz",
-      "integrity": "sha512-31wh7dDZPM1YUCfhhk/ioHnUeoPIlKYLFLW0fGdw76Ow2nmTqrmxha2m0CSIR1/9En9GpYut2IdUdNh9CctNlA==",
+      "version": "18.2.3",
+      "resolved": "https://registry.npmjs.org/@nrwl/devkit/-/devkit-18.2.3.tgz",
+      "integrity": "sha512-BJQdPmXFze7g4zsHhwSTssAcm/hvl0rXbIzZYQxncsVU4d+Fx0GS3JYBZ+9EcfnCeAEb10jGvn7Rfk+0okMmOw==",
       "dev": true,
       "dependencies": {
-        "@nx/devkit": "17.3.2"
+        "@nx/devkit": "18.2.3"
+      }
+    },
+    "node_modules/@nx/eslint-plugin/node_modules/@nrwl/js": {
+      "version": "18.2.3",
+      "resolved": "https://registry.npmjs.org/@nrwl/js/-/js-18.2.3.tgz",
+      "integrity": "sha512-fOpKQg7CvzOmcow9fbBc5l96Pbv8gTe9qba4jiw3Z+EH776qraNBL9pRpff653V+obVh//gkq84BUeoJgk8vzQ==",
+      "dev": true,
+      "dependencies": {
+        "@nx/js": "18.2.3"
+      }
+    },
+    "node_modules/@nx/eslint-plugin/node_modules/@nrwl/tao": {
+      "version": "18.2.3",
+      "resolved": "https://registry.npmjs.org/@nrwl/tao/-/tao-18.2.3.tgz",
+      "integrity": "sha512-vmteqzGcKPbexaAVPb/7VfXI5dXxzZwSm3rem3z20QlDOmNh1545VLO9YEfT5xzmZT2CC7F0etR4KcrJLtoT5g==",
+      "dev": true,
+      "dependencies": {
+        "nx": "18.2.3",
+        "tslib": "^2.3.0"
+      },
+      "bin": {
+        "tao": "index.js"
+      }
+    },
+    "node_modules/@nx/eslint-plugin/node_modules/@nrwl/workspace": {
+      "version": "18.2.3",
+      "resolved": "https://registry.npmjs.org/@nrwl/workspace/-/workspace-18.2.3.tgz",
+      "integrity": "sha512-5tVtui/iy+VZTk3x/eFj21Zm0ICPUre9CfB5jlJ2MwH8w+96+186Yt2XGJATkFfnVnjqnszOcjk5BLlra8fdLA==",
+      "dev": true,
+      "dependencies": {
+        "@nx/workspace": "18.2.3"
       }
     },
     "node_modules/@nx/eslint-plugin/node_modules/@nx/devkit": {
-      "version": "17.3.2",
-      "resolved": "https://registry.npmjs.org/@nx/devkit/-/devkit-17.3.2.tgz",
-      "integrity": "sha512-gbOIhwrZKCSSFFbh6nE6LLCvAU7mhSdBSnRiS14YBwJJMu4CRJ0IcaFz58iXqGWZefMivKtkNFtx+zqwUC4ziw==",
+      "version": "18.2.3",
+      "resolved": "https://registry.npmjs.org/@nx/devkit/-/devkit-18.2.3.tgz",
+      "integrity": "sha512-dugw9Jm3Og28uwGee94P3KYkqiUV7J8RgibOQjQG4J2Vt3DPBNEGSgBD72qKkzpioEo+XSVUkn9h3GrdmnRU+Q==",
       "dev": true,
       "dependencies": {
-        "@nrwl/devkit": "17.3.2",
+        "@nrwl/devkit": "18.2.3",
         "ejs": "^3.1.7",
         "enquirer": "~2.3.6",
         "ignore": "^5.0.4",
@@ -4941,6 +4972,425 @@
         "nx": ">= 16 <= 18"
       }
     },
+    "node_modules/@nx/eslint-plugin/node_modules/@nx/js": {
+      "version": "18.2.3",
+      "resolved": "https://registry.npmjs.org/@nx/js/-/js-18.2.3.tgz",
+      "integrity": "sha512-hFSmgyaMVIlN/SyFwOwn/IveHsGxxJOv7qhewACg9NlKOa6+eEJYlEbOik9LjvcosDOh5icrngjsFgFJoC1sWA==",
+      "dev": true,
+      "dependencies": {
+        "@babel/core": "^7.23.2",
+        "@babel/plugin-proposal-decorators": "^7.22.7",
+        "@babel/plugin-transform-class-properties": "^7.22.5",
+        "@babel/plugin-transform-runtime": "^7.23.2",
+        "@babel/preset-env": "^7.23.2",
+        "@babel/preset-typescript": "^7.22.5",
+        "@babel/runtime": "^7.22.6",
+        "@nrwl/js": "18.2.3",
+        "@nx/devkit": "18.2.3",
+        "@nx/workspace": "18.2.3",
+        "@phenomnomnominal/tsquery": "~5.0.1",
+        "babel-plugin-const-enum": "^1.0.1",
+        "babel-plugin-macros": "^2.8.0",
+        "babel-plugin-transform-typescript-metadata": "^0.3.1",
+        "chalk": "^4.1.0",
+        "columnify": "^1.6.0",
+        "detect-port": "^1.5.1",
+        "fast-glob": "3.2.7",
+        "fs-extra": "^11.1.0",
+        "ignore": "^5.0.4",
+        "js-tokens": "^4.0.0",
+        "minimatch": "9.0.3",
+        "npm-package-arg": "11.0.1",
+        "npm-run-path": "^4.0.1",
+        "ora": "5.3.0",
+        "semver": "^7.5.3",
+        "source-map-support": "0.5.19",
+        "ts-node": "10.9.1",
+        "tsconfig-paths": "^4.1.2",
+        "tslib": "^2.3.0"
+      },
+      "peerDependencies": {
+        "verdaccio": "^5.0.4"
+      },
+      "peerDependenciesMeta": {
+        "verdaccio": {
+          "optional": true
+        }
+      }
+    },
+    "node_modules/@nx/eslint-plugin/node_modules/@nx/nx-darwin-arm64": {
+      "version": "18.2.3",
+      "resolved": "https://registry.npmjs.org/@nx/nx-darwin-arm64/-/nx-darwin-arm64-18.2.3.tgz",
+      "integrity": "sha512-TEks/vXHE87rNvVqhcIzQOM/+aZvNCf/70PhGG4RBEb+qV0C1kw7nygzdoLI4inFC76Qxhyya/K3J2OnU5ATiw==",
+      "cpu": [
+        "arm64"
+      ],
+      "dev": true,
+      "optional": true,
+      "os": [
+        "darwin"
+      ],
+      "engines": {
+        "node": ">= 10"
+      }
+    },
+    "node_modules/@nx/eslint-plugin/node_modules/@nx/nx-darwin-x64": {
+      "version": "18.2.3",
+      "resolved": "https://registry.npmjs.org/@nx/nx-darwin-x64/-/nx-darwin-x64-18.2.3.tgz",
+      "integrity": "sha512-UsBbNbNXj+L2OzPyQYotyzmZF4h+ryaZ8quYDfdnlYwvFeqkdb2QJ3vJRd6in0kMWGrdk/ria/wZMCxR7U1ggg==",
+      "cpu": [
+        "x64"
+      ],
+      "dev": true,
+      "optional": true,
+      "os": [
+        "darwin"
+      ],
+      "engines": {
+        "node": ">= 10"
+      }
+    },
+    "node_modules/@nx/eslint-plugin/node_modules/@nx/nx-freebsd-x64": {
+      "version": "18.2.3",
+      "resolved": "https://registry.npmjs.org/@nx/nx-freebsd-x64/-/nx-freebsd-x64-18.2.3.tgz",
+      "integrity": "sha512-f9BXGOeRPhrsNm99TCnOqZZeZUqN1BUOEzWa12eo3u+vQG6Qba3qKn7T92SeEzxOx/mUP/Csv3pFYoY6TE26jA==",
+      "cpu": [
+        "x64"
+      ],
+      "dev": true,
+      "optional": true,
+      "os": [
+        "freebsd"
+      ],
+      "engines": {
+        "node": ">= 10"
+      }
+    },
+    "node_modules/@nx/eslint-plugin/node_modules/@nx/nx-linux-arm-gnueabihf": {
+      "version": "18.2.3",
+      "resolved": "https://registry.npmjs.org/@nx/nx-linux-arm-gnueabihf/-/nx-linux-arm-gnueabihf-18.2.3.tgz",
+      "integrity": "sha512-ekqr5jZhD6PxGM5IbI/RtlERDJ+8HR04OIdfo6HkbwxwCHxZlzZq+ApEZYum4AbjP6cuc3Zd/us1uuDqfQbeHw==",
+      "cpu": [
+        "arm"
+      ],
+      "dev": true,
+      "optional": true,
+      "os": [
+        "linux"
+      ],
+      "engines": {
+        "node": ">= 10"
+      }
+    },
+    "node_modules/@nx/eslint-plugin/node_modules/@nx/nx-linux-arm64-gnu": {
+      "version": "18.2.3",
+      "resolved": "https://registry.npmjs.org/@nx/nx-linux-arm64-gnu/-/nx-linux-arm64-gnu-18.2.3.tgz",
+      "integrity": "sha512-iAW2J8NBFU4zDn5nqRgUq4t7gYC8ALyALzznr97ZvMTQorWfmHYgPUAj/opNqUcr10fjxcmXT0Ux2SX3DgUDmw==",
+      "cpu": [
+        "arm64"
+      ],
+      "dev": true,
+      "optional": true,
+      "os": [
+        "linux"
+      ],
+      "engines": {
+        "node": ">= 10"
+      }
+    },
+    "node_modules/@nx/eslint-plugin/node_modules/@nx/nx-linux-arm64-musl": {
+      "version": "18.2.3",
+      "resolved": "https://registry.npmjs.org/@nx/nx-linux-arm64-musl/-/nx-linux-arm64-musl-18.2.3.tgz",
+      "integrity": "sha512-AJjGVHGGew0QVKUL30mjFjafowrSDYSQ1GgkJCLuWef5jl4rFvm9ruZswVja1KfZTFaImTCU01tZjPBr3zhmAA==",
+      "cpu": [
+        "arm64"
+      ],
+      "dev": true,
+      "optional": true,
+      "os": [
+        "linux"
+      ],
+      "engines": {
+        "node": ">= 10"
+      }
+    },
+    "node_modules/@nx/eslint-plugin/node_modules/@nx/nx-linux-x64-gnu": {
+      "version": "18.2.3",
+      "resolved": "https://registry.npmjs.org/@nx/nx-linux-x64-gnu/-/nx-linux-x64-gnu-18.2.3.tgz",
+      "integrity": "sha512-nk5Xg8vmbBRoL0fOgZNBl1paC7hmjACLaSBmU7U2X+Y+QPGQzSw2b+Zn1MKVUWDmc4E6VnQfZ8n0L27+r9NgRw==",
+      "cpu": [
+        "x64"
+      ],
+      "dev": true,
+      "optional": true,
+      "os": [
+        "linux"
+      ],
+      "engines": {
+        "node": ">= 10"
+      }
+    },
+    "node_modules/@nx/eslint-plugin/node_modules/@nx/nx-linux-x64-musl": {
+      "version": "18.2.3",
+      "resolved": "https://registry.npmjs.org/@nx/nx-linux-x64-musl/-/nx-linux-x64-musl-18.2.3.tgz",
+      "integrity": "sha512-bOlhul/eov58k9fX8lltopUDOIBEohZq2qc4ag91W2r4jdp6suAiqfXRxQwNZ2iHd8nAXuCDIHCbUuojs6OZnA==",
+      "cpu": [
+        "x64"
+      ],
+      "dev": true,
+      "optional": true,
+      "os": [
+        "linux"
+      ],
+      "engines": {
+        "node": ">= 10"
+      }
+    },
+    "node_modules/@nx/eslint-plugin/node_modules/@nx/nx-win32-arm64-msvc": {
+      "version": "18.2.3",
+      "resolved": "https://registry.npmjs.org/@nx/nx-win32-arm64-msvc/-/nx-win32-arm64-msvc-18.2.3.tgz",
+      "integrity": "sha512-olXer0LnCvJrdV5ynd19fZHvvarRK/p1JnkoOUZDPVV+A3jGQQ8+paz+/5iLQBKA+5VcgWyqAaGFJnpyEFmnoQ==",
+      "cpu": [
+        "arm64"
+      ],
+      "dev": true,
+      "optional": true,
+      "os": [
+        "win32"
+      ],
+      "engines": {
+        "node": ">= 10"
+      }
+    },
+    "node_modules/@nx/eslint-plugin/node_modules/@nx/nx-win32-x64-msvc": {
+      "version": "18.2.3",
+      "resolved": "https://registry.npmjs.org/@nx/nx-win32-x64-msvc/-/nx-win32-x64-msvc-18.2.3.tgz",
+      "integrity": "sha512-BgzPjF/wqi7zIFcspcKzN37BX1wgGo0OTLncK2PN5nyzSQ+XeNbR5laDswxzOGdB4CRLPqak2+YMhYnoiXeRCg==",
+      "cpu": [
+        "x64"
+      ],
+      "dev": true,
+      "optional": true,
+      "os": [
+        "win32"
+      ],
+      "engines": {
+        "node": ">= 10"
+      }
+    },
+    "node_modules/@nx/eslint-plugin/node_modules/@nx/workspace": {
+      "version": "18.2.3",
+      "resolved": "https://registry.npmjs.org/@nx/workspace/-/workspace-18.2.3.tgz",
+      "integrity": "sha512-en3lSArMrHZ75SqMHnnZjXiMunc6QFDMcglNPQwIE8TuXnV8UWQ1e4hkzRo6hY/YOoY7HcFvMEJ5KyP8OWCmQg==",
+      "dev": true,
+      "dependencies": {
+        "@nrwl/workspace": "18.2.3",
+        "@nx/devkit": "18.2.3",
+        "chalk": "^4.1.0",
+        "enquirer": "~2.3.6",
+        "nx": "18.2.3",
+        "tslib": "^2.3.0",
+        "yargs-parser": "21.1.1"
+      }
+    },
+    "node_modules/@nx/eslint-plugin/node_modules/@typescript-eslint/scope-manager": {
+      "version": "7.5.0",
+      "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-7.5.0.tgz",
+      "integrity": "sha512-Z1r7uJY0MDeUlql9XJ6kRVgk/sP11sr3HKXn268HZyqL7i4cEfrdFuSSY/0tUqT37l5zT0tJOsuDP16kio85iA==",
+      "dev": true,
+      "dependencies": {
+        "@typescript-eslint/types": "7.5.0",
+        "@typescript-eslint/visitor-keys": "7.5.0"
+      },
+      "engines": {
+        "node": "^18.18.0 || >=20.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/typescript-eslint"
+      }
+    },
+    "node_modules/@nx/eslint-plugin/node_modules/@typescript-eslint/type-utils": {
+      "version": "7.5.0",
+      "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-7.5.0.tgz",
+      "integrity": "sha512-A021Rj33+G8mx2Dqh0nMO9GyjjIBK3MqgVgZ2qlKf6CJy51wY/lkkFqq3TqqnH34XyAHUkq27IjlUkWlQRpLHw==",
+      "dev": true,
+      "dependencies": {
+        "@typescript-eslint/typescript-estree": "7.5.0",
+        "@typescript-eslint/utils": "7.5.0",
+        "debug": "^4.3.4",
+        "ts-api-utils": "^1.0.1"
+      },
+      "engines": {
+        "node": "^18.18.0 || >=20.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/typescript-eslint"
+      },
+      "peerDependencies": {
+        "eslint": "^8.56.0"
+      },
+      "peerDependenciesMeta": {
+        "typescript": {
+          "optional": true
+        }
+      }
+    },
+    "node_modules/@nx/eslint-plugin/node_modules/@typescript-eslint/types": {
+      "version": "7.5.0",
+      "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-7.5.0.tgz",
+      "integrity": "sha512-tv5B4IHeAdhR7uS4+bf8Ov3k793VEVHd45viRRkehIUZxm0WF82VPiLgHzA/Xl4TGPg1ZD49vfxBKFPecD5/mg==",
+      "dev": true,
+      "engines": {
+        "node": "^18.18.0 || >=20.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/typescript-eslint"
+      }
+    },
+    "node_modules/@nx/eslint-plugin/node_modules/@typescript-eslint/typescript-estree": {
+      "version": "7.5.0",
+      "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-7.5.0.tgz",
+      "integrity": "sha512-YklQQfe0Rv2PZEueLTUffiQGKQneiIEKKnfIqPIOxgM9lKSZFCjT5Ad4VqRKj/U4+kQE3fa8YQpskViL7WjdPQ==",
+      "dev": true,
+      "dependencies": {
+        "@typescript-eslint/types": "7.5.0",
+        "@typescript-eslint/visitor-keys": "7.5.0",
+        "debug": "^4.3.4",
+        "globby": "^11.1.0",
+        "is-glob": "^4.0.3",
+        "minimatch": "9.0.3",
+        "semver": "^7.5.4",
+        "ts-api-utils": "^1.0.1"
+      },
+      "engines": {
+        "node": "^18.18.0 || >=20.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/typescript-eslint"
+      },
+      "peerDependenciesMeta": {
+        "typescript": {
+          "optional": true
+        }
+      }
+    },
+    "node_modules/@nx/eslint-plugin/node_modules/@typescript-eslint/utils": {
+      "version": "7.5.0",
+      "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-7.5.0.tgz",
+      "integrity": "sha512-3vZl9u0R+/FLQcpy2EHyRGNqAS/ofJ3Ji8aebilfJe+fobK8+LbIFmrHciLVDxjDoONmufDcnVSF38KwMEOjzw==",
+      "dev": true,
+      "dependencies": {
+        "@eslint-community/eslint-utils": "^4.4.0",
+        "@types/json-schema": "^7.0.12",
+        "@types/semver": "^7.5.0",
+        "@typescript-eslint/scope-manager": "7.5.0",
+        "@typescript-eslint/types": "7.5.0",
+        "@typescript-eslint/typescript-estree": "7.5.0",
+        "semver": "^7.5.4"
+      },
+      "engines": {
+        "node": "^18.18.0 || >=20.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/typescript-eslint"
+      },
+      "peerDependencies": {
+        "eslint": "^8.56.0"
+      }
+    },
+    "node_modules/@nx/eslint-plugin/node_modules/@typescript-eslint/visitor-keys": {
+      "version": "7.5.0",
+      "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-7.5.0.tgz",
+      "integrity": "sha512-mcuHM/QircmA6O7fy6nn2w/3ditQkj+SgtOc8DW3uQ10Yfj42amm2i+6F2K4YAOPNNTmE6iM1ynM6lrSwdendA==",
+      "dev": true,
+      "dependencies": {
+        "@typescript-eslint/types": "7.5.0",
+        "eslint-visitor-keys": "^3.4.1"
+      },
+      "engines": {
+        "node": "^18.18.0 || >=20.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/typescript-eslint"
+      }
+    },
+    "node_modules/@nx/eslint-plugin/node_modules/nx": {
+      "version": "18.2.3",
+      "resolved": "https://registry.npmjs.org/nx/-/nx-18.2.3.tgz",
+      "integrity": "sha512-4XGvvIzXeeeSj1hObiBL7E7aXX6rbiB1F856AqUdGoysYfkhcxOFyeAv5XsXeukl9gYwh/LH84paXjEOkGaJlA==",
+      "dev": true,
+      "hasInstallScript": true,
+      "dependencies": {
+        "@nrwl/tao": "18.2.3",
+        "@yarnpkg/lockfile": "^1.1.0",
+        "@yarnpkg/parsers": "3.0.0-rc.46",
+        "@zkochan/js-yaml": "0.0.6",
+        "axios": "^1.6.0",
+        "chalk": "^4.1.0",
+        "cli-cursor": "3.1.0",
+        "cli-spinners": "2.6.1",
+        "cliui": "^8.0.1",
+        "dotenv": "~16.3.1",
+        "dotenv-expand": "~10.0.0",
+        "enquirer": "~2.3.6",
+        "figures": "3.2.0",
+        "flat": "^5.0.2",
+        "fs-extra": "^11.1.0",
+        "ignore": "^5.0.4",
+        "jest-diff": "^29.4.1",
+        "js-yaml": "4.1.0",
+        "jsonc-parser": "3.2.0",
+        "lines-and-columns": "~2.0.3",
+        "minimatch": "9.0.3",
+        "node-machine-id": "1.1.12",
+        "npm-run-path": "^4.0.1",
+        "open": "^8.4.0",
+        "ora": "5.3.0",
+        "semver": "^7.5.3",
+        "string-width": "^4.2.3",
+        "strong-log-transformer": "^2.1.0",
+        "tar-stream": "~2.2.0",
+        "tmp": "~0.2.1",
+        "tsconfig-paths": "^4.1.2",
+        "tslib": "^2.3.0",
+        "yargs": "^17.6.2",
+        "yargs-parser": "21.1.1"
+      },
+      "bin": {
+        "nx": "bin/nx.js",
+        "nx-cloud": "bin/nx-cloud.js"
+      },
+      "optionalDependencies": {
+        "@nx/nx-darwin-arm64": "18.2.3",
+        "@nx/nx-darwin-x64": "18.2.3",
+        "@nx/nx-freebsd-x64": "18.2.3",
+        "@nx/nx-linux-arm-gnueabihf": "18.2.3",
+        "@nx/nx-linux-arm64-gnu": "18.2.3",
+        "@nx/nx-linux-arm64-musl": "18.2.3",
+        "@nx/nx-linux-x64-gnu": "18.2.3",
+        "@nx/nx-linux-x64-musl": "18.2.3",
+        "@nx/nx-win32-arm64-msvc": "18.2.3",
+        "@nx/nx-win32-x64-msvc": "18.2.3"
+      },
+      "peerDependencies": {
+        "@swc-node/register": "^1.8.0",
+        "@swc/core": "^1.3.85"
+      },
+      "peerDependenciesMeta": {
+        "@swc-node/register": {
+          "optional": true
+        },
+        "@swc/core": {
+          "optional": true
+        }
+      }
+    },
     "node_modules/@nx/eslint/node_modules/@nrwl/devkit": {
       "version": "17.3.2",
       "resolved": "https://registry.npmjs.org/@nrwl/devkit/-/devkit-17.3.2.tgz",
@@ -6292,6 +6742,64 @@
         "win32"
       ]
     },
+    "node_modules/@sentry-internal/tracing": {
+      "version": "7.111.0",
+      "resolved": "https://registry.npmjs.org/@sentry-internal/tracing/-/tracing-7.111.0.tgz",
+      "integrity": "sha512-CgXly8rsdu4loWVKi2RqpInH3C2cVBuaYsx4ZP5IJpzSinsUAMyyr3Pc0PZzCyoVpBBXGBGj/4HhFsY3q6Z0Vg==",
+      "dependencies": {
+        "@sentry/core": "7.111.0",
+        "@sentry/types": "7.111.0",
+        "@sentry/utils": "7.111.0"
+      },
+      "engines": {
+        "node": ">=8"
+      }
+    },
+    "node_modules/@sentry/core": {
+      "version": "7.111.0",
+      "resolved": "https://registry.npmjs.org/@sentry/core/-/core-7.111.0.tgz",
+      "integrity": "sha512-/ljeMjZu8CSrLGrseBi/7S2zRIFsqMcvfyG6Nwgfc07J9nbHt8/MqouE1bXZfiaILqDBpK7BK9MLAAph4mkAWg==",
+      "dependencies": {
+        "@sentry/types": "7.111.0",
+        "@sentry/utils": "7.111.0"
+      },
+      "engines": {
+        "node": ">=8"
+      }
+    },
+    "node_modules/@sentry/node": {
+      "version": "7.111.0",
+      "resolved": "https://registry.npmjs.org/@sentry/node/-/node-7.111.0.tgz",
+      "integrity": "sha512-bTLZNETT7W89HEk04rwsch02KSpu++Yec/BEyM3AxUNY+ZQ9ZLL/lrNZuCwbe7fURpKoZrvGAhxpPjgs5UcB9w==",
+      "dependencies": {
+        "@sentry-internal/tracing": "7.111.0",
+        "@sentry/core": "7.111.0",
+        "@sentry/types": "7.111.0",
+        "@sentry/utils": "7.111.0"
+      },
+      "engines": {
+        "node": ">=8"
+      }
+    },
+    "node_modules/@sentry/types": {
+      "version": "7.111.0",
+      "resolved": "https://registry.npmjs.org/@sentry/types/-/types-7.111.0.tgz",
+      "integrity": "sha512-Oti4pgQ55+FBHKKcHGu51ZUxO1u52G5iVNK4mbtAN+5ArSCy/2s1H8IDJiOMswn3acfUnCR0oB/QsbEgAPZ26g==",
+      "engines": {
+        "node": ">=8"
+      }
+    },
+    "node_modules/@sentry/utils": {
+      "version": "7.111.0",
+      "resolved": "https://registry.npmjs.org/@sentry/utils/-/utils-7.111.0.tgz",
+      "integrity": "sha512-CB5rz1EgCSwj3xoXogsCZ5pQtfERrURc/ItcCuoaijUhkD0iMq5MCNWMHW3mBsBrqx/Oba+XGvDu0t/5+SWwBg==",
+      "dependencies": {
+        "@sentry/types": "7.111.0"
+      },
+      "engines": {
+        "node": ">=8"
+      }
+    },
     "node_modules/@sevinf/maybe": {
       "version": "0.5.0",
       "resolved": "https://registry.npmjs.org/@sevinf/maybe/-/maybe-0.5.0.tgz",
@@ -21994,7 +22502,7 @@
     },
     "packages/instrumentation-anthropic": {
       "name": "@traceloop/instrumentation-anthropic",
-      "version": "0.6.0",
+      "version": "0.6.1",
       "license": "Apache-2.0",
       "dependencies": {
         "@opentelemetry/core": "^1.22.0",
@@ -22071,7 +22579,7 @@
     },
     "packages/instrumentation-azure": {
       "name": "@traceloop/instrumentation-azure",
-      "version": "0.6.0",
+      "version": "0.6.1",
       "license": "Apache-2.0",
       "dependencies": {
         "@opentelemetry/core": "^1.22.0",
@@ -22121,7 +22629,7 @@
     },
     "packages/instrumentation-bedrock": {
       "name": "@traceloop/instrumentation-bedrock",
-      "version": "0.6.0",
+      "version": "0.6.1",
       "license": "Apache-2.0",
       "dependencies": {
         "@opentelemetry/core": "^1.22.0",
@@ -22169,7 +22677,7 @@
     },
     "packages/instrumentation-cohere": {
       "name": "@traceloop/instrumentation-cohere",
-      "version": "0.6.0",
+      "version": "0.6.1",
       "license": "Apache-2.0",
       "dependencies": {
         "@opentelemetry/core": "^1.22.0",
@@ -22189,7 +22697,7 @@
     },
     "packages/instrumentation-langchain": {
       "name": "@traceloop/instrumentation-langchain",
-      "version": "0.6.0",
+      "version": "0.6.1",
       "license": "Apache-2.0",
       "dependencies": {
         "@opentelemetry/core": "^1.22.0",
@@ -22777,7 +23285,7 @@
     },
     "packages/instrumentation-llamaindex": {
       "name": "@traceloop/instrumentation-llamaindex",
-      "version": "0.6.0",
+      "version": "0.6.1",
       "license": "Apache-2.0",
       "dependencies": {
         "@opentelemetry/core": "^1.22.0",
@@ -22828,7 +23336,7 @@
     },
     "packages/instrumentation-openai": {
       "name": "@traceloop/instrumentation-openai",
-      "version": "0.6.0",
+      "version": "0.6.1",
       "license": "Apache-2.0",
       "dependencies": {
         "@opentelemetry/core": "^1.22.0",
@@ -22880,7 +23388,7 @@
     },
     "packages/instrumentation-pinecone": {
       "name": "@traceloop/instrumentation-pinecone",
-      "version": "0.6.0",
+      "version": "0.6.1",
       "license": "Apache-2.0",
       "dependencies": {
         "@opentelemetry/core": "^1.22.0",
@@ -22928,7 +23436,7 @@
     },
     "packages/instrumentation-vertexai": {
       "name": "@traceloop/instrumentation-vertexai",
-      "version": "0.6.0",
+      "version": "0.6.1",
       "license": "Apache-2.0",
       "dependencies": {
         "@opentelemetry/core": "^1.22.0",
@@ -23414,21 +23922,22 @@
     },
     "packages/traceloop-sdk": {
       "name": "@traceloop/node-server-sdk",
-      "version": "0.6.0",
+      "version": "0.6.1",
       "license": "Apache-2.0",
       "dependencies": {
         "@opentelemetry/exporter-trace-otlp-proto": "^0.49.1",
         "@opentelemetry/sdk-node": "^0.49.1",
+        "@sentry/node": "^7.111.0",
         "@traceloop/ai-semantic-conventions": "^0.6.0",
-        "@traceloop/instrumentation-anthropic": "^0.6.0",
-        "@traceloop/instrumentation-azure": "^0.6.0",
-        "@traceloop/instrumentation-bedrock": "^0.6.0",
-        "@traceloop/instrumentation-cohere": "^0.6.0",
-        "@traceloop/instrumentation-langchain": "^0.6.0",
-        "@traceloop/instrumentation-llamaindex": "^0.6.0",
-        "@traceloop/instrumentation-openai": "^0.6.0",
-        "@traceloop/instrumentation-pinecone": "^0.6.0",
-        "@traceloop/instrumentation-vertexai": "^0.6.0",
+        "@traceloop/instrumentation-anthropic": "^0.6.1",
+        "@traceloop/instrumentation-azure": "^0.6.1",
+        "@traceloop/instrumentation-bedrock": "^0.6.1",
+        "@traceloop/instrumentation-cohere": "^0.6.1",
+        "@traceloop/instrumentation-langchain": "^0.6.1",
+        "@traceloop/instrumentation-llamaindex": "^0.6.1",
+        "@traceloop/instrumentation-openai": "^0.6.1",
+        "@traceloop/instrumentation-pinecone": "^0.6.1",
+        "@traceloop/instrumentation-vertexai": "^0.6.1",
         "@types/nunjucks": "^3.2.5",
         "cross-fetch": "^4.0.0",
         "fetch-retry": "^5.0.6",
diff --git a/package.json b/package.json
index 674c23db..90cd76fd 100644
--- a/package.json
+++ b/package.json
@@ -11,7 +11,7 @@
     "@commitlint/cli": "^18.4.4",
     "@commitlint/config-conventional": "^18.4.4",
     "@jscutlery/semver": "^3.4.1",
-    "@nx/eslint-plugin": "^17.2.8",
+    "@nx/eslint-plugin": "^18.2.3",
     "@nx/js": "^17.2.8",
     "@nx/linter": "^17.2.8",
     "@rollup/plugin-json": "^6.1.0",
diff --git a/packages/instrumentation-anthropic/CHANGELOG.md b/packages/instrumentation-anthropic/CHANGELOG.md
index c84a8b69..ea3768b1 100644
--- a/packages/instrumentation-anthropic/CHANGELOG.md
+++ b/packages/instrumentation-anthropic/CHANGELOG.md
@@ -3,6 +3,12 @@
 All notable changes to this project will be documented in this file.
 See [Conventional Commits](https://conventionalcommits.org) for commit guidelines.
 
+## [0.6.1](https://github.com/traceloop/openllmetry-js/compare/v0.6.0...v0.6.1) (2024-04-22)
+
+### Bug Fixes
+
+- handle exceptions ([#214](https://github.com/traceloop/openllmetry-js/issues/214)) ([65f9be4](https://github.com/traceloop/openllmetry-js/commit/65f9be4fdcaa40f5bfd6c1fe3edc60910b4af894))
+
 # [0.6.0](https://github.com/traceloop/openllmetry-js/compare/v0.5.29...v0.6.0) (2024-04-05)
 
 ### Bug Fixes
diff --git a/packages/instrumentation-anthropic/package.json b/packages/instrumentation-anthropic/package.json
index 07ad0411..d2f7b5f3 100644
--- a/packages/instrumentation-anthropic/package.json
+++ b/packages/instrumentation-anthropic/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@traceloop/instrumentation-anthropic",
-  "version": "0.6.0",
+  "version": "0.6.1",
   "description": "Anthropic Instrumentaion",
   "main": "dist/index.js",
   "module": "dist/index.mjs",
diff --git a/packages/instrumentation-anthropic/src/instrumentation.ts b/packages/instrumentation-anthropic/src/instrumentation.ts
index 00880d2d..57e9d92e 100644
--- a/packages/instrumentation-anthropic/src/instrumentation.ts
+++ b/packages/instrumentation-anthropic/src/instrumentation.ts
@@ -193,44 +193,49 @@ export class AnthropicInstrumentation extends InstrumentationBase<any> {
       [SpanAttributes.LLM_REQUEST_TYPE]: type,
     };
 
-    attributes[SpanAttributes.LLM_REQUEST_MODEL] = params.model;
-    attributes[SpanAttributes.LLM_TEMPERATURE] = params.temperature;
-    attributes[SpanAttributes.LLM_TOP_P] = params.top_p;
-    attributes[SpanAttributes.LLM_TOP_K] = params.top_k;
-
-    if (type === "completion") {
-      attributes[SpanAttributes.LLM_REQUEST_MAX_TOKENS] =
-        params.max_tokens_to_sample;
-    } else {
-      attributes[SpanAttributes.LLM_REQUEST_MAX_TOKENS] = params.max_tokens;
-    }
-
-    if (
-      params.extraAttributes !== undefined &&
-      typeof params.extraAttributes === "object"
-    ) {
-      Object.keys(params.extraAttributes).forEach((key: string) => {
-        attributes[key] = params.extraAttributes![key];
-      });
-    }
+    try {
+      attributes[SpanAttributes.LLM_REQUEST_MODEL] = params.model;
+      attributes[SpanAttributes.LLM_TEMPERATURE] = params.temperature;
+      attributes[SpanAttributes.LLM_TOP_P] = params.top_p;
+      attributes[SpanAttributes.LLM_TOP_K] = params.top_k;
+
+      if (type === "completion") {
+        attributes[SpanAttributes.LLM_REQUEST_MAX_TOKENS] =
+          params.max_tokens_to_sample;
+      } else {
+        attributes[SpanAttributes.LLM_REQUEST_MAX_TOKENS] = params.max_tokens;
+      }
 
-    if (this._shouldSendPrompts()) {
-      if (type === "chat") {
-        params.messages.forEach((message, index) => {
-          attributes[`${SpanAttributes.LLM_PROMPTS}.${index}.role`] =
-            message.role;
-          if (typeof message.content === "string") {
-            attributes[`${SpanAttributes.LLM_PROMPTS}.${index}.content`] =
-              (message.content as string) || "";
-          } else {
-            attributes[`${SpanAttributes.LLM_PROMPTS}.${index}.content`] =
-              JSON.stringify(message.content);
-          }
+      if (
+        params.extraAttributes !== undefined &&
+        typeof params.extraAttributes === "object"
+      ) {
+        Object.keys(params.extraAttributes).forEach((key: string) => {
+          attributes[key] = params.extraAttributes![key];
         });
-      } else {
-        attributes[`${SpanAttributes.LLM_PROMPTS}.0.role`] = "user";
-        attributes[`${SpanAttributes.LLM_PROMPTS}.0.content`] = params.prompt;
       }
+
+      if (this._shouldSendPrompts()) {
+        if (type === "chat") {
+          params.messages.forEach((message, index) => {
+            attributes[`${SpanAttributes.LLM_PROMPTS}.${index}.role`] =
+              message.role;
+            if (typeof message.content === "string") {
+              attributes[`${SpanAttributes.LLM_PROMPTS}.${index}.content`] =
+                (message.content as string) || "";
+            } else {
+              attributes[`${SpanAttributes.LLM_PROMPTS}.${index}.content`] =
+                JSON.stringify(message.content);
+            }
+          });
+        } else {
+          attributes[`${SpanAttributes.LLM_PROMPTS}.0.role`] = "user";
+          attributes[`${SpanAttributes.LLM_PROMPTS}.0.content`] = params.prompt;
+        }
+      }
+    } catch (e) {
+      this._diag.warn(e);
+      this._config.exceptionLogger?.(e);
     }
 
     return this.tracer.startSpan(`anthropic.${type}`, {
@@ -268,20 +273,25 @@ export class AnthropicInstrumentation extends InstrumentationBase<any> {
       for await (const chunk of await promise) {
         yield chunk;
 
-        switch (chunk.type) {
-          case "content_block_start":
-            if (result.content.length <= chunk.index) {
-              result.content.push(chunk.content_block);
-            }
-            break;
-
-          case "content_block_delta":
-            if (chunk.index < result.content.length) {
-              result.content[chunk.index] = {
-                type: "text",
-                text: result.content[chunk.index].text + chunk.delta.text,
-              };
-            }
+        try {
+          switch (chunk.type) {
+            case "content_block_start":
+              if (result.content.length <= chunk.index) {
+                result.content.push(chunk.content_block);
+              }
+              break;
+
+            case "content_block_delta":
+              if (chunk.index < result.content.length) {
+                result.content[chunk.index] = {
+                  type: "text",
+                  text: result.content[chunk.index].text + chunk.delta.text,
+                };
+              }
+          }
+        } catch (e) {
+          this._diag.warn(e);
+          this._config.exceptionLogger?.(e);
         }
       }
 
@@ -297,17 +307,22 @@ export class AnthropicInstrumentation extends InstrumentationBase<any> {
       for await (const chunk of await promise) {
         yield chunk;
 
-        result.id = chunk.id;
-        result.model = chunk.model;
-
-        if (chunk.stop_reason) {
-          result.stop_reason = chunk.stop_reason;
-        }
-        if (chunk.model) {
+        try {
+          result.id = chunk.id;
           result.model = chunk.model;
-        }
-        if (chunk.completion) {
-          result.completion += chunk.completion;
+
+          if (chunk.stop_reason) {
+            result.stop_reason = chunk.stop_reason;
+          }
+          if (chunk.model) {
+            result.model = chunk.model;
+          }
+          if (chunk.completion) {
+            result.completion += chunk.completion;
+          }
+        } catch (e) {
+          this._diag.warn(e);
+          this._config.exceptionLogger?.(e);
         }
       }
 
@@ -365,48 +380,53 @@ export class AnthropicInstrumentation extends InstrumentationBase<any> {
         type: "completion";
         result: Completion;
       }) {
-    span.setAttribute(SpanAttributes.LLM_RESPONSE_MODEL, result.model);
-    if (type === "chat" && result.usage) {
-      span.setAttribute(
-        SpanAttributes.LLM_USAGE_TOTAL_TOKENS,
-        result.usage?.input_tokens + result.usage?.output_tokens,
-      );
-      span.setAttribute(
-        SpanAttributes.LLM_USAGE_COMPLETION_TOKENS,
-        result.usage?.output_tokens,
-      );
-      span.setAttribute(
-        SpanAttributes.LLM_USAGE_PROMPT_TOKENS,
-        result.usage?.input_tokens,
-      );
-    }
-
-    result.stop_reason &&
-      span.setAttribute(
-        `${SpanAttributes.LLM_COMPLETIONS}.0.finish_reason`,
-        result.stop_reason,
-      );
-
-    if (this._shouldSendPrompts()) {
-      if (type === "chat") {
+    try {
+      span.setAttribute(SpanAttributes.LLM_RESPONSE_MODEL, result.model);
+      if (type === "chat" && result.usage) {
         span.setAttribute(
-          `${SpanAttributes.LLM_COMPLETIONS}.0.role`,
-          "assistant",
+          SpanAttributes.LLM_USAGE_TOTAL_TOKENS,
+          result.usage?.input_tokens + result.usage?.output_tokens,
         );
         span.setAttribute(
-          `${SpanAttributes.LLM_COMPLETIONS}.0.content`,
-          JSON.stringify(result.content),
+          SpanAttributes.LLM_USAGE_COMPLETION_TOKENS,
+          result.usage?.output_tokens,
         );
-      } else {
         span.setAttribute(
-          `${SpanAttributes.LLM_COMPLETIONS}.0.role`,
-          "assistant",
+          SpanAttributes.LLM_USAGE_PROMPT_TOKENS,
+          result.usage?.input_tokens,
         );
+      }
+
+      result.stop_reason &&
         span.setAttribute(
-          `${SpanAttributes.LLM_COMPLETIONS}.0.content`,
-          result.completion,
+          `${SpanAttributes.LLM_COMPLETIONS}.0.finish_reason`,
+          result.stop_reason,
         );
+
+      if (this._shouldSendPrompts()) {
+        if (type === "chat") {
+          span.setAttribute(
+            `${SpanAttributes.LLM_COMPLETIONS}.0.role`,
+            "assistant",
+          );
+          span.setAttribute(
+            `${SpanAttributes.LLM_COMPLETIONS}.0.content`,
+            JSON.stringify(result.content),
+          );
+        } else {
+          span.setAttribute(
+            `${SpanAttributes.LLM_COMPLETIONS}.0.role`,
+            "assistant",
+          );
+          span.setAttribute(
+            `${SpanAttributes.LLM_COMPLETIONS}.0.content`,
+            result.completion,
+          );
+        }
       }
+    } catch (e) {
+      this._diag.warn(e);
+      this._config.exceptionLogger?.(e);
     }
 
     span.end();
diff --git a/packages/instrumentation-anthropic/src/types.ts b/packages/instrumentation-anthropic/src/types.ts
index 2fbf4036..8dfc0e14 100644
--- a/packages/instrumentation-anthropic/src/types.ts
+++ b/packages/instrumentation-anthropic/src/types.ts
@@ -6,4 +6,9 @@ export interface AnthropicInstrumentationConfig extends InstrumentationConfig {
    * @default true
    */
   traceContent?: boolean;
+
+  /**
+   * A custom logger to log any exceptions that happen during span creation.
+   */
+  exceptionLogger?: (e: Error) => void;
 }
diff --git a/packages/instrumentation-azure/CHANGELOG.md b/packages/instrumentation-azure/CHANGELOG.md
index c5a3e194..b4a8573b 100644
--- a/packages/instrumentation-azure/CHANGELOG.md
+++ b/packages/instrumentation-azure/CHANGELOG.md
@@ -3,6 +3,12 @@
 All notable changes to this project will be documented in this file.
 See [Conventional Commits](https://conventionalcommits.org) for commit guidelines.
 
+## [0.6.1](https://github.com/traceloop/openllmetry-js/compare/v0.6.0...v0.6.1) (2024-04-22)
+
+### Bug Fixes
+
+- handle exceptions ([#214](https://github.com/traceloop/openllmetry-js/issues/214)) ([65f9be4](https://github.com/traceloop/openllmetry-js/commit/65f9be4fdcaa40f5bfd6c1fe3edc60910b4af894))
+
 # [0.6.0](https://github.com/traceloop/openllmetry-js/compare/v0.5.29...v0.6.0) (2024-04-05)
 
 ### Features
diff --git a/packages/instrumentation-azure/package.json b/packages/instrumentation-azure/package.json
index e7cdb719..af528fa6 100644
--- a/packages/instrumentation-azure/package.json
+++ b/packages/instrumentation-azure/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@traceloop/instrumentation-azure",
-  "version": "0.6.0",
+  "version": "0.6.1",
   "description": "Azure OpenAI Instrumentaion",
   "main": "dist/index.js",
   "module": "dist/index.mjs",
diff --git a/packages/instrumentation-azure/src/instrumentation.ts b/packages/instrumentation-azure/src/instrumentation.ts
index 627fd9a6..ee486296 100644
--- a/packages/instrumentation-azure/src/instrumentation.ts
+++ b/packages/instrumentation-azure/src/instrumentation.ts
@@ -177,43 +177,49 @@ export class AzureOpenAIInstrumentation extends InstrumentationBase<any> {
       [SpanAttributes.LLM_REQUEST_TYPE]: type,
     };
 
-    attributes[SpanAttributes.LLM_REQUEST_MODEL] = deployment;
+    try {
+      attributes[SpanAttributes.LLM_REQUEST_MODEL] = deployment;
 
-    if (
-      params.extraAttributes !== undefined &&
-      typeof params.extraAttributes === "object"
-    ) {
-      Object.keys(params.extraAttributes).forEach((key: string) => {
-        attributes[key] = params.extraAttributes![key];
-      });
-    }
-
-    if (this._shouldSendPrompts()) {
-      if (type === "chat") {
-        params.forEach((message, index) => {
-          attributes[`${SpanAttributes.LLM_PROMPTS}.${index}.role`] =
-            message.role;
-          if (typeof message.content === "string") {
-            attributes[`${SpanAttributes.LLM_PROMPTS}.${index}.content`] =
-              (message.content as string) || "";
-          } else {
-            attributes[`${SpanAttributes.LLM_PROMPTS}.${index}.content`] =
-              JSON.stringify(message.content);
-          }
+      if (
+        params.extraAttributes !== undefined &&
+        typeof params.extraAttributes === "object"
+      ) {
+        Object.keys(params.extraAttributes).forEach((key: string) => {
+          attributes[key] = params.extraAttributes![key];
         });
-      } else {
-        attributes[`${SpanAttributes.LLM_PROMPTS}.0.role`] = "user";
-        if (typeof params === "string") {
-          attributes[`${SpanAttributes.LLM_PROMPTS}.0.content`] = params;
-        } else {
-          params.forEach((prompt, index) => {
-            attributes[`${SpanAttributes.LLM_PROMPTS}.${index}.role`] = "user";
+      }
 
-            attributes[`${SpanAttributes.LLM_PROMPTS}.${index}.content`] =
-              prompt;
+      if (this._shouldSendPrompts()) {
+        if (type === "chat") {
+          params.forEach((message, index) => {
+            attributes[`${SpanAttributes.LLM_PROMPTS}.${index}.role`] =
+              message.role;
+            if (typeof message.content === "string") {
+              attributes[`${SpanAttributes.LLM_PROMPTS}.${index}.content`] =
+                (message.content as string) || "";
+            } else {
+              attributes[`${SpanAttributes.LLM_PROMPTS}.${index}.content`] =
+                JSON.stringify(message.content);
+            }
           });
+        } else {
+          attributes[`${SpanAttributes.LLM_PROMPTS}.0.role`] = "user";
+          if (typeof params === "string") {
+            attributes[`${SpanAttributes.LLM_PROMPTS}.0.content`] = params;
+          } else {
+            params.forEach((prompt, index) => {
+              attributes[`${SpanAttributes.LLM_PROMPTS}.${index}.role`] =
+                "user";
+
+              attributes[`${SpanAttributes.LLM_PROMPTS}.${index}.content`] =
+                prompt;
+            });
+          }
         }
       }
+    } catch (e) {
+      this._diag.warn(e);
+      this._config.exceptionLogger?.(e);
     }
 
     return this.tracer.startSpan(`openai.${type}`, {
@@ -277,69 +283,74 @@ export class AzureOpenAIInstrumentation extends InstrumentationBase<any> {
         type: "completion";
         result: Completions;
       }) {
-    span.setAttribute(SpanAttributes.LLM_RESPONSE_MODEL, deployment);
-    if (result.usage) {
-      span.setAttribute(
-        SpanAttributes.LLM_USAGE_TOTAL_TOKENS,
-        result.usage?.totalTokens,
-      );
-      span.setAttribute(
-        SpanAttributes.LLM_USAGE_COMPLETION_TOKENS,
-        result.usage?.completionTokens,
-      );
-      span.setAttribute(
-        SpanAttributes.LLM_USAGE_PROMPT_TOKENS,
-        result.usage?.promptTokens,
-      );
-    }
+    try {
+      span.setAttribute(SpanAttributes.LLM_RESPONSE_MODEL, deployment);
+      if (result.usage) {
+        span.setAttribute(
+          SpanAttributes.LLM_USAGE_TOTAL_TOKENS,
+          result.usage?.totalTokens,
+        );
+        span.setAttribute(
+          SpanAttributes.LLM_USAGE_COMPLETION_TOKENS,
+          result.usage?.completionTokens,
+        );
+        span.setAttribute(
+          SpanAttributes.LLM_USAGE_PROMPT_TOKENS,
+          result.usage?.promptTokens,
+        );
+      }
 
-    if (this._shouldSendPrompts()) {
-      if (type === "chat") {
-        result.choices.forEach((choice, index) => {
-          choice.finishReason &&
-            span.setAttribute(
-              `${SpanAttributes.LLM_COMPLETIONS}.${index}.finish_reason`,
-              choice.finishReason,
-            );
-          choice.message &&
+      if (this._shouldSendPrompts()) {
+        if (type === "chat") {
+          result.choices.forEach((choice, index) => {
+            choice.finishReason &&
+              span.setAttribute(
+                `${SpanAttributes.LLM_COMPLETIONS}.${index}.finish_reason`,
+                choice.finishReason,
+              );
+            choice.message &&
+              span.setAttribute(
+                `${SpanAttributes.LLM_COMPLETIONS}.${index}.role`,
+                choice.message.role,
+              );
+            choice.message?.content &&
+              span.setAttribute(
+                `${SpanAttributes.LLM_COMPLETIONS}.${index}.content`,
+                choice.message.content,
+              );
+
+            if (choice.message?.functionCall) {
+              span.setAttribute(
+                `${SpanAttributes.LLM_COMPLETIONS}.${index}.function_call.name`,
+                choice.message.functionCall.name,
+              );
+              span.setAttribute(
+                `${SpanAttributes.LLM_COMPLETIONS}.${index}.function_call.arguments`,
+                choice.message.functionCall.arguments,
+              );
+            }
+          });
+        } else {
+          result.choices.forEach((choice, index) => {
+            choice.finishReason &&
+              span.setAttribute(
+                `${SpanAttributes.LLM_COMPLETIONS}.${index}.finish_reason`,
+                choice.finishReason,
+              );
             span.setAttribute(
               `${SpanAttributes.LLM_COMPLETIONS}.${index}.role`,
-              choice.message.role,
+              "assistant",
             );
-          choice.message?.content &&
             span.setAttribute(
               `${SpanAttributes.LLM_COMPLETIONS}.${index}.content`,
-              choice.message.content,
-            );
-
-          if (choice.message?.functionCall) {
-            span.setAttribute(
-              `${SpanAttributes.LLM_COMPLETIONS}.${index}.function_call.name`,
-              choice.message.functionCall.name,
+              choice.text,
             );
-            span.setAttribute(
-              `${SpanAttributes.LLM_COMPLETIONS}.${index}.function_call.arguments`,
-              choice.message.functionCall.arguments,
-            );
-          }
-        });
-      } else {
-        result.choices.forEach((choice, index) => {
-          choice.finishReason &&
-            span.setAttribute(
-              `${SpanAttributes.LLM_COMPLETIONS}.${index}.finish_reason`,
-              choice.finishReason,
-            );
-          span.setAttribute(
-            `${SpanAttributes.LLM_COMPLETIONS}.${index}.role`,
-            "assistant",
-          );
-          span.setAttribute(
-            `${SpanAttributes.LLM_COMPLETIONS}.${index}.content`,
-            choice.text,
-          );
-        });
+          });
+        }
       }
+    } catch (e) {
+      this._diag.warn(e);
+      this._config.exceptionLogger?.(e);
     }
 
     span.end();
diff --git a/packages/instrumentation-azure/src/types.ts b/packages/instrumentation-azure/src/types.ts
index b21e441a..f4898cae 100644
--- a/packages/instrumentation-azure/src/types.ts
+++ b/packages/instrumentation-azure/src/types.ts
@@ -7,4 +7,9 @@ export interface AzureOpenAIInstrumentationConfig
    * @default true
    */
   traceContent?: boolean;
+
+  /**
+   * A custom logger to log any exceptions that happen during span creation.
+   */
+  exceptionLogger?: (e: Error) => void;
 }
diff --git a/packages/instrumentation-bedrock/CHANGELOG.md b/packages/instrumentation-bedrock/CHANGELOG.md
index 4e824a33..f487c422 100644
--- a/packages/instrumentation-bedrock/CHANGELOG.md
+++ b/packages/instrumentation-bedrock/CHANGELOG.md
@@ -3,6 +3,12 @@
 All notable changes to this project will be documented in this file.
 See [Conventional Commits](https://conventionalcommits.org) for commit guidelines.
 
+## [0.6.1](https://github.com/traceloop/openllmetry-js/compare/v0.6.0...v0.6.1) (2024-04-22)
+
+### Bug Fixes
+
+- handle exceptions ([#214](https://github.com/traceloop/openllmetry-js/issues/214)) ([65f9be4](https://github.com/traceloop/openllmetry-js/commit/65f9be4fdcaa40f5bfd6c1fe3edc60910b4af894))
+
 # [0.6.0](https://github.com/traceloop/openllmetry-js/compare/v0.5.29...v0.6.0) (2024-04-05)
 
 ### Features
diff --git a/packages/instrumentation-bedrock/package.json b/packages/instrumentation-bedrock/package.json
index dda343c2..7344540c 100644
--- a/packages/instrumentation-bedrock/package.json
+++ b/packages/instrumentation-bedrock/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@traceloop/instrumentation-bedrock",
-  "version": "0.6.0",
+  "version": "0.6.1",
   "description": "Amazon Bedrock Instrumentation",
   "main": "dist/index.js",
   "module": "dist/index.mjs",
diff --git a/packages/instrumentation-bedrock/src/instrumentation.ts b/packages/instrumentation-bedrock/src/instrumentation.ts
index 23d73133..36227722 100644
--- a/packages/instrumentation-bedrock/src/instrumentation.ts
+++ b/packages/instrumentation-bedrock/src/instrumentation.ts
@@ -148,24 +148,31 @@ export class BedrockInstrumentation extends InstrumentationBase<any> {
   }: {
     params: Parameters<bedrock.BedrockRuntimeClient["send"]>[0];
   }): Span {
-    const [vendor, model] = params.input.modelId
-      ? params.input.modelId.split(".")
-      : ["", ""];
-
-    let attributes: Attributes = {
-      [SpanAttributes.LLM_VENDOR]: vendor,
-      [SpanAttributes.LLM_REQUEST_MODEL]: model,
-      [SpanAttributes.LLM_RESPONSE_MODEL]: model,
-      [SpanAttributes.LLM_REQUEST_TYPE]: LLMRequestTypeValues.COMPLETION,
-    };
+    let attributes: Attributes = {};
 
-    if (typeof params.input.body === "string") {
-      const requestBody = JSON.parse(params.input.body);
+    try {
+      const [vendor, model] = params.input.modelId
+        ? params.input.modelId.split(".")
+        : ["", ""];
 
       attributes = {
-        ...attributes,
-        ...this._setRequestAttributes(vendor, requestBody),
+        [SpanAttributes.LLM_VENDOR]: vendor,
+        [SpanAttributes.LLM_REQUEST_MODEL]: model,
+        [SpanAttributes.LLM_RESPONSE_MODEL]: model,
+        [SpanAttributes.LLM_REQUEST_TYPE]: LLMRequestTypeValues.COMPLETION,
       };
+
+      if (typeof params.input.body === "string") {
+        const requestBody = JSON.parse(params.input.body);
+
+        attributes = {
+          ...attributes,
+          ...this._setRequestAttributes(vendor, requestBody),
+        };
+      }
+    } catch (e) {
+      this._diag.warn(e);
+      this._config.exceptionLogger?.(e);
     }
 
     return this.tracer.startSpan(`bedrock.completion`, {
@@ -183,85 +190,92 @@ export class BedrockInstrumentation extends InstrumentationBase<any> {
       | bedrock.InvokeModelCommandOutput
       | bedrock.InvokeModelWithResponseStreamCommandOutput;
   }) {
-    if ("body" in result) {
-      const attributes =
-        "attributes" in span ? (span["attributes"] as Record<string, any>) : {};
-
-      if (SpanAttributes.LLM_VENDOR in attributes) {
-        if (!(result.body instanceof Object.getPrototypeOf(Uint8Array))) {
-          const rawRes = result.body as AsyncIterable<bedrock.ResponseStream>;
-
-          let streamedContent = "";
-          for await (const value of rawRes) {
-            // Convert it to a JSON String
-            const jsonString = new TextDecoder().decode(value.chunk?.bytes);
-            // Parse the JSON string
-            const parsedResponse = JSON.parse(jsonString);
-
-            if ("amazon-bedrock-invocationMetrics" in parsedResponse) {
-              span.setAttribute(
-                SpanAttributes.LLM_USAGE_PROMPT_TOKENS,
-                parsedResponse["amazon-bedrock-invocationMetrics"][
-                  "inputTokenCount"
-                ],
-              );
-              span.setAttribute(
-                SpanAttributes.LLM_USAGE_COMPLETION_TOKENS,
-                parsedResponse["amazon-bedrock-invocationMetrics"][
-                  "outputTokenCount"
-                ],
-              );
-
-              span.setAttribute(
-                SpanAttributes.LLM_USAGE_TOTAL_TOKENS,
-                parsedResponse["amazon-bedrock-invocationMetrics"][
-                  "inputTokenCount"
-                ] +
+    try {
+      if ("body" in result) {
+        const attributes =
+          "attributes" in span
+            ? (span["attributes"] as Record<string, any>)
+            : {};
+
+        if (SpanAttributes.LLM_VENDOR in attributes) {
+          if (!(result.body instanceof Object.getPrototypeOf(Uint8Array))) {
+            const rawRes = result.body as AsyncIterable<bedrock.ResponseStream>;
+
+            let streamedContent = "";
+            for await (const value of rawRes) {
+              // Convert it to a JSON String
+              const jsonString = new TextDecoder().decode(value.chunk?.bytes);
+              // Parse the JSON string
+              const parsedResponse = JSON.parse(jsonString);
+
+              if ("amazon-bedrock-invocationMetrics" in parsedResponse) {
+                span.setAttribute(
+                  SpanAttributes.LLM_USAGE_PROMPT_TOKENS,
+                  parsedResponse["amazon-bedrock-invocationMetrics"][
+                    "inputTokenCount"
+                  ],
+                );
+                span.setAttribute(
+                  SpanAttributes.LLM_USAGE_COMPLETION_TOKENS,
                   parsedResponse["amazon-bedrock-invocationMetrics"][
                     "outputTokenCount"
                   ],
+                );
+
+                span.setAttribute(
+                  SpanAttributes.LLM_USAGE_TOTAL_TOKENS,
+                  parsedResponse["amazon-bedrock-invocationMetrics"][
+                    "inputTokenCount"
+                  ] +
+                    parsedResponse["amazon-bedrock-invocationMetrics"][
+                      "outputTokenCount"
+                    ],
+                );
+              }
+
+              let responseAttributes = this._setResponseAttributes(
+                attributes[SpanAttributes.LLM_VENDOR],
+                parsedResponse,
+                true,
               );
+
+              // ! NOTE: This makes sure the content always has all streamed chunks
+              if (this._shouldSendPrompts()) {
+                // Update local value with attribute value that was set by _setResponseAttributes
+                streamedContent +=
+                  responseAttributes[
+                    `${SpanAttributes.LLM_COMPLETIONS}.0.content`
+                  ];
+                // re-assign the new value to responseAttributes
+                responseAttributes = {
+                  ...responseAttributes,
+                  [`${SpanAttributes.LLM_COMPLETIONS}.0.content`]:
+                    streamedContent,
+                };
+              }
+
+              span.setAttributes(responseAttributes);
             }
+          } else if (result.body instanceof Object.getPrototypeOf(Uint8Array)) {
+            // Convert it to a JSON String
+            const jsonString = new TextDecoder().decode(
+              result.body as Uint8Array,
+            );
+            // Parse the JSON string
+            const parsedResponse = JSON.parse(jsonString);
 
-            let responseAttributes = this._setResponseAttributes(
+            const responseAttributes = this._setResponseAttributes(
               attributes[SpanAttributes.LLM_VENDOR],
               parsedResponse,
-              true,
             );
 
-            // ! NOTE: This make sure the content always have all streamed chunks
-            if (this._shouldSendPrompts()) {
-              // Update local value with attribute value that was set by _setResponseAttributes
-              streamedContent +=
-                responseAttributes[
-                  `${SpanAttributes.LLM_COMPLETIONS}.0.content`
-                ];
-              // re-assign the new value to responseAttributes
-              responseAttributes = {
-                ...responseAttributes,
-                [`${SpanAttributes.LLM_COMPLETIONS}.0.content`]:
-                  streamedContent,
-              };
-            }
-
             span.setAttributes(responseAttributes);
           }
-        } else if (result.body instanceof Object.getPrototypeOf(Uint8Array)) {
-          // Convert it to a JSON String
-          const jsonString = new TextDecoder().decode(
-            result.body as Uint8Array,
-          );
-          // Parse the JSON string
-          const parsedResponse = JSON.parse(jsonString);
-
-          const responseAttributes = this._setResponseAttributes(
-            attributes[SpanAttributes.LLM_VENDOR],
-            parsedResponse,
-          );
-
-          span.setAttributes(responseAttributes);
         }
       }
+    } catch (e) {
+      this._diag.warn(e);
+      this._config.exceptionLogger?.(e);
     }
 
     span.setStatus({ code: SpanStatusCode.OK });
diff --git a/packages/instrumentation-bedrock/src/types.ts b/packages/instrumentation-bedrock/src/types.ts
index 335091a8..e842aaa5 100644
--- a/packages/instrumentation-bedrock/src/types.ts
+++ b/packages/instrumentation-bedrock/src/types.ts
@@ -6,4 +6,9 @@ export interface BedrockInstrumentationConfig extends InstrumentationConfig {
    * @default true
    */
   traceContent?: boolean;
+
+  /**
+   * A custom logger to log any exceptions that happen during span creation.
+   */
+  exceptionLogger?: (e: Error) => void;
 }
diff --git a/packages/instrumentation-cohere/CHANGELOG.md b/packages/instrumentation-cohere/CHANGELOG.md
index b8c5d8ff..36ce5d61 100644
--- a/packages/instrumentation-cohere/CHANGELOG.md
+++ b/packages/instrumentation-cohere/CHANGELOG.md
@@ -3,6 +3,12 @@
 All notable changes to this project will be documented in this file.
 See [Conventional Commits](https://conventionalcommits.org) for commit guidelines.
 
+## [0.6.1](https://github.com/traceloop/openllmetry-js/compare/v0.6.0...v0.6.1) (2024-04-22)
+
+### Bug Fixes
+
+- handle exceptions ([#214](https://github.com/traceloop/openllmetry-js/issues/214)) ([65f9be4](https://github.com/traceloop/openllmetry-js/commit/65f9be4fdcaa40f5bfd6c1fe3edc60910b4af894))
+
 # [0.6.0](https://github.com/traceloop/openllmetry-js/compare/v0.5.29...v0.6.0) (2024-04-05)
 
 **Note:** Version bump only for package @traceloop/instrumentation-cohere
diff --git a/packages/instrumentation-cohere/package.json b/packages/instrumentation-cohere/package.json
index 2ef4c2b9..57018283 100644
--- a/packages/instrumentation-cohere/package.json
+++ b/packages/instrumentation-cohere/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@traceloop/instrumentation-cohere",
-  "version": "0.6.0",
+  "version": "0.6.1",
   "description": "Cohere Instrumentation",
   "main": "dist/index.js",
   "module": "dist/index.mjs",
diff --git a/packages/instrumentation-cohere/src/instrumentation.ts b/packages/instrumentation-cohere/src/instrumentation.ts
index 9404ed37..5a612c33 100644
--- a/packages/instrumentation-cohere/src/instrumentation.ts
+++ b/packages/instrumentation-cohere/src/instrumentation.ts
@@ -220,48 +220,55 @@ export class CohereInstrumentation extends InstrumentationBase<any> {
       [SpanAttributes.LLM_REQUEST_TYPE]: this._getLlmRequestTypeByMethod(type),
     };
 
-    const model = params.model ?? "command";
-    attributes[SpanAttributes.LLM_REQUEST_MODEL] = model;
-    attributes[SpanAttributes.LLM_REQUEST_MODEL] = model;
-
-    if (!("query" in params)) {
-      attributes[SpanAttributes.LLM_TOP_P] = params.p;
-      attributes[SpanAttributes.LLM_TOP_K] = params.k;
-      attributes[SpanAttributes.LLM_TEMPERATURE] = params.temperature;
-      attributes[SpanAttributes.LLM_FREQUENCY_PENALTY] =
-        params.frequencyPenalty;
-      attributes[SpanAttributes.LLM_PRESENCE_PENALTY] = params.presencePenalty;
-      attributes[SpanAttributes.LLM_REQUEST_MAX_TOKENS] = params.maxTokens;
-    } else {
-      attributes["topN"] = params["topN"];
-      attributes["maxChunksPerDoc"] = params["maxChunksPerDoc"];
-    }
+    try {
+      const model = params.model ?? "command";
+      attributes[SpanAttributes.LLM_REQUEST_MODEL] = model;
+      attributes[SpanAttributes.LLM_REQUEST_MODEL] = model;
+
+      if (!("query" in params)) {
+        attributes[SpanAttributes.LLM_TOP_P] = params.p;
+        attributes[SpanAttributes.LLM_TOP_K] = params.k;
+        attributes[SpanAttributes.LLM_TEMPERATURE] = params.temperature;
+        attributes[SpanAttributes.LLM_FREQUENCY_PENALTY] =
+          params.frequencyPenalty;
+        attributes[SpanAttributes.LLM_PRESENCE_PENALTY] =
+          params.presencePenalty;
+        attributes[SpanAttributes.LLM_REQUEST_MAX_TOKENS] = params.maxTokens;
+      } else {
+        attributes["topN"] = params["topN"];
+        attributes["maxChunksPerDoc"] = params["maxChunksPerDoc"];
+      }
 
-    if (this._shouldSendPrompts()) {
-      if (type === "completion" && "prompt" in params) {
-        attributes[`${SpanAttributes.LLM_PROMPTS}.0.role`] = "user";
-        attributes[`${SpanAttributes.LLM_PROMPTS}.0.user`] = params.prompt;
-      } else if (type === "chat" && "message" in params) {
-        params.chatHistory?.forEach((msg, index) => {
-          attributes[`${SpanAttributes.LLM_PROMPTS}.${index}.role`] = msg.role;
-          attributes[`${SpanAttributes.LLM_PROMPTS}.${index}.user`] =
-            msg.message;
-        });
+      if (this._shouldSendPrompts()) {
+        if (type === "completion" && "prompt" in params) {
+          attributes[`${SpanAttributes.LLM_PROMPTS}.0.role`] = "user";
+          attributes[`${SpanAttributes.LLM_PROMPTS}.0.user`] = params.prompt;
+        } else if (type === "chat" && "message" in params) {
+          params.chatHistory?.forEach((msg, index) => {
+            attributes[`${SpanAttributes.LLM_PROMPTS}.${index}.role`] =
+              msg.role;
+            attributes[`${SpanAttributes.LLM_PROMPTS}.${index}.user`] =
+              msg.message;
+          });
 
-        attributes[
-          `${SpanAttributes.LLM_PROMPTS}.${params.chatHistory?.length ?? 0}.role`
-        ] = "user";
-        attributes[
-          `${SpanAttributes.LLM_PROMPTS}.${params.chatHistory?.length ?? 0}.user`
-        ] = params.message;
-      } else if (type === "rerank" && "query" in params) {
-        attributes[`${SpanAttributes.LLM_PROMPTS}.0.role`] = "user";
-        attributes[`${SpanAttributes.LLM_PROMPTS}.0.user`] = params.query;
-        params.documents.forEach((doc, index) => {
-          attributes[`documents.${index}.index`] =
-            typeof doc === "string" ? doc : doc.text;
-        });
+          attributes[
+            `${SpanAttributes.LLM_PROMPTS}.${params.chatHistory?.length ?? 0}.role`
+          ] = "user";
+          attributes[
+            `${SpanAttributes.LLM_PROMPTS}.${params.chatHistory?.length ?? 0}.user`
+          ] = params.message;
+        } else if (type === "rerank" && "query" in params) {
+          attributes[`${SpanAttributes.LLM_PROMPTS}.0.role`] = "user";
+          attributes[`${SpanAttributes.LLM_PROMPTS}.0.user`] = params.query;
+          params.documents.forEach((doc, index) => {
+            attributes[`documents.${index}.index`] =
+              typeof doc === "string" ? doc : doc.text;
+          });
+        }
       }
+    } catch (e) {
+      this._diag.warn(e);
+      this._config.exceptionLogger?.(e);
     }
 
     return this.tracer.startSpan(`cohere.${type}`, {
@@ -343,41 +350,46 @@ export class CohereInstrumentation extends InstrumentationBase<any> {
     span: Span,
     result: cohere.Cohere.RerankResponse,
   ) {
-    if ("meta" in result) {
-      if (result.meta?.billedUnits?.searchUnits !== undefined) {
-        span.setAttribute(
-          SpanAttributes.LLM_USAGE_TOTAL_TOKENS,
-          result.meta?.billedUnits?.searchUnits,
-        );
-      }
-
-      if (this._shouldSendPrompts()) {
-        result.results.forEach((each, idx) => {
+    try {
+      if ("meta" in result) {
+        if (result.meta?.billedUnits?.searchUnits !== undefined) {
           span.setAttribute(
-            `${SpanAttributes.LLM_COMPLETIONS}.${idx}.relevanceScore`,
-            each.relevanceScore,
+            SpanAttributes.LLM_USAGE_TOTAL_TOKENS,
+            result.meta?.billedUnits?.searchUnits,
           );
+        }
+
+        if (this._shouldSendPrompts()) {
+          result.results.forEach((each, idx) => {
+            span.setAttribute(
+              `${SpanAttributes.LLM_COMPLETIONS}.${idx}.relevanceScore`,
+              each.relevanceScore,
+            );
 
-          if (each.document && each.document?.text) {
+            if (each.document && each.document?.text) {
+              span.setAttribute(
+                `${SpanAttributes.LLM_COMPLETIONS}.${idx}.content`,
+                each.document.text,
+              );
+            }
+          });
+        } else {
+          result.results.forEach((each, idx) => {
             span.setAttribute(
               `${SpanAttributes.LLM_COMPLETIONS}.${idx}.content`,
-              each.document.text,
+              each.index,
             );
-          }
-        });
-      } else {
-        result.results.forEach((each, idx) => {
-          span.setAttribute(
-            `${SpanAttributes.LLM_COMPLETIONS}.${idx}.content`,
-            each.index,
-          );
 
-          span.setAttribute(
-            `${SpanAttributes.LLM_COMPLETIONS}.${idx}.relevanceScore`,
-            each.relevanceScore,
-          );
-        });
+            span.setAttribute(
+              `${SpanAttributes.LLM_COMPLETIONS}.${idx}.relevanceScore`,
+              each.relevanceScore,
+            );
+          });
+        }
       }
+    } catch (e) {
+      this._diag.warn(e);
+      this._config.exceptionLogger?.(e);
     }
   }
 
@@ -385,139 +397,153 @@ export class CohereInstrumentation extends InstrumentationBase<any> {
     span: Span,
     result: cohere.Cohere.NonStreamedChatResponse,
   ) {
-    if ("token_count" in result && typeof result.token_count === "object") {
-      if (
-        result.token_count &&
-        "prompt_tokens" in result.token_count &&
-        typeof result.token_count.prompt_tokens === "number"
-      ) {
-        span.setAttribute(
-          SpanAttributes.LLM_USAGE_PROMPT_TOKENS,
-          result.token_count?.prompt_tokens,
-        );
-      }
+    try {
+      if ("token_count" in result && typeof result.token_count === "object") {
+        if (
+          result.token_count &&
+          "prompt_tokens" in result.token_count &&
+          typeof result.token_count.prompt_tokens === "number"
+        ) {
+          span.setAttribute(
+            SpanAttributes.LLM_USAGE_PROMPT_TOKENS,
+            result.token_count?.prompt_tokens,
+          );
+        }
 
-      if (
-        result.token_count &&
-        "response_tokens" in result.token_count &&
-        typeof result.token_count.response_tokens === "number"
-      ) {
-        span.setAttribute(
-          SpanAttributes.LLM_USAGE_COMPLETION_TOKENS,
-          result.token_count?.response_tokens,
-        );
+        if (
+          result.token_count &&
+          "response_tokens" in result.token_count &&
+          typeof result.token_count.response_tokens === "number"
+        ) {
+          span.setAttribute(
+            SpanAttributes.LLM_USAGE_COMPLETION_TOKENS,
+            result.token_count?.response_tokens,
+          );
+        }
+
+        if (
+          result.token_count &&
+          "total_tokens" in result.token_count &&
+          typeof result.token_count.total_tokens === "number"
+        ) {
+          span.setAttribute(
+            SpanAttributes.LLM_USAGE_TOTAL_TOKENS,
+            result.token_count?.total_tokens,
+          );
+        }
       }
 
-      if (
-        result.token_count &&
-        "total_tokens" in result.token_count &&
-        typeof result.token_count.total_tokens === "number"
-      ) {
+      if (this._shouldSendPrompts()) {
         span.setAttribute(
-          SpanAttributes.LLM_USAGE_TOTAL_TOKENS,
-          result.token_count?.total_tokens,
+          `${SpanAttributes.LLM_COMPLETIONS}.0.role`,
+          "assistant",
         );
-      }
-    }
-
-    if (this._shouldSendPrompts()) {
-      span.setAttribute(
-        `${SpanAttributes.LLM_COMPLETIONS}.0.role`,
-        "assistant",
-      );
-      span.setAttribute(
-        `${SpanAttributes.LLM_COMPLETIONS}.0.content`,
-        result.text,
-      );
-
-      if (result.searchQueries?.[0].text)
         span.setAttribute(
-          `${SpanAttributes.LLM_COMPLETIONS}.0.searchQuery`,
-          result.searchQueries?.[0].text,
+          `${SpanAttributes.LLM_COMPLETIONS}.0.content`,
+          result.text,
         );
 
-      if (result.searchResults?.length) {
-        result.searchResults.forEach((searchResult, index) => {
+        if (result.searchQueries?.[0].text) {
           span.setAttribute(
-            `${SpanAttributes.LLM_COMPLETIONS}.0.searchResult.${index}.text`,
-            searchResult.searchQuery.text,
+            `${SpanAttributes.LLM_COMPLETIONS}.0.searchQuery`,
+            result.searchQueries?.[0].text,
           );
-          span.setAttribute(
-            `${SpanAttributes.LLM_COMPLETIONS}.0.searchResult.${index}.connector`,
-            searchResult.connector.id,
-          );
-        });
+        }
+
+        if (result.searchResults?.length) {
+          result.searchResults.forEach((searchResult, index) => {
+            span.setAttribute(
+              `${SpanAttributes.LLM_COMPLETIONS}.0.searchResult.${index}.text`,
+              searchResult.searchQuery.text,
+            );
+            span.setAttribute(
+              `${SpanAttributes.LLM_COMPLETIONS}.0.searchResult.${index}.connector`,
+              searchResult.connector.id,
+            );
+          });
+        }
       }
-    }
 
-    if ("finishReason" in result && typeof result.finishReason === "string")
-      span.setAttribute(
-        `${SpanAttributes.LLM_COMPLETIONS}.0.finish_reason`,
-        result.finishReason,
-      );
+      if ("finishReason" in result && typeof result.finishReason === "string") {
+        span.setAttribute(
+          `${SpanAttributes.LLM_COMPLETIONS}.0.finish_reason`,
+          result.finishReason,
+        );
+      }
+    } catch (e) {
+      this._diag.warn(e);
+      this._config.exceptionLogger?.(e);
+    }
   }
 
   private _setResponseSpanForGenerate(
     span: Span,
     result: cohere.Cohere.Generation | cohere.Cohere.GenerateStreamEndResponse,
   ) {
-    if (result && "meta" in result) {
-      if (typeof result.meta?.billedUnits?.inputTokens === "number") {
+    try {
+      if (result && "meta" in result) {
+        if (typeof result.meta?.billedUnits?.inputTokens === "number") {
+          span.setAttribute(
+            SpanAttributes.LLM_USAGE_PROMPT_TOKENS,
+            result.meta?.billedUnits?.inputTokens,
+          );
+        }
+
+        if (typeof result.meta?.billedUnits?.outputTokens === "number") {
+          span.setAttribute(
+            SpanAttributes.LLM_USAGE_COMPLETION_TOKENS,
+            result.meta?.billedUnits?.outputTokens,
+          );
+        }
+
+        if (
+          typeof result.meta?.billedUnits?.inputTokens === "number" &&
+          typeof result.meta?.billedUnits?.outputTokens === "number"
+        ) {
+          span.setAttribute(
+            SpanAttributes.LLM_USAGE_TOTAL_TOKENS,
+            result.meta?.billedUnits?.inputTokens +
+              result.meta?.billedUnits?.outputTokens,
+          );
+        }
+      }
+
+      if (this._shouldSendPrompts() && result.generations) {
         span.setAttribute(
-          SpanAttributes.LLM_USAGE_PROMPT_TOKENS,
-          result.meta?.billedUnits?.inputTokens,
+          `${SpanAttributes.LLM_COMPLETIONS}.0.role`,
+          "assistant",
+        );
+        span.setAttribute(
+          `${SpanAttributes.LLM_COMPLETIONS}.0.content`,
+          result.generations[0].text,
         );
       }
 
-      if (typeof result.meta?.billedUnits?.outputTokens === "number") {
+      if (
+        result.generations &&
+        "finish_reason" in result.generations[0] &&
+        typeof result.generations[0].finish_reason === "string"
+      ) {
         span.setAttribute(
-          SpanAttributes.LLM_USAGE_COMPLETION_TOKENS,
-          result.meta?.billedUnits?.outputTokens,
+          `${SpanAttributes.LLM_COMPLETIONS}.0.finish_reason`,
+          result.generations[0].finish_reason,
         );
       }
 
       if (
-        typeof result.meta?.billedUnits?.inputTokens === "number" &&
-        typeof result.meta?.billedUnits?.outputTokens === "number"
+        result.generations &&
+        "finishReason" in result.generations[0] &&
+        typeof result.generations[0].finishReason === "string"
       ) {
         span.setAttribute(
-          SpanAttributes.LLM_USAGE_TOTAL_TOKENS,
-          result.meta?.billedUnits?.inputTokens +
-            result.meta?.billedUnits?.outputTokens,
+          `${SpanAttributes.LLM_COMPLETIONS}.0.finish_reason`,
+          result.generations[0].finishReason,
         );
       }
+    } catch (e) {
+      this._diag.warn(e);
+      this._config.exceptionLogger?.(e);
     }
-
-    if (this._shouldSendPrompts() && result.generations) {
-      span.setAttribute(
-        `${SpanAttributes.LLM_COMPLETIONS}.0.role`,
-        "assistant",
-      );
-      span.setAttribute(
-        `${SpanAttributes.LLM_COMPLETIONS}.0.content`,
-        result.generations[0].text,
-      );
-    }
-
-    if (
-      result.generations &&
-      "finish_reason" in result.generations[0] &&
-      typeof result.generations[0].finish_reason === "string"
-    )
-      span.setAttribute(
-        `${SpanAttributes.LLM_COMPLETIONS}.0.finish_reason`,
-        result.generations[0].finish_reason,
-      );
-
-    if (
-      result.generations &&
-      "finishReason" in result.generations[0] &&
-      typeof result.generations[0].finishReason === "string"
-    )
-      span.setAttribute(
-        `${SpanAttributes.LLM_COMPLETIONS}.0.finish_reason`,
-        result.generations[0].finishReason,
-      );
   }
 
   private _getLlmRequestTypeByMethod(type: string) {
diff --git a/packages/instrumentation-cohere/src/types.ts b/packages/instrumentation-cohere/src/types.ts
index ea8e7bba..c3143bee 100644
--- a/packages/instrumentation-cohere/src/types.ts
+++ b/packages/instrumentation-cohere/src/types.ts
@@ -6,4 +6,9 @@ export interface CohereInstrumentationConfig extends InstrumentationConfig {
    * @default true
    */
   traceContent?: boolean;
+
+  /**
+   * A custom logger to log any exceptions that happen during span creation.
+   */
+  exceptionLogger?: (e: Error) => void;
 }
diff --git a/packages/instrumentation-langchain/CHANGELOG.md b/packages/instrumentation-langchain/CHANGELOG.md
index ad087caf..3937cd5c 100644
--- a/packages/instrumentation-langchain/CHANGELOG.md
+++ b/packages/instrumentation-langchain/CHANGELOG.md
@@ -3,6 +3,12 @@
 All notable changes to this project will be documented in this file.
 See [Conventional Commits](https://conventionalcommits.org) for commit guidelines.
 
+## [0.6.1](https://github.com/traceloop/openllmetry-js/compare/v0.6.0...v0.6.1) (2024-04-22)
+
+### Bug Fixes
+
+- handle exceptions ([#214](https://github.com/traceloop/openllmetry-js/issues/214)) ([65f9be4](https://github.com/traceloop/openllmetry-js/commit/65f9be4fdcaa40f5bfd6c1fe3edc60910b4af894))
+
 # [0.6.0](https://github.com/traceloop/openllmetry-js/compare/v0.5.29...v0.6.0) (2024-04-05)
 
 **Note:** Version bump only for package @traceloop/instrumentation-langchain
diff --git a/packages/instrumentation-langchain/package.json b/packages/instrumentation-langchain/package.json
index 7aa7568b..52747cf9 100644
--- a/packages/instrumentation-langchain/package.json
+++ b/packages/instrumentation-langchain/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@traceloop/instrumentation-langchain",
-  "version": "0.6.0",
+  "version": "0.6.1",
   "description": "OpenTelemetry instrumentation for LangchainJS",
   "main": "dist/index.js",
   "module": "dist/index.mjs",
diff --git a/packages/instrumentation-langchain/src/types.ts b/packages/instrumentation-langchain/src/types.ts
index 78d9fe9f..3fc27c19 100644
--- a/packages/instrumentation-langchain/src/types.ts
+++ b/packages/instrumentation-langchain/src/types.ts
@@ -6,4 +6,9 @@ export interface LangChainInstrumentationConfig extends InstrumentationConfig {
    * @default true
    */
   traceContent?: boolean;
+
+  /**
+   * A custom logger to log any exceptions that happen during span creation.
+   */
+  exceptionLogger?: (e: Error) => void;
 }
diff --git a/packages/instrumentation-langchain/src/utils.ts b/packages/instrumentation-langchain/src/utils.ts
index 17c839ca..1ea3348a 100644
--- a/packages/instrumentation-langchain/src/utils.ts
+++ b/packages/instrumentation-langchain/src/utils.ts
@@ -41,8 +41,9 @@ export function genericWrapper(
               }),
             );
           }
-        } catch {
-          /* empty */
+        } catch (e) {
+          this._diag.warn(e);
+          this._config.exceptionLogger?.(e);
         }
       }
 
@@ -75,6 +76,9 @@ export function genericWrapper(
                   );
                 }
               }
+            } catch (e) {
+              this._diag.warn(e);
+              this._config.exceptionLogger?.(e);
             } finally {
               span.end();
               resolve(result);
diff --git a/packages/instrumentation-llamaindex/CHANGELOG.md b/packages/instrumentation-llamaindex/CHANGELOG.md
index 54f89534..4d9ee165 100644
--- a/packages/instrumentation-llamaindex/CHANGELOG.md
+++ b/packages/instrumentation-llamaindex/CHANGELOG.md
@@ -3,6 +3,12 @@
 All notable changes to this project will be documented in this file.
 See [Conventional Commits](https://conventionalcommits.org) for commit guidelines.
 
+## [0.6.1](https://github.com/traceloop/openllmetry-js/compare/v0.6.0...v0.6.1) (2024-04-22)
+
+### Bug Fixes
+
+- handle exceptions ([#214](https://github.com/traceloop/openllmetry-js/issues/214)) ([65f9be4](https://github.com/traceloop/openllmetry-js/commit/65f9be4fdcaa40f5bfd6c1fe3edc60910b4af894))
+
 # [0.6.0](https://github.com/traceloop/openllmetry-js/compare/v0.5.29...v0.6.0) (2024-04-05)
 
 **Note:** Version bump only for package @traceloop/instrumentation-llamaindex
diff --git a/packages/instrumentation-llamaindex/package.json b/packages/instrumentation-llamaindex/package.json
index 876bdb89..0db42c4a 100644
--- a/packages/instrumentation-llamaindex/package.json
+++ b/packages/instrumentation-llamaindex/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@traceloop/instrumentation-llamaindex",
-  "version": "0.6.0",
+  "version": "0.6.1",
   "description": "Llamaindex Instrumentation",
   "main": "dist/index.js",
   "module": "dist/index.mjs",
diff --git a/packages/instrumentation-llamaindex/src/custom-llm-instrumentation.ts b/packages/instrumentation-llamaindex/src/custom-llm-instrumentation.ts
index 17126f92..b3c2ac3c 100644
--- a/packages/instrumentation-llamaindex/src/custom-llm-instrumentation.ts
+++ b/packages/instrumentation-llamaindex/src/custom-llm-instrumentation.ts
@@ -9,6 +9,7 @@ import {
   SpanStatusCode,
   trace,
   context,
+  DiagLogger,
 } from "@opentelemetry/api";
 import { safeExecuteInTheMiddle } from "@opentelemetry/instrumentation";
 
@@ -25,13 +26,11 @@ type AsyncResponseType =
   | AsyncIterable<llamaindex.CompletionResponse>;
 
 export class CustomLLMInstrumentation {
-  private config: LlamaIndexInstrumentationConfig;
-  private tracer: () => Tracer;
-
-  constructor(config: LlamaIndexInstrumentationConfig, tracer: () => Tracer) {
-    this.config = config;
-    this.tracer = tracer;
-  }
+  constructor(
+    private config: LlamaIndexInstrumentationConfig,
+    private diag: DiagLogger,
+    private tracer: () => Tracer,
+  ) {}
 
   chatWrapper({ className }: { className: string }) {
     // eslint-disable-next-line @typescript-eslint/no-this-alias
@@ -49,24 +48,29 @@ export class CustomLLMInstrumentation {
             kind: SpanKind.CLIENT,
           });
 
-        span.setAttribute(SpanAttributes.LLM_VENDOR, className);
-        span.setAttribute(
-          SpanAttributes.LLM_REQUEST_MODEL,
-          this.metadata.model,
-        );
-        span.setAttribute(SpanAttributes.LLM_REQUEST_TYPE, "chat");
-        span.setAttribute(SpanAttributes.LLM_TOP_P, this.metadata.topP);
-        if (shouldSendPrompts(plugin.config)) {
-          for (const messageIdx in messages) {
-            span.setAttribute(
-              `${SpanAttributes.LLM_PROMPTS}.${messageIdx}.content`,
-              messages[messageIdx].content,
-            );
-            span.setAttribute(
-              `${SpanAttributes.LLM_PROMPTS}.${messageIdx}.role`,
-              messages[messageIdx].role,
-            );
+        try {
+          span.setAttribute(SpanAttributes.LLM_VENDOR, className);
+          span.setAttribute(
+            SpanAttributes.LLM_REQUEST_MODEL,
+            this.metadata.model,
+          );
+          span.setAttribute(SpanAttributes.LLM_REQUEST_TYPE, "chat");
+          span.setAttribute(SpanAttributes.LLM_TOP_P, this.metadata.topP);
+          if (shouldSendPrompts(plugin.config)) {
+            for (const messageIdx in messages) {
+              span.setAttribute(
+                `${SpanAttributes.LLM_PROMPTS}.${messageIdx}.content`,
+                messages[messageIdx].content,
+              );
+              span.setAttribute(
+                `${SpanAttributes.LLM_PROMPTS}.${messageIdx}.role`,
+                messages[messageIdx].role,
+              );
+            }
           }
+        } catch (e) {
+          plugin.diag.warn(e);
+          plugin.config.exceptionLogger?.(e);
         }
 
         const execContext = trace.setSpan(context.active(), span);
@@ -123,18 +127,25 @@ export class CustomLLMInstrumentation {
       return result;
     }
 
-    if ((result as llamaindex.ChatResponse).message) {
-      span.setAttribute(
-        `${SpanAttributes.LLM_COMPLETIONS}.0.role`,
-        (result as llamaindex.ChatResponse).message.role,
-      );
-      span.setAttribute(
-        `${SpanAttributes.LLM_COMPLETIONS}.0.content`,
-        (result as llamaindex.ChatResponse).message.content,
-      );
-      span.setStatus({ code: SpanStatusCode.OK });
-      span.end();
+    try {
+      if ((result as llamaindex.ChatResponse).message) {
+        span.setAttribute(
+          `${SpanAttributes.LLM_COMPLETIONS}.0.role`,
+          (result as llamaindex.ChatResponse).message.role,
+        );
+        span.setAttribute(
+          `${SpanAttributes.LLM_COMPLETIONS}.0.content`,
+          (result as llamaindex.ChatResponse).message.content,
+        );
+        span.setStatus({ code: SpanStatusCode.OK });
+      }
+    } catch (e) {
+      this.diag.warn(e);
+      this.config.exceptionLogger?.(e);
     }
+
+    span.end();
+
     return result;
   }
 
diff --git a/packages/instrumentation-llamaindex/src/instrumentation.ts b/packages/instrumentation-llamaindex/src/instrumentation.ts
index 70827bfd..3019bf25 100644
--- a/packages/instrumentation-llamaindex/src/instrumentation.ts
+++ b/packages/instrumentation-llamaindex/src/instrumentation.ts
@@ -88,6 +88,7 @@ export class LlamaIndexInstrumentation extends InstrumentationBase<any> {
 
     const customLLMInstrumentation = new CustomLLMInstrumentation(
       this._config,
+      this._diag,
       () => this.tracer, // this is on purpose. Tracer may change
     );
 
diff --git a/packages/instrumentation-llamaindex/src/types.ts b/packages/instrumentation-llamaindex/src/types.ts
index a27a0150..50d47276 100644
--- a/packages/instrumentation-llamaindex/src/types.ts
+++ b/packages/instrumentation-llamaindex/src/types.ts
@@ -6,4 +6,9 @@ export interface LlamaIndexInstrumentationConfig extends InstrumentationConfig {
    * @default true
    */
   traceContent?: boolean;
+
+  /**
+   * A custom logger to log any exceptions that happen during span creation.
+   */
+  exceptionLogger?: (e: Error) => void;
 }
diff --git a/packages/instrumentation-openai/CHANGELOG.md b/packages/instrumentation-openai/CHANGELOG.md
index cb191dde..93441751 100644
--- a/packages/instrumentation-openai/CHANGELOG.md
+++ b/packages/instrumentation-openai/CHANGELOG.md
@@ -3,6 +3,12 @@
 All notable changes to this project will be documented in this file.
 See [Conventional Commits](https://conventionalcommits.org) for commit guidelines.
 
+## [0.6.1](https://github.com/traceloop/openllmetry-js/compare/v0.6.0...v0.6.1) (2024-04-22)
+
+### Bug Fixes
+
+- handle exceptions ([#214](https://github.com/traceloop/openllmetry-js/issues/214)) ([65f9be4](https://github.com/traceloop/openllmetry-js/commit/65f9be4fdcaa40f5bfd6c1fe3edc60910b4af894))
+
 # [0.6.0](https://github.com/traceloop/openllmetry-js/compare/v0.5.29...v0.6.0) (2024-04-05)
 
 **Note:** Version bump only for package @traceloop/instrumentation-openai
diff --git a/packages/instrumentation-openai/package.json b/packages/instrumentation-openai/package.json
index 9881bb66..a75269ea 100644
--- a/packages/instrumentation-openai/package.json
+++ b/packages/instrumentation-openai/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@traceloop/instrumentation-openai",
-  "version": "0.6.0",
+  "version": "0.6.1",
   "description": "OpenAI Instrumentaion",
   "main": "dist/index.js",
   "module": "dist/index.mjs",
diff --git a/packages/instrumentation-openai/src/instrumentation.ts b/packages/instrumentation-openai/src/instrumentation.ts
index e8d99d51..ef1c2af0 100644
--- a/packages/instrumentation-openai/src/instrumentation.ts
+++ b/packages/instrumentation-openai/src/instrumentation.ts
@@ -238,55 +238,62 @@ export class OpenAIInstrumentation extends InstrumentationBase<any> {
       [SpanAttributes.LLM_REQUEST_TYPE]: type,
     };
 
-    attributes[SpanAttributes.LLM_REQUEST_MODEL] = params.model;
-    if (params.max_tokens) {
-      attributes[SpanAttributes.LLM_REQUEST_MAX_TOKENS] = params.max_tokens;
-    }
-    if (params.temperature) {
-      attributes[SpanAttributes.LLM_TEMPERATURE] = params.temperature;
-    }
-    if (params.top_p) {
-      attributes[SpanAttributes.LLM_TOP_P] = params.top_p;
-    }
-    if (params.frequency_penalty) {
-      attributes[SpanAttributes.LLM_FREQUENCY_PENALTY] =
-        params.frequency_penalty;
-    }
-    if (params.presence_penalty) {
-      attributes[SpanAttributes.LLM_PRESENCE_PENALTY] = params.presence_penalty;
-    }
+    try {
+      attributes[SpanAttributes.LLM_REQUEST_MODEL] = params.model;
+      if (params.max_tokens) {
+        attributes[SpanAttributes.LLM_REQUEST_MAX_TOKENS] = params.max_tokens;
+      }
+      if (params.temperature) {
+        attributes[SpanAttributes.LLM_TEMPERATURE] = params.temperature;
+      }
+      if (params.top_p) {
+        attributes[SpanAttributes.LLM_TOP_P] = params.top_p;
+      }
+      if (params.frequency_penalty) {
+        attributes[SpanAttributes.LLM_FREQUENCY_PENALTY] =
+          params.frequency_penalty;
+      }
+      if (params.presence_penalty) {
+        attributes[SpanAttributes.LLM_PRESENCE_PENALTY] =
+          params.presence_penalty;
+      }
 
-    if (
-      params.extraAttributes !== undefined &&
-      typeof params.extraAttributes === "object"
-    ) {
-      Object.keys(params.extraAttributes).forEach((key: string) => {
-        attributes[key] = params.extraAttributes![key];
-      });
-    }
+      if (
+        params.extraAttributes !== undefined &&
+        typeof params.extraAttributes === "object"
+      ) {
+        Object.keys(params.extraAttributes).forEach((key: string) => {
+          attributes[key] = params.extraAttributes![key];
+        });
+      }
 
-    if (this._shouldSendPrompts()) {
-      if (type === "chat") {
-        params.messages.forEach((message, index) => {
-          attributes[`${SpanAttributes.LLM_PROMPTS}.${index}.role`] =
-            message.role;
-          if (typeof message.content === "string") {
-            attributes[`${SpanAttributes.LLM_PROMPTS}.${index}.content`] =
-              (message.content as string) || "";
+      if (this._shouldSendPrompts()) {
+        if (type === "chat") {
+          params.messages.forEach((message, index) => {
+            attributes[`${SpanAttributes.LLM_PROMPTS}.${index}.role`] =
+              message.role;
+            if (typeof message.content === "string") {
+              attributes[`${SpanAttributes.LLM_PROMPTS}.${index}.content`] =
+                (message.content as string) || "";
+            } else {
+              attributes[`${SpanAttributes.LLM_PROMPTS}.${index}.content`] =
+                JSON.stringify(message.content);
+            }
+          });
+        } else {
+          attributes[`${SpanAttributes.LLM_PROMPTS}.0.role`] = "user";
+          if (typeof params.prompt === "string") {
+            attributes[`${SpanAttributes.LLM_PROMPTS}.0.content`] =
+              params.prompt;
           } else {
-            attributes[`${SpanAttributes.LLM_PROMPTS}.${index}.content`] =
-              JSON.stringify(message.content);
+            attributes[`${SpanAttributes.LLM_PROMPTS}.0.content`] =
+              JSON.stringify(params.prompt);
           }
-        });
-      } else {
-        attributes[`${SpanAttributes.LLM_PROMPTS}.0.role`] = "user";
-        if (typeof params.prompt === "string") {
-          attributes[`${SpanAttributes.LLM_PROMPTS}.0.content`] = params.prompt;
-        } else {
-          attributes[`${SpanAttributes.LLM_PROMPTS}.0.content`] =
-            JSON.stringify(params.prompt);
         }
       }
+    } catch (e) {
+      this._diag.warn(e);
+      this._config.exceptionLogger?.(e);
     }
 
     return this.tracer.startSpan(`openai.${type}`, {
@@ -403,40 +410,51 @@ export class OpenAIInstrumentation extends InstrumentationBase<any> {
       for await (const chunk of await promise) {
         yield chunk;
 
-        result.id = chunk.id;
-        result.created = chunk.created;
-        result.model = chunk.model;
+        try {
+          result.id = chunk.id;
+          result.created = chunk.created;
+          result.model = chunk.model;
 
-        if (chunk.choices[0]?.finish_reason) {
-          result.choices[0].finish_reason = chunk.choices[0].finish_reason;
-        }
-        if (chunk.choices[0]?.logprobs) {
-          result.choices[0].logprobs = chunk.choices[0].logprobs;
-        }
-        if (chunk.choices[0]?.text) {
-          result.choices[0].text += chunk.choices[0].text;
+          if (chunk.choices[0]?.finish_reason) {
+            result.choices[0].finish_reason = chunk.choices[0].finish_reason;
+          }
+          if (chunk.choices[0]?.logprobs) {
+            result.choices[0].logprobs = chunk.choices[0].logprobs;
+          }
+          if (chunk.choices[0]?.text) {
+            result.choices[0].text += chunk.choices[0].text;
+          }
+        } catch (e) {
+          this._diag.warn(e);
+          this._config.exceptionLogger?.(e);
         }
       }
 
-      if (result.choices[0].logprobs) {
-        this._addLogProbsEvent(span, result.choices[0].logprobs);
-      }
+      try {
+        if (result.choices[0].logprobs) {
+          this._addLogProbsEvent(span, result.choices[0].logprobs);
+        }
 
-      if (this._config.enrichTokens) {
-        const promptTokens =
-          this.tokenCountFromString(params.prompt as string, result.model) ?? 0;
+        if (this._config.enrichTokens) {
+          const promptTokens =
+            this.tokenCountFromString(params.prompt as string, result.model) ??
+            0;
 
-        const completionTokens = this.tokenCountFromString(
-          result.choices[0].text ?? "",
-          result.model,
-        );
-        if (completionTokens) {
-          result.usage = {
-            prompt_tokens: promptTokens,
-            completion_tokens: completionTokens,
-            total_tokens: promptTokens + completionTokens,
-          };
+          const completionTokens = this.tokenCountFromString(
+            result.choices[0].text ?? "",
+            result.model,
+          );
+          if (completionTokens) {
+            result.usage = {
+              prompt_tokens: promptTokens,
+              completion_tokens: completionTokens,
+              total_tokens: promptTokens + completionTokens,
+            };
+          }
         }
+      } catch (e) {
+        this._diag.warn(e);
+        this._config.exceptionLogger?.(e);
       }
 
       this._endSpan({ span, type, result });
@@ -513,65 +531,70 @@ export class OpenAIInstrumentation extends InstrumentationBase<any> {
   }:
     | { span: Span; type: "chat"; result: ChatCompletion }
     | { span: Span; type: "completion"; result: Completion }) {
-    span.setAttribute(SpanAttributes.LLM_RESPONSE_MODEL, result.model);
-    if (result.usage) {
-      span.setAttribute(
-        SpanAttributes.LLM_USAGE_TOTAL_TOKENS,
-        result.usage?.total_tokens,
-      );
-      span.setAttribute(
-        SpanAttributes.LLM_USAGE_COMPLETION_TOKENS,
-        result.usage?.completion_tokens,
-      );
-      span.setAttribute(
-        SpanAttributes.LLM_USAGE_PROMPT_TOKENS,
-        result.usage?.prompt_tokens,
-      );
-    }
+    try {
+      span.setAttribute(SpanAttributes.LLM_RESPONSE_MODEL, result.model);
+      if (result.usage) {
+        span.setAttribute(
+          SpanAttributes.LLM_USAGE_TOTAL_TOKENS,
+          result.usage?.total_tokens,
+        );
+        span.setAttribute(
+          SpanAttributes.LLM_USAGE_COMPLETION_TOKENS,
+          result.usage?.completion_tokens,
+        );
+        span.setAttribute(
+          SpanAttributes.LLM_USAGE_PROMPT_TOKENS,
+          result.usage?.prompt_tokens,
+        );
+      }
 
-    if (this._shouldSendPrompts()) {
-      if (type === "chat") {
-        result.choices.forEach((choice, index) => {
-          span.setAttribute(
-            `${SpanAttributes.LLM_COMPLETIONS}.${index}.finish_reason`,
-            choice.finish_reason,
-          );
-          span.setAttribute(
-            `${SpanAttributes.LLM_COMPLETIONS}.${index}.role`,
-            choice.message.role,
-          );
-          span.setAttribute(
-            `${SpanAttributes.LLM_COMPLETIONS}.${index}.content`,
-            choice.message.content ?? "",
-          );
+      if (this._shouldSendPrompts()) {
+        if (type === "chat") {
+          result.choices.forEach((choice, index) => {
+            span.setAttribute(
+              `${SpanAttributes.LLM_COMPLETIONS}.${index}.finish_reason`,
+              choice.finish_reason,
+            );
+            span.setAttribute(
+              `${SpanAttributes.LLM_COMPLETIONS}.${index}.role`,
+              choice.message.role,
+            );
+            span.setAttribute(
+              `${SpanAttributes.LLM_COMPLETIONS}.${index}.content`,
+              choice.message.content ?? "",
+            );
 
-          if (choice.message.function_call) {
+            if (choice.message.function_call) {
+              span.setAttribute(
+                `${SpanAttributes.LLM_COMPLETIONS}.${index}.function_call.name`,
+                choice.message.function_call.name,
+              );
+              span.setAttribute(
+                `${SpanAttributes.LLM_COMPLETIONS}.${index}.function_call.arguments`,
+                choice.message.function_call.arguments,
+              );
+            }
+          });
+        } else {
+          result.choices.forEach((choice, index) => {
             span.setAttribute(
-              `${SpanAttributes.LLM_COMPLETIONS}.${index}.function_call.name`,
-              choice.message.function_call.name,
+              `${SpanAttributes.LLM_COMPLETIONS}.${index}.finish_reason`,
+              choice.finish_reason,
             );
             span.setAttribute(
-              `${SpanAttributes.LLM_COMPLETIONS}.${index}.function_call.arguments`,
-              choice.message.function_call.arguments,
+              `${SpanAttributes.LLM_COMPLETIONS}.${index}.role`,
+              "assistant",
             );
-          }
-        });
-      } else {
-        result.choices.forEach((choice, index) => {
-          span.setAttribute(
-            `${SpanAttributes.LLM_COMPLETIONS}.${index}.finish_reason`,
-            choice.finish_reason,
-          );
-          span.setAttribute(
-            `${SpanAttributes.LLM_COMPLETIONS}.${index}.role`,
-            "assistant",
-          );
-          span.setAttribute(
-            `${SpanAttributes.LLM_COMPLETIONS}.${index}.content`,
-            choice.text,
-          );
-        });
+            span.setAttribute(
+              `${SpanAttributes.LLM_COMPLETIONS}.${index}.content`,
+              choice.text,
+            );
+          });
+        }
       }
+    } catch (e) {
+      this._diag.warn(e);
+      this._config.exceptionLogger?.(e);
     }
 
     span.end();
@@ -599,57 +622,62 @@ export class OpenAIInstrumentation extends InstrumentationBase<any> {
       | CompletionChoice.Logprobs
       | null,
   ) {
-    let result: { token: string; logprob: number }[] = [];
+    try {
+      let result: { token: string; logprob: number }[] = [];
 
-    if (!logprobs) {
-      return;
-    }
+      if (!logprobs) {
+        return;
+      }
 
-    const chatLogprobs = logprobs as
-      | ChatCompletion.Choice.Logprobs
-      | ChatCompletionChunk.Choice.Logprobs;
-    const completionLogprobs = logprobs as CompletionChoice.Logprobs;
-    if (chatLogprobs.content) {
-      result = chatLogprobs.content.map((logprob) => {
-        return {
-          token: logprob.token,
-          logprob: logprob.logprob,
-        };
-      });
-    } else if (
-      completionLogprobs?.tokens &&
-      completionLogprobs?.token_logprobs
-    ) {
-      completionLogprobs.tokens.forEach((token, index) => {
-        const logprob = completionLogprobs.token_logprobs?.at(index);
-        if (logprob) {
-          result.push({
-            token,
-            logprob,
-          });
-        }
-      });
-    }
+      const chatLogprobs = logprobs as
+        | ChatCompletion.Choice.Logprobs
+        | ChatCompletionChunk.Choice.Logprobs;
+      const completionLogprobs = logprobs as CompletionChoice.Logprobs;
+      if (chatLogprobs.content) {
+        result = chatLogprobs.content.map((logprob) => {
+          return {
+            token: logprob.token,
+            logprob: logprob.logprob,
+          };
+        });
+      } else if (
+        completionLogprobs?.tokens &&
+        completionLogprobs?.token_logprobs
+      ) {
+        completionLogprobs.tokens.forEach((token, index) => {
+          const logprob = completionLogprobs.token_logprobs?.at(index);
+          if (logprob) {
+            result.push({
+              token,
+              logprob,
+            });
+          }
+        });
+      }
 
-    span.addEvent("logprobs", { logprobs: JSON.stringify(result) });
+      span.addEvent("logprobs", { logprobs: JSON.stringify(result) });
+    } catch (e) {
+      this._diag.warn(e);
+      this._config.exceptionLogger?.(e);
+    }
   }
 
   private _encodingCache = new Map<string, Tiktoken>();
 
   private tokenCountFromString(text: string, model: string) {
-    if (!this._encodingCache.has(model)) {
+    let encoding = this._encodingCache.get(model);
+
+    if (!encoding) {
       try {
-        const encoding = encoding_for_model(model as TiktokenModel);
+        encoding = encoding_for_model(model as TiktokenModel);
         this._encodingCache.set(model, encoding);
       } catch (e) {
-        this._diag.warn(
-          `Failed to get tiktoken encoding for model_name: ${model}, error: ${e}`,
-        );
-        return;
+        this._diag.warn(e);
+        this._config.exceptionLogger?.(e);
+        return 0;
       }
     }
 
-    const encoding = this._encodingCache.get(model);
-    return encoding!.encode(text).length;
+    return encoding.encode(text).length;
   }
 }
diff --git a/packages/instrumentation-openai/src/types.ts b/packages/instrumentation-openai/src/types.ts
index fa5ae747..a99b5212 100644
--- a/packages/instrumentation-openai/src/types.ts
+++ b/packages/instrumentation-openai/src/types.ts
@@ -12,4 +12,9 @@ export interface OpenAIInstrumentationConfig extends InstrumentationConfig {
    * @default false
    */
   enrichTokens?: boolean;
+
+  /**
+   * A custom logger to log any exceptions that happen during span creation.
+   */
+  exceptionLogger?: (e: Error) => void;
 }
diff --git a/packages/instrumentation-pinecone/CHANGELOG.md b/packages/instrumentation-pinecone/CHANGELOG.md
index 6740aa05..b24c8325 100644
--- a/packages/instrumentation-pinecone/CHANGELOG.md
+++ b/packages/instrumentation-pinecone/CHANGELOG.md
@@ -3,6 +3,12 @@
 All notable changes to this project will be documented in this file.
 See [Conventional Commits](https://conventionalcommits.org) for commit guidelines.
 
+## [0.6.1](https://github.com/traceloop/openllmetry-js/compare/v0.6.0...v0.6.1) (2024-04-22)
+
+### Bug Fixes
+
+- handle exceptions ([#214](https://github.com/traceloop/openllmetry-js/issues/214)) ([65f9be4](https://github.com/traceloop/openllmetry-js/commit/65f9be4fdcaa40f5bfd6c1fe3edc60910b4af894))
+
 # [0.6.0](https://github.com/traceloop/openllmetry-js/compare/v0.5.29...v0.6.0) (2024-04-05)
 
 **Note:** Version bump only for package @traceloop/instrumentation-pinecone
diff --git a/packages/instrumentation-pinecone/package.json b/packages/instrumentation-pinecone/package.json
index 692c79bc..7b5d9308 100644
--- a/packages/instrumentation-pinecone/package.json
+++ b/packages/instrumentation-pinecone/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@traceloop/instrumentation-pinecone",
-  "version": "0.6.0",
+  "version": "0.6.1",
   "description": "OpenTelemetry instrumentation for pinecone vector DB",
   "main": "dist/index.js",
   "module": "dist/index.mjs",
diff --git a/packages/instrumentation-pinecone/src/instrumentation.ts b/packages/instrumentation-pinecone/src/instrumentation.ts
index 2021a74f..74c853cb 100644
--- a/packages/instrumentation-pinecone/src/instrumentation.ts
+++ b/packages/instrumentation-pinecone/src/instrumentation.ts
@@ -18,7 +18,6 @@ import type * as pinecone from "@pinecone-database/pinecone";
 import { context, trace, Tracer, SpanStatusCode } from "@opentelemetry/api";
 import {
   InstrumentationBase,
-  InstrumentationConfig,
   InstrumentationModuleDefinition,
   InstrumentationNodeModuleDefinition,
   safeExecuteInTheMiddle,
@@ -28,12 +27,17 @@ import {
   EventAttributes,
 } from "@traceloop/ai-semantic-conventions";
 import { version } from "../package.json";
+import { PineconeInstrumentationConfig } from "./types";
 
 export class PineconeInstrumentation extends InstrumentationBase<any> {
-  constructor(config: InstrumentationConfig = {}) {
+  constructor(config: PineconeInstrumentationConfig = {}) {
     super("@traceloop/instrumentation-pinecone", version, config);
   }
 
+  public override setConfig(config: PineconeInstrumentationConfig = {}) {
+    super.setConfig(config);
+  }
+
   public manuallyInstrument(module: typeof pinecone) {
     this.patch(module);
   }
@@ -145,33 +149,38 @@ export class PineconeInstrumentation extends InstrumentationBase<any> {
       return function method(this: any, ...args: unknown[]) {
         const span = tracer.startSpan(`pinecone.query`);
         const execContext = trace.setSpan(context.active(), span);
-        const options = args[0] as pinecone.QueryOptions;
-        span.setAttribute(SpanAttributes.VECTOR_DB_VENDOR, "Pinecone");
-        const query_request_event = span.addEvent("pinecone.query.request");
-        query_request_event.setAttribute(
-          EventAttributes.VECTOR_DB_QUERY_TOP_K,
-          options.topK,
-        );
-        query_request_event.setAttribute(
-          EventAttributes.VECTOR_DB_QUERY_INCLUDE_VALUES,
-          options.includeValues || false,
-        );
-        query_request_event.setAttribute(
-          EventAttributes.VECTOR_DB_QUERY_INCLUDE_METADATA,
-          options.includeMetadata || false,
-        );
-        query_request_event.setAttribute(
-          EventAttributes.VECTOR_DB_QUERY_ID,
-          (options as pinecone.QueryByRecordId).id,
-        );
-        query_request_event.setAttribute(
-          EventAttributes.VECTOR_DB_QUERY_EMBEDDINGS_VECTOR,
-          (options as pinecone.QueryByVectorValues).vector,
-        );
-        query_request_event.setAttribute(
-          EventAttributes.VECTOR_DB_QUERY_METADATA_FILTER,
-          JSON.stringify(options.filter ? options.filter : {}),
-        );
+        try {
+          const options = args[0] as pinecone.QueryOptions;
+          span.setAttribute(SpanAttributes.VECTOR_DB_VENDOR, "Pinecone");
+          const query_request_event = span.addEvent("pinecone.query.request");
+          query_request_event.setAttribute(
+            EventAttributes.VECTOR_DB_QUERY_TOP_K,
+            options.topK,
+          );
+          query_request_event.setAttribute(
+            EventAttributes.VECTOR_DB_QUERY_INCLUDE_VALUES,
+            options.includeValues || false,
+          );
+          query_request_event.setAttribute(
+            EventAttributes.VECTOR_DB_QUERY_INCLUDE_METADATA,
+            options.includeMetadata || false,
+          );
+          query_request_event.setAttribute(
+            EventAttributes.VECTOR_DB_QUERY_ID,
+            (options as pinecone.QueryByRecordId).id,
+          );
+          query_request_event.setAttribute(
+            EventAttributes.VECTOR_DB_QUERY_EMBEDDINGS_VECTOR,
+            (options as pinecone.QueryByVectorValues).vector,
+          );
+          query_request_event.setAttribute(
+            EventAttributes.VECTOR_DB_QUERY_METADATA_FILTER,
+            JSON.stringify(options.filter ? options.filter : {}),
+          );
+        } catch (e) {
+          this._diag.warn(e);
+          this._config.exceptionLogger?.(e);
+        }
 
         const execPromise = safeExecuteInTheMiddle(
           () => {
@@ -189,71 +198,78 @@ export class PineconeInstrumentation extends InstrumentationBase<any> {
           .then((result: any) => {
             return new Promise((resolve) => {
               span.setStatus({ code: SpanStatusCode.OK });
-              const result_obj =
-                result as pinecone.QueryResponse<pinecone.RecordMetadata>;
-              const query_result_event = span.addEvent("pinecone.query.result");
-              query_result_event.setAttribute(
-                EventAttributes.VECTOR_DB_QUERY_RESULT_NAMESPACE,
-                result_obj.namespace,
-              );
-              if (result_obj.usage?.readUnits !== undefined) {
-                query_result_event.setAttribute(
-                  EventAttributes.VECTOR_DB_QUERY_RESULT_READ_UNITS_CONSUMED,
-                  result_obj.usage?.readUnits,
+              try {
+                const result_obj =
+                  result as pinecone.QueryResponse<pinecone.RecordMetadata>;
+                const query_result_event = span.addEvent(
+                  "pinecone.query.result",
                 );
-              }
-              query_result_event.setAttribute(
-                EventAttributes.VECTOR_DB_QUERY_RESULT_MATCHES_LENGTH,
-                result_obj.matches.length,
-              );
-              for (let i = 0; i < result_obj.matches.length; i++) {
-                const match = result_obj.matches[i];
-                const query_result_match_event = query_result_event.addEvent(
-                  `pinecone.query.result.${i}`,
+                query_result_event.setAttribute(
+                  EventAttributes.VECTOR_DB_QUERY_RESULT_NAMESPACE,
+                  result_obj.namespace,
                 );
-                if (match.score !== undefined) {
-                  query_result_match_event.setAttribute(
-                    EventAttributes.VECTOR_DB_QUERY_RESULT_SCORE.replace(
-                      "{i}",
-                      i.toString(),
-                    ),
-                    match.score,
+                if (result_obj.usage?.readUnits !== undefined) {
+                  query_result_event.setAttribute(
+                    EventAttributes.VECTOR_DB_QUERY_RESULT_READ_UNITS_CONSUMED,
+                    result_obj.usage?.readUnits,
                   );
                 }
-                if (match.sparseValues !== undefined) {
+                query_result_event.setAttribute(
+                  EventAttributes.VECTOR_DB_QUERY_RESULT_MATCHES_LENGTH,
+                  result_obj.matches.length,
+                );
+                for (let i = 0; i < result_obj.matches.length; i++) {
+                  const match = result_obj.matches[i];
+                  const query_result_match_event = query_result_event.addEvent(
+                    `pinecone.query.result.${i}`,
+                  );
+                  if (match.score !== undefined) {
+                    query_result_match_event.setAttribute(
+                      EventAttributes.VECTOR_DB_QUERY_RESULT_SCORE.replace(
+                        "{i}",
+                        i.toString(),
+                      ),
+                      match.score,
+                    );
+                  }
+                  if (match.sparseValues !== undefined) {
+                    query_result_match_event.setAttribute(
+                      EventAttributes.VECTOR_DB_QUERY_RESULT_SPARSE_INDICES.replace(
+                        "{i}",
+                        i.toString(),
+                      ),
+                      match.sparseValues?.indices,
+                    );
+                    query_result_match_event.setAttribute(
+                      EventAttributes.VECTOR_DB_QUERY_RESULT_SPARSE_VALUES.replace(
+                        "{i}",
+                        i.toString(),
+                      ),
+                      match.sparseValues?.values,
+                    );
+                  }
                   query_result_match_event.setAttribute(
-                    EventAttributes.VECTOR_DB_QUERY_RESULT_SPARSE_INDICES.replace(
+                    EventAttributes.VECTOR_DB_QUERY_RESULT_ID.replace(
                       "{i}",
                       i.toString(),
                     ),
-                    match.sparseValues?.indices,
+                    match.id,
                   );
                   query_result_match_event.setAttribute(
-                    EventAttributes.VECTOR_DB_QUERY_RESULT_SPARSE_VALUES.replace(
+                    EventAttributes.VECTOR_DB_QUERY_RESULT_VALUES.replace(
                       "{i}",
                       i.toString(),
                     ),
-                    match.sparseValues?.values,
+                    match.values,
+                  );
+                  query_result_match_event.addEvent(
+                    `pinecone.query.result.${i}.metadata`,
+                    match.metadata,
                   );
                 }
-                query_result_match_event.setAttribute(
-                  EventAttributes.VECTOR_DB_QUERY_RESULT_ID.replace(
-                    "{i}",
-                    i.toString(),
-                  ),
-                  match.id,
-                );
-                query_result_match_event.setAttribute(
-                  EventAttributes.VECTOR_DB_QUERY_RESULT_VALUES.replace(
-                    "{i}",
-                    i.toString(),
-                  ),
-                  match.values,
-                );
-                query_result_match_event.addEvent(
-                  `pinecone.query.result.${i}.metadata`,
-                  match.metadata,
-                );
+              } catch (e) {
+                this._diag.warn(e);
+                this._config.exceptionLogger?.(e);
               }
               span.end();
               resolve(result);
diff --git a/packages/instrumentation-pinecone/src/types.ts b/packages/instrumentation-pinecone/src/types.ts
new file mode 100644
index 00000000..ed8b449d
--- /dev/null
+++ b/packages/instrumentation-pinecone/src/types.ts
@@ -0,0 +1,8 @@
+import { InstrumentationConfig } from "@opentelemetry/instrumentation";
+
+export interface PineconeInstrumentationConfig extends InstrumentationConfig {
+  /**
+   * A custom logger to log any exceptions that happen during span creation.
+   */
+  exceptionLogger?: (e: Error) => void;
+}
diff --git a/packages/instrumentation-vertexai/CHANGELOG.md b/packages/instrumentation-vertexai/CHANGELOG.md
index 0ddd1be8..f84e3665 100644
--- a/packages/instrumentation-vertexai/CHANGELOG.md
+++ b/packages/instrumentation-vertexai/CHANGELOG.md
@@ -3,6 +3,12 @@
 All notable changes to this project will be documented in this file.
 See [Conventional Commits](https://conventionalcommits.org) for commit guidelines.
 
+## [0.6.1](https://github.com/traceloop/openllmetry-js/compare/v0.6.0...v0.6.1) (2024-04-22)
+
+### Bug Fixes
+
+- handle exceptions ([#214](https://github.com/traceloop/openllmetry-js/issues/214)) ([65f9be4](https://github.com/traceloop/openllmetry-js/commit/65f9be4fdcaa40f5bfd6c1fe3edc60910b4af894))
+
 # [0.6.0](https://github.com/traceloop/openllmetry-js/compare/v0.5.29...v0.6.0) (2024-04-05)
 
 **Note:** Version bump only for package @traceloop/instrumentation-vertexai
diff --git a/packages/instrumentation-vertexai/package.json b/packages/instrumentation-vertexai/package.json
index 6a5aad7a..3bc19e1e 100644
--- a/packages/instrumentation-vertexai/package.json
+++ b/packages/instrumentation-vertexai/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@traceloop/instrumentation-vertexai",
-  "version": "0.6.0",
+  "version": "0.6.1",
   "description": "Google's VertexAI Instrumentation",
   "main": "dist/index.js",
   "module": "dist/index.mjs",
diff --git a/packages/instrumentation-vertexai/src/aiplatform-instrumentation.ts b/packages/instrumentation-vertexai/src/aiplatform-instrumentation.ts
index 2612adcb..1b8a12d7 100644
--- a/packages/instrumentation-vertexai/src/aiplatform-instrumentation.ts
+++ b/packages/instrumentation-vertexai/src/aiplatform-instrumentation.ts
@@ -161,58 +161,63 @@ export class AIPlatformInstrumentation extends InstrumentationBase<any> {
       [SpanAttributes.LLM_REQUEST_TYPE]: "completion",
     };
 
-    if (params !== undefined) {
-      if (params.endpoint) {
-        const model = params.endpoint.split("/").pop();
-        attributes[SpanAttributes.LLM_REQUEST_MODEL] = model;
-        attributes[SpanAttributes.LLM_RESPONSE_MODEL] = model;
-      }
-      if (params?.parameters) {
-        if (
-          params?.parameters.structValue?.fields?.maxOutputTokens.numberValue
-        ) {
-          attributes[SpanAttributes.LLM_REQUEST_MAX_TOKENS] =
-            params?.parameters.structValue?.fields?.maxOutputTokens.numberValue;
-        }
-        if (params?.parameters.structValue?.fields?.temperature.numberValue) {
-          attributes[SpanAttributes.LLM_TEMPERATURE] =
-            params?.parameters.structValue?.fields?.temperature.numberValue;
+    try {
+      if (params !== undefined) {
+        if (params.endpoint) {
+          const model = params.endpoint.split("/").pop();
+          attributes[SpanAttributes.LLM_REQUEST_MODEL] = model;
+          attributes[SpanAttributes.LLM_RESPONSE_MODEL] = model;
         }
-        if (params?.parameters.structValue?.fields?.topP.numberValue) {
-          attributes[SpanAttributes.LLM_TOP_P] =
-            params?.parameters.structValue?.fields?.topP.numberValue;
-        }
-        if (params?.parameters.structValue?.fields?.topK.numberValue) {
-          attributes[SpanAttributes.LLM_TOP_K] =
-            params?.parameters.structValue?.fields?.topK.numberValue;
+        if (params?.parameters) {
+          if (
+            params?.parameters.structValue?.fields?.maxOutputTokens.numberValue
+          ) {
+            attributes[SpanAttributes.LLM_REQUEST_MAX_TOKENS] =
+              params?.parameters.structValue?.fields?.maxOutputTokens.numberValue;
+          }
+          if (params?.parameters.structValue?.fields?.temperature.numberValue) {
+            attributes[SpanAttributes.LLM_TEMPERATURE] =
+              params?.parameters.structValue?.fields?.temperature.numberValue;
+          }
+          if (params?.parameters.structValue?.fields?.topP.numberValue) {
+            attributes[SpanAttributes.LLM_TOP_P] =
+              params?.parameters.structValue?.fields?.topP.numberValue;
+          }
+          if (params?.parameters.structValue?.fields?.topK.numberValue) {
+            attributes[SpanAttributes.LLM_TOP_K] =
+              params?.parameters.structValue?.fields?.topK.numberValue;
+          }
         }
-      }
 
-      if (
-        this._shouldSendPrompts() &&
-        params.instances &&
-        params.instances?.length !== 0
-      ) {
         if (
-          params.instances[0].structValue?.fields &&
-          "prompt" in params.instances[0].structValue.fields &&
-          params.instances[0].structValue?.fields?.prompt.stringValue
+          this._shouldSendPrompts() &&
+          params.instances &&
+          params.instances?.length !== 0
         ) {
-          attributes[`${SpanAttributes.LLM_PROMPTS}.0.role`] = "user";
-          attributes[`${SpanAttributes.LLM_PROMPTS}.0.content`] =
-            params.instances[0].structValue?.fields?.prompt.stringValue;
-        } else if (
-          params.instances[0].structValue &&
-          params.instances[0].structValue.fields?.messages.listValue
-            ?.values?.[0].structValue?.fields?.content.stringValue
-        ) {
-          attributes[`${SpanAttributes.LLM_PROMPTS}.0.role`] =
+          if (
+            params.instances[0].structValue?.fields &&
+            "prompt" in params.instances[0].structValue.fields &&
+            params.instances[0].structValue?.fields?.prompt.stringValue
+          ) {
+            attributes[`${SpanAttributes.LLM_PROMPTS}.0.role`] = "user";
+            attributes[`${SpanAttributes.LLM_PROMPTS}.0.content`] =
+              params.instances[0].structValue?.fields?.prompt.stringValue;
+          } else if (
+            params.instances[0].structValue &&
             params.instances[0].structValue.fields?.messages.listValue
-              ?.values?.[0].structValue?.fields?.author.stringValue ?? "user";
-          attributes[`${SpanAttributes.LLM_PROMPTS}.0.content`] =
-            params.instances[0].structValue.fields?.messages.listValue?.values?.[0].structValue?.fields?.content.stringValue;
+              ?.values?.[0].structValue?.fields?.content.stringValue
+          ) {
+            attributes[`${SpanAttributes.LLM_PROMPTS}.0.role`] =
+              params.instances[0].structValue.fields?.messages.listValue
+                ?.values?.[0].structValue?.fields?.author.stringValue ?? "user";
+            attributes[`${SpanAttributes.LLM_PROMPTS}.0.content`] =
+              params.instances[0].structValue.fields?.messages.listValue?.values?.[0].structValue?.fields?.content.stringValue;
+          }
         }
       }
+    } catch (e) {
+      this._diag.warn(e);
+      this._config.exceptionLogger?.(e);
     }
 
     return this.tracer.startSpan(`vertexai.completion`, {
@@ -264,89 +269,94 @@ export class AIPlatformInstrumentation extends InstrumentationBase<any> {
       object | undefined,
     ];
   }) {
-    if (result[0].model)
-      span.setAttribute(SpanAttributes.LLM_RESPONSE_MODEL, result[0].model);
+    try {
+      if (result[0].model)
+        span.setAttribute(SpanAttributes.LLM_RESPONSE_MODEL, result[0].model);
 
-    if (result) {
-      if (result[0].metadata) {
-        if (
-          typeof result[0].metadata?.structValue?.fields?.tokenMetadata
-            .structValue?.fields?.outputTokenCount.structValue?.fields
-            ?.totalTokens.numberValue === "number"
-        )
-          span.setAttribute(
-            SpanAttributes.LLM_USAGE_COMPLETION_TOKENS,
-            result[0].metadata?.structValue?.fields?.tokenMetadata.structValue
-              ?.fields?.outputTokenCount.structValue?.fields?.totalTokens
-              .numberValue,
-          );
-
-        if (
-          typeof result[0].metadata?.structValue?.fields?.tokenMetadata
-            .structValue?.fields?.inputTokenCount.structValue?.fields
-            ?.totalTokens.numberValue === "number"
-        )
-          span.setAttribute(
-            SpanAttributes.LLM_USAGE_PROMPT_TOKENS,
-            result[0].metadata?.structValue?.fields?.tokenMetadata.structValue
-              ?.fields?.inputTokenCount.structValue?.fields?.totalTokens
-              .numberValue,
-          );
-
-        if (
-          typeof result[0].metadata?.structValue?.fields?.tokenMetadata
-            .structValue?.fields?.inputTokenCount.structValue?.fields
-            ?.totalTokens.numberValue === "number" &&
-          typeof result[0].metadata?.structValue?.fields?.tokenMetadata
-            .structValue?.fields?.outputTokenCount.structValue?.fields
-            ?.totalTokens.numberValue === "number"
-        )
-          span.setAttribute(
-            SpanAttributes.LLM_USAGE_TOTAL_TOKENS,
-            result[0].metadata?.structValue?.fields?.tokenMetadata.structValue
-              ?.fields?.inputTokenCount.structValue?.fields?.totalTokens
-              .numberValue +
+      if (result) {
+        if (result[0].metadata) {
+          if (
+            typeof result[0].metadata?.structValue?.fields?.tokenMetadata
+              .structValue?.fields?.outputTokenCount.structValue?.fields
+              ?.totalTokens.numberValue === "number"
+          )
+            span.setAttribute(
+              SpanAttributes.LLM_USAGE_COMPLETION_TOKENS,
               result[0].metadata?.structValue?.fields?.tokenMetadata.structValue
                 ?.fields?.outputTokenCount.structValue?.fields?.totalTokens
                 .numberValue,
-          );
-      }
+            );
 
-      if (this._shouldSendPrompts()) {
-        result[0].predictions?.forEach((prediction, index) => {
           if (
-            prediction.structValue?.fields &&
-            "content" in prediction.structValue.fields &&
-            !!prediction.structValue?.fields?.content.stringValue
-          ) {
+            typeof result[0].metadata?.structValue?.fields?.tokenMetadata
+              .structValue?.fields?.inputTokenCount.structValue?.fields
+              ?.totalTokens.numberValue === "number"
+          )
             span.setAttribute(
-              `${SpanAttributes.LLM_COMPLETIONS}.${index}.role`,
-              "assistant",
+              SpanAttributes.LLM_USAGE_PROMPT_TOKENS,
+              result[0].metadata?.structValue?.fields?.tokenMetadata.structValue
+                ?.fields?.inputTokenCount.structValue?.fields?.totalTokens
+                .numberValue,
             );
 
+          if (
+            typeof result[0].metadata?.structValue?.fields?.tokenMetadata
+              .structValue?.fields?.inputTokenCount.structValue?.fields
+              ?.totalTokens.numberValue === "number" &&
+            typeof result[0].metadata?.structValue?.fields?.tokenMetadata
+              .structValue?.fields?.outputTokenCount.structValue?.fields
+              ?.totalTokens.numberValue === "number"
+          )
             span.setAttribute(
-              `${SpanAttributes.LLM_COMPLETIONS}.${index}.content`,
-              prediction.structValue?.fields?.content.stringValue,
-            );
-          } else if (
-            prediction.structValue?.fields &&
-            "candidates" in prediction.structValue.fields &&
-            !!prediction.structValue?.fields?.candidates.listValue?.values?.[0]
-              ?.structValue?.fields?.content.stringValue
-          ) {
-            span.setAttribute(
-              `${SpanAttributes.LLM_COMPLETIONS}.${index}.role`,
-              "assistant",
+              SpanAttributes.LLM_USAGE_TOTAL_TOKENS,
+              result[0].metadata?.structValue?.fields?.tokenMetadata.structValue
+                ?.fields?.inputTokenCount.structValue?.fields?.totalTokens
+                .numberValue +
+                result[0].metadata?.structValue?.fields?.tokenMetadata
+                  .structValue?.fields?.outputTokenCount.structValue?.fields
+                  ?.totalTokens.numberValue,
             );
+        }
 
-            span.setAttribute(
-              `${SpanAttributes.LLM_COMPLETIONS}.${index}.content`,
-              prediction.structValue?.fields?.candidates.listValue?.values?.[0]
-                ?.structValue?.fields?.content.stringValue,
-            );
-          }
-        });
+        if (this._shouldSendPrompts()) {
+          result[0].predictions?.forEach((prediction, index) => {
+            if (
+              prediction.structValue?.fields &&
+              "content" in prediction.structValue.fields &&
+              !!prediction.structValue?.fields?.content.stringValue
+            ) {
+              span.setAttribute(
+                `${SpanAttributes.LLM_COMPLETIONS}.${index}.role`,
+                "assistant",
+              );
+
+              span.setAttribute(
+                `${SpanAttributes.LLM_COMPLETIONS}.${index}.content`,
+                prediction.structValue?.fields?.content.stringValue,
+              );
+            } else if (
+              prediction.structValue?.fields &&
+              "candidates" in prediction.structValue.fields &&
+              !!prediction.structValue?.fields?.candidates.listValue
+                ?.values?.[0]?.structValue?.fields?.content.stringValue
+            ) {
+              span.setAttribute(
+                `${SpanAttributes.LLM_COMPLETIONS}.${index}.role`,
+                "assistant",
+              );
+
+              span.setAttribute(
+                `${SpanAttributes.LLM_COMPLETIONS}.${index}.content`,
+                prediction.structValue?.fields?.candidates.listValue
+                  ?.values?.[0]?.structValue?.fields?.content.stringValue,
+              );
+            }
+          });
+        }
       }
+    } catch (e) {
+      this._diag.warn(e);
+      this._config.exceptionLogger?.(e);
     }
 
     span.setStatus({ code: SpanStatusCode.OK });
diff --git a/packages/instrumentation-vertexai/src/types.ts b/packages/instrumentation-vertexai/src/types.ts
index 7055968e..b689d077 100644
--- a/packages/instrumentation-vertexai/src/types.ts
+++ b/packages/instrumentation-vertexai/src/types.ts
@@ -6,6 +6,11 @@ export interface VertexAIInstrumentationConfig extends InstrumentationConfig {
    * @default true
    */
   traceContent?: boolean;
+
+  /**
+   * A custom logger to log any exceptions that happen during span creation.
+   */
+  exceptionLogger?: (e: Error) => void;
 }
 
 export interface AIPlatformInstrumentationConfig extends InstrumentationConfig {
@@ -14,4 +19,9 @@ export interface AIPlatformInstrumentationConfig extends InstrumentationConfig {
    * @default true
    */
   traceContent?: boolean;
+
+  /**
+   * A custom logger to log any exceptions that happen during span creation.
+   */
+  exceptionLogger?: (e: Error) => void;
 }
diff --git a/packages/instrumentation-vertexai/src/vertexai-instrumentation.ts b/packages/instrumentation-vertexai/src/vertexai-instrumentation.ts
index 58393337..4cb1a3c8 100644
--- a/packages/instrumentation-vertexai/src/vertexai-instrumentation.ts
+++ b/packages/instrumentation-vertexai/src/vertexai-instrumentation.ts
@@ -59,7 +59,7 @@ export class VertexAIInstrumentation extends InstrumentationBase<any> {
 
   private modelConfig: vertexAI.ModelParams = { model: "" };
 
-  private setModal(newValue: vertexAI.ModelParams) {
+  private setModel(newValue: vertexAI.ModelParams) {
     this.modelConfig = { ...newValue };
   }
 
@@ -69,12 +69,12 @@ export class VertexAIInstrumentation extends InstrumentationBase<any> {
     this._wrap(
       module.VertexAI_Preview.prototype,
       "getGenerativeModel",
-      this.wrapperMethod(),
+      this.wrapperMethod("getGenerativeModel"),
     );
     this._wrap(
       module.GenerativeModel.prototype,
       "generateContentStream",
-      this.wrapperMethod(),
+      this.wrapperMethod("generateContentStream"),
     );
   }
 
@@ -84,12 +84,12 @@ export class VertexAIInstrumentation extends InstrumentationBase<any> {
     this._wrap(
       module.VertexAI_Preview.prototype,
       "getGenerativeModel",
-      this.wrapperMethod(),
+      this.wrapperMethod("getGenerativeModel"),
     );
     this._wrap(
       module.GenerativeModel.prototype,
       "generateContentStream",
-      this.wrapperMethod(),
+      this.wrapperMethod("generateContentStream"),
     );
 
     return module;
@@ -102,7 +102,9 @@ export class VertexAIInstrumentation extends InstrumentationBase<any> {
     this._unwrap(module.GenerativeModel.prototype, "generateContentStream");
   }
 
-  private wrapperMethod() {
+  private wrapperMethod(
+    wrappedMethodName: "getGenerativeModel" | "generateContentStream",
+  ) {
     // eslint-disable-next-line @typescript-eslint/no-this-alias
     const plugin = this;
     // eslint-disable-next-line @typescript-eslint/ban-types
@@ -111,9 +113,9 @@ export class VertexAIInstrumentation extends InstrumentationBase<any> {
         this: any,
         ...args: (vertexAI.GenerateContentRequest & vertexAI.ModelParams)[]
       ) {
-        // To set the model name only
-        if (args[0].model) {
-          plugin.setModal(args[0]);
+        if (wrappedMethodName === "getGenerativeModel") {
+          plugin.setModel(args[0]);
+
           return context.bind(
             context.active(),
             safeExecuteInTheMiddle(
@@ -164,35 +166,40 @@ export class VertexAIInstrumentation extends InstrumentationBase<any> {
       [SpanAttributes.LLM_REQUEST_TYPE]: "completion",
     };
 
-    attributes[SpanAttributes.LLM_REQUEST_MODEL] = this.modelConfig.model;
+    try {
+      attributes[SpanAttributes.LLM_REQUEST_MODEL] = this.modelConfig.model;
 
-    if (
-      this.modelConfig.generation_config !== undefined &&
-      typeof this.modelConfig.generation_config === "object"
-    ) {
-      if (this.modelConfig.generation_config.max_output_tokens) {
-        attributes[SpanAttributes.LLM_REQUEST_MAX_TOKENS] =
-          this.modelConfig.generation_config.max_output_tokens;
-      }
-      if (this.modelConfig.generation_config.temperature) {
-        attributes[SpanAttributes.LLM_TEMPERATURE] =
-          this.modelConfig.generation_config.temperature;
-      }
-      if (this.modelConfig.generation_config.top_p) {
-        attributes[SpanAttributes.LLM_TOP_P] =
-          this.modelConfig.generation_config.top_p;
-      }
-      if (this.modelConfig.generation_config.top_k) {
-        attributes[SpanAttributes.LLM_TOP_K] =
-          this.modelConfig.generation_config.top_k;
+      if (
+        this.modelConfig.generation_config !== undefined &&
+        typeof this.modelConfig.generation_config === "object"
+      ) {
+        if (this.modelConfig.generation_config.max_output_tokens) {
+          attributes[SpanAttributes.LLM_REQUEST_MAX_TOKENS] =
+            this.modelConfig.generation_config.max_output_tokens;
+        }
+        if (this.modelConfig.generation_config.temperature) {
+          attributes[SpanAttributes.LLM_TEMPERATURE] =
+            this.modelConfig.generation_config.temperature;
+        }
+        if (this.modelConfig.generation_config.top_p) {
+          attributes[SpanAttributes.LLM_TOP_P] =
+            this.modelConfig.generation_config.top_p;
+        }
+        if (this.modelConfig.generation_config.top_k) {
+          attributes[SpanAttributes.LLM_TOP_K] =
+            this.modelConfig.generation_config.top_k;
+        }
       }
-    }
 
-    if (this._shouldSendPrompts() && "contents" in params) {
-      attributes[`${SpanAttributes.LLM_PROMPTS}.0.role`] =
-        params.contents[0].role ?? "user";
-      attributes[`${SpanAttributes.LLM_PROMPTS}.0.content`] =
-        this._formatPartsData(params.contents[0].parts);
+      if (this._shouldSendPrompts() && "contents" in params) {
+        attributes[`${SpanAttributes.LLM_PROMPTS}.0.role`] =
+          params.contents[0].role ?? "user";
+        attributes[`${SpanAttributes.LLM_PROMPTS}.0.content`] =
+          this._formatPartsData(params.contents[0].parts);
+      }
+    } catch (e) {
+      this._diag.warn(e);
+      this._config.exceptionLogger?.(e);
     }
 
     return this.tracer.startSpan(`vertexai.completion`, {
@@ -231,51 +238,56 @@ export class VertexAIInstrumentation extends InstrumentationBase<any> {
     span: Span;
     result: vertexAI.StreamGenerateContentResult;
   }) {
-    span.setAttribute(
-      SpanAttributes.LLM_RESPONSE_MODEL,
-      this.modelConfig.model,
-    );
-
-    const streamResponse = await result.response;
-
-    if (streamResponse.usageMetadata?.totalTokenCount !== undefined)
+    try {
       span.setAttribute(
-        SpanAttributes.LLM_USAGE_TOTAL_TOKENS,
-        streamResponse.usageMetadata.totalTokenCount,
+        SpanAttributes.LLM_RESPONSE_MODEL,
+        this.modelConfig.model,
       );
 
-    if (streamResponse.usageMetadata?.candidates_token_count)
-      span.setAttribute(
-        SpanAttributes.LLM_USAGE_COMPLETION_TOKENS,
-        streamResponse.usageMetadata.candidates_token_count,
-      );
+      const streamResponse = await result.response;
 
-    if (streamResponse.usageMetadata?.prompt_token_count)
-      span.setAttribute(
-        SpanAttributes.LLM_USAGE_PROMPT_TOKENS,
-        streamResponse.usageMetadata.prompt_token_count,
-      );
+      if (streamResponse.usageMetadata?.totalTokenCount !== undefined)
+        span.setAttribute(
+          SpanAttributes.LLM_USAGE_TOTAL_TOKENS,
+          streamResponse.usageMetadata.totalTokenCount,
+        );
 
-    if (this._shouldSendPrompts()) {
-      streamResponse.candidates.forEach((candidate, index) => {
-        if (candidate.finishReason)
-          span.setAttribute(
-            `${SpanAttributes.LLM_COMPLETIONS}.${index}.finish_reason`,
-            candidate.finishReason,
-          );
+      if (streamResponse.usageMetadata?.candidates_token_count)
+        span.setAttribute(
+          SpanAttributes.LLM_USAGE_COMPLETION_TOKENS,
+          streamResponse.usageMetadata.candidates_token_count,
+        );
 
-        if (candidate.content) {
-          span.setAttribute(
-            `${SpanAttributes.LLM_COMPLETIONS}.${index}.role`,
-            candidate.content.role ?? "assistant",
-          );
+      if (streamResponse.usageMetadata?.prompt_token_count)
+        span.setAttribute(
+          SpanAttributes.LLM_USAGE_PROMPT_TOKENS,
+          streamResponse.usageMetadata.prompt_token_count,
+        );
 
-          span.setAttribute(
-            `${SpanAttributes.LLM_COMPLETIONS}.${index}.content`,
-            this._formatPartsData(candidate.content.parts),
-          );
-        }
-      });
+      if (this._shouldSendPrompts()) {
+        streamResponse.candidates.forEach((candidate, index) => {
+          if (candidate.finishReason)
+            span.setAttribute(
+              `${SpanAttributes.LLM_COMPLETIONS}.${index}.finish_reason`,
+              candidate.finishReason,
+            );
+
+          if (candidate.content) {
+            span.setAttribute(
+              `${SpanAttributes.LLM_COMPLETIONS}.${index}.role`,
+              candidate.content.role ?? "assistant",
+            );
+
+            span.setAttribute(
+              `${SpanAttributes.LLM_COMPLETIONS}.${index}.content`,
+              this._formatPartsData(candidate.content.parts),
+            );
+          }
+        });
+      }
+    } catch (e) {
+      this._diag.warn(e);
+      this._config.exceptionLogger?.(e);
     }
 
     span.setStatus({ code: SpanStatusCode.OK });
diff --git a/packages/traceloop-sdk/CHANGELOG.md b/packages/traceloop-sdk/CHANGELOG.md
index e43ebb25..19552a9a 100644
--- a/packages/traceloop-sdk/CHANGELOG.md
+++ b/packages/traceloop-sdk/CHANGELOG.md
@@ -3,6 +3,12 @@
 All notable changes to this project will be documented in this file.
 See [Conventional Commits](https://conventionalcommits.org) for commit guidelines.
 
+## [0.6.1](https://github.com/traceloop/openllmetry-js/compare/v0.6.0...v0.6.1) (2024-04-22)
+
+### Bug Fixes
+
+- handle exceptions ([#214](https://github.com/traceloop/openllmetry-js/issues/214)) ([65f9be4](https://github.com/traceloop/openllmetry-js/commit/65f9be4fdcaa40f5bfd6c1fe3edc60910b4af894))
+
 # [0.6.0](https://github.com/traceloop/openllmetry-js/compare/v0.5.29...v0.6.0) (2024-04-05)
 
 ### Features
diff --git a/packages/traceloop-sdk/package.json b/packages/traceloop-sdk/package.json
index 4dba8530..951cbb6c 100644
--- a/packages/traceloop-sdk/package.json
+++ b/packages/traceloop-sdk/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@traceloop/node-server-sdk",
-  "version": "0.6.0",
+  "version": "0.6.1",
   "description": "Traceloop Software Development Kit (SDK) for Node.js",
   "main": "dist/index.js",
   "module": "dist/index.mjs",
@@ -37,16 +37,17 @@
   "dependencies": {
     "@opentelemetry/exporter-trace-otlp-proto": "^0.49.1",
     "@opentelemetry/sdk-node": "^0.49.1",
+    "@sentry/node": "^7.111.0",
     "@traceloop/ai-semantic-conventions": "^0.6.0",
-    "@traceloop/instrumentation-anthropic": "^0.6.0",
-    "@traceloop/instrumentation-azure": "^0.6.0",
-    "@traceloop/instrumentation-bedrock": "^0.6.0",
-    "@traceloop/instrumentation-cohere": "^0.6.0",
-    "@traceloop/instrumentation-langchain": "^0.6.0",
-    "@traceloop/instrumentation-llamaindex": "^0.6.0",
-    "@traceloop/instrumentation-openai": "^0.6.0",
-    "@traceloop/instrumentation-pinecone": "^0.6.0",
-    "@traceloop/instrumentation-vertexai": "^0.6.0",
+    "@traceloop/instrumentation-anthropic": "^0.6.1",
+    "@traceloop/instrumentation-azure": "^0.6.1",
+    "@traceloop/instrumentation-bedrock": "^0.6.1",
+    "@traceloop/instrumentation-cohere": "^0.6.1",
+    "@traceloop/instrumentation-langchain": "^0.6.1",
+    "@traceloop/instrumentation-llamaindex": "^0.6.1",
+    "@traceloop/instrumentation-openai": "^0.6.1",
+    "@traceloop/instrumentation-pinecone": "^0.6.1",
+    "@traceloop/instrumentation-vertexai": "^0.6.1",
     "@types/nunjucks": "^3.2.5",
     "cross-fetch": "^4.0.0",
     "fetch-retry": "^5.0.6",
diff --git a/packages/traceloop-sdk/src/lib/telemetry/telemetry.ts b/packages/traceloop-sdk/src/lib/telemetry/telemetry.ts
index 2b49959a..67923273 100644
--- a/packages/traceloop-sdk/src/lib/telemetry/telemetry.ts
+++ b/packages/traceloop-sdk/src/lib/telemetry/telemetry.ts
@@ -3,6 +3,7 @@ import * as fs from "fs";
 import * as path from "path";
 import { v4 as uuid } from "uuid";
 import { PostHog } from "posthog-node";
+import * as Sentry from "@sentry/node";
 import { version } from "../../../package.json";
 
 export class Telemetry {
@@ -31,6 +32,10 @@ export class Telemetry {
       this.posthog = new PostHog(
         "phc_JMTeAfG8OpaPsyHzSBtqquMvko1fmOHcW0gyqLCrF3t",
       );
+      Sentry.init({
+        dsn: "https://0cbbe354864172adc0fbd41621a7a541@o4505278734663680.ingest.us.sentry.io/4507114378166272",
+        defaultIntegrations: false,
+      });
     }
   }
 
@@ -77,4 +82,10 @@ export class Telemetry {
       this.posthog.flush();
     }
   }
+
+  public logException(error: Error) {
+    if (this.telemetryEnabled) {
+      Sentry.captureException(error);
+    }
+  }
 }
diff --git a/packages/traceloop-sdk/src/lib/tracing/index.ts b/packages/traceloop-sdk/src/lib/tracing/index.ts
index b9b9cef4..c82f2904 100644
--- a/packages/traceloop-sdk/src/lib/tracing/index.ts
+++ b/packages/traceloop-sdk/src/lib/tracing/index.ts
@@ -46,69 +46,92 @@ let pineconeInstrumentation: PineconeInstrumentation | undefined;
 const instrumentations: Instrumentation[] = [];
 
 export const initInstrumentations = () => {
-  openAIInstrumentation = new OpenAIInstrumentation();
+  const exceptionLogger = (e: Error) => Telemetry.getInstance().logException(e);
+
+  openAIInstrumentation = new OpenAIInstrumentation({
+    enrichTokens: _configuration?.shouldEnrichMetrics,
+    exceptionLogger,
+  });
   instrumentations.push(openAIInstrumentation);
 
-  anthropicInstrumentation = new AnthropicInstrumentation();
+  anthropicInstrumentation = new AnthropicInstrumentation({ exceptionLogger });
   instrumentations.push(anthropicInstrumentation);
 
-  azureOpenAIInstrumentation = new AzureOpenAIInstrumentation();
+  azureOpenAIInstrumentation = new AzureOpenAIInstrumentation({
+    exceptionLogger,
+  });
   instrumentations.push(azureOpenAIInstrumentation);
 
-  cohereInstrumentation = new CohereInstrumentation();
+  cohereInstrumentation = new CohereInstrumentation({ exceptionLogger });
   instrumentations.push(cohereInstrumentation);
 
-  vertexaiInstrumentation = new VertexAIInstrumentation();
+  vertexaiInstrumentation = new VertexAIInstrumentation({
+    exceptionLogger,
+  });
   instrumentations.push(vertexaiInstrumentation);
 
-  aiplatformInstrumentation = new AIPlatformInstrumentation();
+  aiplatformInstrumentation = new AIPlatformInstrumentation({
+    exceptionLogger,
+  });
   instrumentations.push(aiplatformInstrumentation);
 
-  bedrockInstrumentation = new BedrockInstrumentation();
+  bedrockInstrumentation = new BedrockInstrumentation({ exceptionLogger });
   instrumentations.push(bedrockInstrumentation);
 
-  pineconeInstrumentation = new PineconeInstrumentation();
+  pineconeInstrumentation = new PineconeInstrumentation({ exceptionLogger });
   instrumentations.push(pineconeInstrumentation);
 
-  langchainInstrumentation = new LangChainInstrumentation();
+  langchainInstrumentation = new LangChainInstrumentation({ exceptionLogger });
   instrumentations.push(langchainInstrumentation);
 
-  llamaIndexInstrumentation = new LlamaIndexInstrumentation();
+  llamaIndexInstrumentation = new LlamaIndexInstrumentation({
+    exceptionLogger,
+  });
   instrumentations.push(llamaIndexInstrumentation);
 };
 
 export const manuallyInitInstrumentations = (
   instrumentModules: InitializeOptions["instrumentModules"],
 ) => {
+  const exceptionLogger = (e: Error) => Telemetry.getInstance().logException(e);
+
+  // Clear the instrumentations array that was initialized by default
+  instrumentations.length = 0;
+
   if (instrumentModules?.openAI) {
     openAIInstrumentation = new OpenAIInstrumentation({
       enrichTokens: _configuration?.shouldEnrichMetrics,
+      exceptionLogger,
     });
     instrumentations.push(openAIInstrumentation);
     openAIInstrumentation.manuallyInstrument(instrumentModules.openAI);
   }
 
   if (instrumentModules?.anthropic) {
-    anthropicInstrumentation = new AnthropicInstrumentation();
+    anthropicInstrumentation = new AnthropicInstrumentation({
+      exceptionLogger,
+    });
     instrumentations.push(anthropicInstrumentation);
     anthropicInstrumentation.manuallyInstrument(instrumentModules.anthropic);
   }
 
   if (instrumentModules?.azureOpenAI) {
-    const instrumentation = new AzureOpenAIInstrumentation();
+    const instrumentation = new AzureOpenAIInstrumentation({ exceptionLogger });
     instrumentations.push(instrumentation as Instrumentation);
     azureOpenAIInstrumentation = instrumentation;
     instrumentation.manuallyInstrument(instrumentModules.azureOpenAI);
   }
 
   if (instrumentModules?.cohere) {
-    cohereInstrumentation = new CohereInstrumentation();
+    cohereInstrumentation = new CohereInstrumentation({ exceptionLogger });
     instrumentations.push(cohereInstrumentation);
     cohereInstrumentation.manuallyInstrument(instrumentModules.cohere);
   }
 
   if (instrumentModules?.google_vertexai) {
-    vertexaiInstrumentation = new VertexAIInstrumentation();
+    vertexaiInstrumentation = new VertexAIInstrumentation({
+      exceptionLogger,
+    });
     instrumentations.push(vertexaiInstrumentation);
     vertexaiInstrumentation.manuallyInstrument(
       instrumentModules.google_vertexai,
@@ -116,7 +139,9 @@ export const manuallyInitInstrumentations = (
   }
 
   if (instrumentModules?.google_aiplatform) {
-    aiplatformInstrumentation = new AIPlatformInstrumentation();
+    aiplatformInstrumentation = new AIPlatformInstrumentation({
+      exceptionLogger,
+    });
     instrumentations.push(aiplatformInstrumentation);
     aiplatformInstrumentation.manuallyInstrument(
       instrumentModules.google_aiplatform,
@@ -124,25 +149,29 @@ export const manuallyInitInstrumentations = (
   }
 
   if (instrumentModules?.bedrock) {
-    bedrockInstrumentation = new BedrockInstrumentation();
+    bedrockInstrumentation = new BedrockInstrumentation({ exceptionLogger });
     instrumentations.push(bedrockInstrumentation);
     bedrockInstrumentation.manuallyInstrument(instrumentModules.bedrock);
   }
 
   if (instrumentModules?.pinecone) {
-    const instrumentation = new PineconeInstrumentation();
+    const instrumentation = new PineconeInstrumentation({ exceptionLogger });
     instrumentations.push(instrumentation as Instrumentation);
     instrumentation.manuallyInstrument(instrumentModules.pinecone);
   }
 
   if (instrumentModules?.langchain) {
-    langchainInstrumentation = new LangChainInstrumentation();
+    langchainInstrumentation = new LangChainInstrumentation({
+      exceptionLogger,
+    });
     instrumentations.push(langchainInstrumentation);
     langchainInstrumentation.manuallyInstrument(instrumentModules.langchain);
   }
 
   if (instrumentModules?.llamaIndex) {
-    llamaIndexInstrumentation = new LlamaIndexInstrumentation();
+    llamaIndexInstrumentation = new LlamaIndexInstrumentation({
+      exceptionLogger,
+    });
     instrumentations.push(llamaIndexInstrumentation);
     llamaIndexInstrumentation.manuallyInstrument(instrumentModules.llamaIndex);
   }
@@ -162,7 +191,6 @@ export const startTracing = (options: InitializeOptions) => {
   if (!shouldSendTraces()) {
     openAIInstrumentation?.setConfig({
       traceContent: false,
-      enrichTokens: _configuration?.shouldEnrichMetrics,
     });
     azureOpenAIInstrumentation?.setConfig({
       traceContent: false,