diff --git a/docs/community/integrations/chatgpt_plugins.md b/docs/community/integrations/chatgpt_plugins.md
index 6e1fc8510f18407f93d921f1f7d8aeff3bf64a31..37f793377e680750a22fe70a490a7f17f741b9ba 100644
--- a/docs/community/integrations/chatgpt_plugins.md
+++ b/docs/community/integrations/chatgpt_plugins.md
@@ -78,8 +78,8 @@ reader = ChatGPTRetrievalPluginReader(
 documents = reader.load_data("What did the author do growing up?")
 
 # build and query index
-from llama_index import ListIndex
-index = ListIndex(documents)
+from llama_index import SummaryIndex
+index = SummaryIndex.from_documents(documents)
 # set Logging to DEBUG for more detailed outputs
-query_engine = vector_index.as_query_engine(
+query_engine = index.as_query_engine(
     response_mode="compact"
diff --git a/docs/community/integrations/vector_stores.md b/docs/community/integrations/vector_stores.md
index e6ea0a77e41dbfc13548077bc5787b5dad22e20f..ca1fe81ab841dc3870b47fc2532656912f0f244e 100644
--- a/docs/community/integrations/vector_stores.md
+++ b/docs/community/integrations/vector_stores.md
@@ -390,7 +390,7 @@ Chroma stores both documents and vectors. This is an example of how to use Chrom
 ```python
 
 from llama_index.readers.chroma import ChromaReader
-from llama_index.indices import ListIndex
+from llama_index.indices import SummaryIndex
 
 # The chroma reader loads data from a persisted Chroma collection.
 # This requires a collection name and a persist directory.
@@ -402,7 +402,7 @@ reader = ChromaReader(
 query_vector=[n1, n2, n3, ...]
 
 documents = reader.load_data(collection_name="demo", query_vector=query_vector, limit=5)
-index = ListIndex.from_documents(documents)
+index = SummaryIndex.from_documents(documents)
 
 query_engine = index.as_query_engine()
 response = query_engine.query("<query_text>")
diff --git a/docs/core_modules/data_modules/index/composability.md b/docs/core_modules/data_modules/index/composability.md
index 0ba20fd062e72f495689d4520932d53951e76f51..1178045056edf89ec644eeb64b0326d31e0a8630 100644
--- a/docs/core_modules/data_modules/index/composability.md
+++ b/docs/core_modules/data_modules/index/composability.md
@@ -66,7 +66,7 @@ We can query, save, and load the graph to/from disk as any other index.
 from llama_index.indices.composability import ComposableGraph
 
 graph = ComposableGraph.from_indices(
-    ListIndex,
+    SummaryIndex,
     [index1, index2, index3],
     index_summaries=[index1_summary, index2_summary, index3_summary],
     storage_context=storage_context,
diff --git a/docs/core_modules/data_modules/index/document_management.md b/docs/core_modules/data_modules/index/document_management.md
index aa473667642e76e590f21b0b55a93af40cbef095..07da61428aaeda70879b08ee66e861345c09e056 100644
--- a/docs/core_modules/data_modules/index/document_management.md
+++ b/docs/core_modules/data_modules/index/document_management.md
@@ -15,9 +15,9 @@ In this notebook we showcase how to construct an empty index, manually create Do
 An example code snippet is given below:
 
 ```python
-from llama_index import ListIndex, Document
+from llama_index import SummaryIndex, Document
 
-index = ListIndex([])
+index = SummaryIndex([])
 text_chunks = ['text_chunk_1', 'text_chunk_2', 'text_chunk_3']
 
 doc_chunks = []
diff --git a/docs/core_modules/data_modules/index/index_progress_bars.ipynb b/docs/core_modules/data_modules/index/index_progress_bars.ipynb
index 0d04dadfa03d5f4dff97db0090d3e8bdfa2baa75..e467290af90154ad7530fc019872bf4207bdb2bd 100644
--- a/docs/core_modules/data_modules/index/index_progress_bars.ipynb
+++ b/docs/core_modules/data_modules/index/index_progress_bars.ipynb
@@ -1,603 +1,603 @@
 {
- "cells": [
-  {
-   "attachments": {
-    "CleanShot%202023-06-25%20at%2011.59.55@2x.png": {
-     "image/png": "iVBORw0KGgoAAAANSUhEUgAABQwAAACoCAYAAAClr6NdAAAMQmlDQ1BJQ0MgUHJvZmlsZQAASImVVwdYU8kWnluSkEBoAQSkhN4EkRpASggtgPQiiEpIAoQSYkJQsaOLCq5dRMCGrooodkAsKGJnUWzYFwsqyrpYsCtvkgC67ivfm++bO//958x/zpw7c+8dANROcESiHFQdgFxhvjgm2J8+PimZTnoKcIABZWAPiByuRMSMigoHsAy1fy/vbgBE1l61l2n9s/+/Fg0eX8IFAImCOI0n4eZCfBAAvIorEucDQJTxZlPzRTIMK9ASwwAhXiTDGQpcJcNpCrxXbhMXw4K4FQAlFQ5HnAGA6mXI0wu4GVBDtQ9iRyFPIARAjQ6xT25uHg/iVIitoY0IYpk+I+0HnYy/aaYNa3I4GcNYMRd5UQoQSEQ5nOn/Zzr+d8nNkQ75sIRVJVMcEiObM8zbzey8MBlWgbhXmBYRCbEmxB8EPLk9xCglUxoSr7BHDbgSFswZ0IHYkccJCIPYAOIgYU5E+CCfli4IYkMMVwg6TZDPjoNYF+JFfElg7KDNJnFezKAvtD5dzGIO8uc4Yrlfma/70ux45qD+60w+e1AfUy3MjEuEmAKxeYEgIQJiVYgdJNmxYYM2YwszWRFDNmJpjCx+c4hj+MJgf4U+VpAuDooZtC/JlQzNF9uUKWBHDOL9+ZlxIYr8YK1cjjx+OBfsMl/IjB/S4UvGhw/NhccPCFTMHXvGF8bHDup8EOX7xyjG4hRRTtSgPW7KzwmW8aYQu0gKYgfH4gn5cEEq9PF0UX5UnCJOvDCLExqliAdfDsIBCwQAOpDCmgbyQBYQtPc29MI7RU8Q4AAxyAB8uCsVzNCIRHmPEF5jQSH4EyI+kAyP85f38kEB5L8Os4qrPUiX9xbIR2SDJxDngjCQA++l8lHCYW8J4DFkBP/wzoGVC+PNgVXW/+/5IfY7w4RM+CAjHfJIVxuyJAYSA4ghxCCiDa6P++BeeDi8+sHqhDNwj6F5fLcnPCF0EB4SrhO6CLcmC4rEP0U5DnRB/aDBXKT9mAvcEmq64v64N1SHyrgOrg/scRfoh4n7Qs+ukGUNxi3LCv0n7b/N4IenMWhHdiSj5BFkP7L1zyNVbVVdh1Vkuf4xP4pY04bzzRru+dk/64fs82Ab9rMltgg7gJ3FTmLnsaNYA6BjzVgj1oYdk+Hh1fVYvrqGvMXI48mGOoJ/+Bt6srJMShxrHXscvyj68vnTZO9owMoTTRcLMjLz6Uz4ReDT2UKuwyi6k6OTMwCy74vi9fUmWv7dQHTavnPz/wDAu3lgYODIdy60GYB97nD7H/7OWTPgp0MZgHOHuVJxgYLDZRcCfEuowZ2mB4yAGbCG83ECbsAL+IFAEAoiQRxIApNg9JlwnYvBVDATzAPFoBQsB2tABdgItoAdYDfYDxrAUXASnAEXwWVwHdyBq6cbvAB94B34jCAICaEiNEQPMUYsEDvECWEgPkggEo7EIElIKpKBCBEpMhOZj5QiK5EKZDNSg+xDDiMnkfNIB3ILeYD0IK+RTyiGqqBaqCFqiY5GGSgTDUPj0IloBjoFLUQXoEvRcrQa3YXWoyfRi+h1tAt9gfZjAFPGdDATzB5jYCwsEkvG0jExNhsrwcqwaqwOa4LP+SrWhfViH3EiTsPpuD1cwSF4PM7Fp+Cz8SV4Bb4Dr8db8av4A7wP/0agEgwIdgRPApswnpBBmEooJpQRthEOEU7DvdRNeEckEnWIVkR3uBeTiFnEGcQlxPXEPcQTxA7iI2I/iUTSI9mRvEmRJA4pn1RMWkfaRWomXSF1kz4oKSsZKzkpBSklKwmVipTKlHYqHVe6ovRU6TNZnWxB9iRHknnk6eRl5K3kJvIlcjf5M0WDYkXxpsRRsijzKOWUOsppyl3KG2VlZVNlD+VoZYHyXOVy5b3K55QfKH9U0VSxVWGppKhIVZaqbFc5oXJL
5Q2VSrWk+lGTqfnUpdQa6inqfeoHVZqqgypblac6R7VStV71iupLNbKahRpTbZJaoVqZ2gG1S2q96mR1S3WWOkd9tnql+mH1TvV+DZrGGI1IjVyNJRo7Nc5rPNMkaVpqBmryNBdobtE8pfmIhtHMaCwalzaftpV2mtatRdSy0mJrZWmVau3Watfq09bUdtFO0J6mXal9TLtLB9Ox1GHr5Ogs09mvc0Pn0wjDEcwR/BGLR9SNuDLive5IXT9dvm6J7h7d67qf9Oh6gXrZeiv0GvTu6eP6tvrR+lP1N+if1u8dqTXSayR3ZMnI/SNvG6AGtgYxBjMMthi0GfQbGhkGG4oM1xmeMuw10jHyM8oyWm103KjHmGbsYywwXm3cbPycrk1n0nPo5fRWep+JgUmIidRks0m7yWdTK9N40yLTPab3zChmDLN0s9VmLWZ95sbm48xnmtea37YgWzAsMi3WWpy1eG9pZZloudCywfKZla4V26rQqtbqrjXV2td6inW19TUbog3DJttmvc1lW9TW1TbTttL2kh1q52YnsFtv1zGKMMpjlHBU9ahOexV7pn2Bfa39Awcdh3CHIocGh5ejzUcnj14x+uzob46ujjmOWx3vjNEcEzqmaEzTmNdOtk5cp0qna85U5yDnOc6Nzq9c7Fz4LhtcbrrSXMe5LnRtcf3q5u4mdqtz63E3d091r3LvZGgxohhLGOc8CB7+HnM8jnp89HTzzPfc7/mXl71XttdOr2djrcbyx24d+8jb1Jvjvdm7y4fuk+qzyafL18SX41vt+9DPzI/nt83vKdOGmcXcxXzp7+gv9j/k/57lyZrFOhGABQQHlAS0B2oGxgdWBN4PMg3KCKoN6gt2DZ4RfCKEEBIWsiKkk23I5rJr2H2h7qGzQlvDVMJiwyrCHobbhovDm8ah40LHrRp3N8IiQhjREAki2ZGrIu9FWUVNiToSTYyOiq6MfhIzJmZmzNlYWuzk2J2x7+L845bF3Ym3jpfGtySoJaQk1CS8TwxIXJnYNX70+FnjLybpJwmSGpNJyQnJ25L7JwROWDOhO8U1pTjlxkSridMmnp+kPyln0rHJapM5kw+kElITU3emfuFEcqo5/WnstKq0Pi6Lu5b7gufHW83r4XvzV/Kfpnunr0x/luGdsSqjJ9M3syyzV8ASVAheZYVkbcx6nx2ZvT17ICcxZ0+uUm5q7mGhpjBb2JpnlDctr0NkJyoWdU3xnLJmSp84TLxNgkgmShrzteCPfJvUWvqL9EGBT0FlwYepCVMPTNOYJpzWNt12+uLpTwuDCn+bgc/gzmiZaTJz3swHs5izNs9GZqfNbpljNmfBnO65wXN3zKPMy573e5Fj0cqit/MT5zctMFwwd8GjX4J/qS1WLRYXdy70WrhxEb5IsKh9sfPidYu/lfBKLpQ6lpaVflnCXXLh1zG/lv86sDR9afsyt2UblhOXC5ffWOG7YsdKjZWFKx+tGreqfjV9dcnqt2smrzlf5lK2cS1lrXRtV3l4eeM683XL132pyKy4XulfuafKoGpx1fv1vPVXNvhtqNtouLF046dNgk03Nwdvrq+2rC7bQtxSsOXJ1oStZ39j/FazTX9b6bav24Xbu3bE7Gitca+p2Wmwc1ktWiut7dmVsuvy7oDdjXX2dZv36Owp3Qv2Svc+35e678b+sP0tBxgH6g5aHKw6RDtUUo/UT6/va8hs6GpMauw4HHq4pcmr6dARhyPbj5ocrTymfWzZccrxBccHmgub+0+ITvSezDj5qGVyy51T409da41ubT8ddvrcmaAzp84yzzaf8z539Lzn+cMXGBcaLrpdrG9zbTv0u+vvh9rd2usvuV9qvOxxualjbMfxK75XTl4NuHrmGvvaxesR1ztuxN+42ZnS2XWTd/PZrZxbr24X3P58Z+5dwt2Se+r3yu4b3K/+w+aPPV1uXcceBDxoexj78M4j7qMXjyWPv3QveEJ9UvbU+GnNM6dnR3uCei4/n/C8
+4Xoxefe4j81/qx6af3y4F9+f7X1je/rfiV+NfB6yRu9N9vfurxt6Y/qv/8u993n9yUf9D7s+Mj4ePZT4qenn6d+IX0p/2rztelb2Le7A7kDAyKOmCP/FcBgRdPTAXi9HQBqEgA0eD6jTFCc/+QFUZxZ5Qj8J6w4I8qLGwB18P89uhf+3XQCsHcrPH5BfbUUAKKoAMR5ANTZebgOndXk50pZIcJzwKaor2m5aeDfFMWZ84e4f26BTNUF/Nz+C6nzfE+Jwo3bAAAAimVYSWZNTQAqAAAACAAEARoABQAAAAEAAAA+ARsABQAAAAEAAABGASgAAwAAAAEAAgAAh2kABAAAAAEAAABOAAAAAAAAAJAAAAABAAAAkAAAAAEAA5KGAAcAAAASAAAAeKACAAQAAAABAAAFDKADAAQAAAABAAAAqAAAAABBU0NJSQAAAFNjcmVlbnNob3Q7WdvJAAAACXBIWXMAABYlAAAWJQFJUiTwAAAB12lUWHRYTUw6Y29tLmFkb2JlLnhtcAAAAAAAPHg6eG1wbWV0YSB4bWxuczp4PSJhZG9iZTpuczptZXRhLyIgeDp4bXB0az0iWE1QIENvcmUgNi4wLjAiPgogICA8cmRmOlJERiB4bWxuczpyZGY9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkvMDIvMjItcmRmLXN5bnRheC1ucyMiPgogICAgICA8cmRmOkRlc2NyaXB0aW9uIHJkZjphYm91dD0iIgogICAgICAgICAgICB4bWxuczpleGlmPSJodHRwOi8vbnMuYWRvYmUuY29tL2V4aWYvMS4wLyI+CiAgICAgICAgIDxleGlmOlBpeGVsWURpbWVuc2lvbj4xNjg8L2V4aWY6UGl4ZWxZRGltZW5zaW9uPgogICAgICAgICA8ZXhpZjpQaXhlbFhEaW1lbnNpb24+MTI5MjwvZXhpZjpQaXhlbFhEaW1lbnNpb24+CiAgICAgICAgIDxleGlmOlVzZXJDb21tZW50PlNjcmVlbnNob3Q8L2V4aWY6VXNlckNvbW1lbnQ+CiAgICAgIDwvcmRmOkRlc2NyaXB0aW9uPgogICA8L3JkZjpSREY+CjwveDp4bXBtZXRhPgo/j0nSAAAAHGlET1QAAAACAAAAAAAAAFQAAAAoAAAAVAAAAFQAADBwb3xS7QAAMDxJREFUeAHsnQe49ETZhoMVC/YGimIvFLFiBQuIgr1jRUVFQewdu4iKvSD2ihUbKoIFOyo2EBGxK4qooCh2QX7u/DznG0Kym+Tbs99yzj3Xdc7uJpmSO5NJ5pl33lnv9DNCZZCABCQgAQlIQAISkIAEJCABCUhAAhKQgAQkcAaB9RQMrQcSkIAEJCABCUhAAhKQgAQkIAEJSEACEpBACCgYhoSfEpCABCQgAQlIQAISkIAEJCABCUhAAhKQgBaG1gEJSEACEpCABCQgAQlIQAISkIAEJCABCUhgDQEtDNew8JsEJCABCUhAAhKQgAQkIAEJSEACEpCABFY9AQXDVV8FBCABCUhAAhKQgAQkIAEJSEACEpCABCQggTUEFAzXsPCbBCQgAQlIQAISkIAEJCABCUhAAhKQgARWPQEFw1VfBQQgAQlIQAISkIAEJCABCUhAAhKQgAQkIIE1BBQM17DwmwQkIAEJSEACEpCABCQgAQlIQAISkIAEVj0BBcNVXwUEIAEJSEACEpCABCQgAQlIQAISkIAEJCCBNQQUDNew8JsEJCABCUhAAhKQgAQkIAEJSEACEpCABFY9AQXDVV8FBCABCUhAAhKQgAQkIAEJSEACEpCABCQggTUEFAzXsPCbBCQgAQlIQAISkIAEJCABCUhAAhKQgARWPQEFw1VfBQQgAQlIQAISkIAEJCABCUhAAhKQgAQkIIE1BBQM17DwmwQkIAEJSEACEpCABCQgAQlIQAISkIAEVj0BBcNVXwUEIAEJSGDdEzjsF1+uPvfTT1fHn3Lcui+MJZCA
BCQgAQlIYEUQ2GiDjattr3aH6mZX3npFnI8nIQEJSGCeBBQM50nbvCQgAQlI4GwEEAvf9b03nW27GyQgAQlIQAISkMAsCDzoeo9QNJwFSNOQgARWFQEFw1V1uT1ZCUhAAotF4LTTTqv2OnRPLQsX67JYGglIQAISkMCKIoCl4bO323tFnZMnIwEJSGC5CSgYLjdh05eABCQggU4Cp556arX7gTt37neHBCQgAQlIQAISmAWB/e7+nlkkYxoSkIAEVg0BBcNVc6k9UQlIQAKLSWDXjzxgMQtmqSQgAQlIQAISWDEEFAxXzKX0RCQggTkRUDCcE2izkYAEJCCBdgIKhu1c3CoBCUhAAhKQwOwIKBjOjqUpSUACq4OAguHquM6epQQkIIGFJaBguLCXxoJJQAISkIAEVgwBBcMVcyk9EQlIYE4EFAznBNpsJCABCUignYCCYTsXt0pAAhKQgAQkMDsCCoazY2lKEpDA6iCgYLg6rrNnKQEJSGDZCfz5z3+uTjzxxOoCF7hAdYUrXKF3fgqGvVF5oAQkIAEJSEACIwnMSzDkfeiQQw6pLnWpS1XbbrvtyNK2R/vqV79a/eY3v1naedGLXrS6wx3usPTbLxKQwDACX/nKV6rf/va31TbbbFNtuOGGvSJ/85vfrH7xi1+c5dj73Oc+1XrrrXeWbSvhx0wEw1NOOaW6yEUuMpHHFltsUW222WbVk570pOp617vexGPnufO0006rLn7xi1ecAxf+xje+8TyzX6u8Dj744PoBscEGG1R//etf1yotI69bAufkerhuya3J/Zx+P+yyyy4VD6yPfvSj1XWuc501J9b49u1vf7t63/veVx111FHV73//+4q2lXaL+Ah1bWFMnMMOO6x65zvfWRGXhycvvLvvvnt1nvOcpy2L6p///Gd1wxvesPrhD39YPfWpT61e/OIXtx7XtlHBsI2K2yQgAQlIQAISmCWBMYLhv/71r+rmN795deqpp1ZHHnlkr+K88pWvrJ7whCdUj3nMY6rXvOY1Z4vzve99r7rvfe9bv1u9/vWvP9v+SRvufe97Vx/60IeWDmn2A8e8842Js1SAli8f+chHqs9+9rPVEUccUe/dcsstq9ve9rbVPe95z5aj/3/TmDidiU3ZMSavMXG6ivG73/2ueuMb31jzQXS68pWvXMHoEY94RLXRRhu1RhsTpzWhgRu///3vV/e6170qhOnDDz+8V2yOO+CAA3ode+tb3/psgvfxxx9ffeELX6j/vvWtb1Ubb7xx3dfhfkK3SZhF/5n+y2Uve9laC0I07OKfPPP5yEc+snrTm96Un/Un5TnXuc51lm0r4cfcBMMS1pvf/Oa6c1tuW1ffafzPe97z1tl/4xvfqLbaaqt1VZTB+X7605+udthhh6r5oBickBGmErjmNa9Z/fjHP67e/e53Vw94wOxXdJ1lPaSRvfzlL1+fEw+hTTbZZOr5rYQDzsn3w09/+tPq6le/en0Zvvvd73YOqiDCPf3pT2+9XNe4xjUqRFNeOsowJs7nPve5arvttquTQZDkZYHAyy1iZVvYY489qte+9rXV9a9//Qqx8fznP3/bYa3bFAxbsbhRAhKQgAQkIIEZEhgjGPLu/6AHPaguxemnnz61NBxzrWtdq+43dL3TPexhD6ve9ra3VXe5y12qj33sY1PTLA+IYHi3u92tFjLXX3/9arfddqsPGfPONyZOWZ7y+3/+85+K8n384x8vNy99v9Od7lQLSec73/mWto2JsxR54JcxeY2JM6lYn/nMZ2rhFGOlZqBP/8EPfrC6/e1vf5ZdY+KcJYGRPxDLb3CDG9TGAEP0BvoD9Av6hGc/+9nV8573vKVDEdOx9Ovigwh9latcpT6+q/88pC/8gQ98oO7fcD8hCvcN9LmOPvro6uSTT65e+MIX1tEUDCfQKy0Mn/GMZ1Q777zz0tGAQxH/4he/WD3/+c9f2v7rX/+6VouXNqyjL1S0u9/97nXujAZd9apX
XUclGZ7tOVkgGX626zbGPATDWdXDspH8+c9/fjYBad2SXL7cz4n3A+3PoYceWj3wgQ+s/vCHP9Rwul4usT7ceuut62MQ8fbcc896RAxx7wUveEG9/Ta3uU3F75jDj4lDQlgsMqK33377VYygUad4+aWt/8EPflBtuummdX7594lPfKK6853vXP/80Y9+VHG/DAkKhkNoeawEJCABCUhAAmMIDBEMeed573vfW+26665LWfURDPPudaMb3ehsFlm8T+29997V6173ujrNtREM3/CGN5ylbMmXhJf7PXEJSOPLi170ouqZz3xmvRWrtIc85CH1O+nb3/72WghjB3rAs571rKWYY+IsRR74ZUxeY+J0Feukk06q+2XULQS4ffbZp9p8883rd2tmYWY7/TemsxPGxOnKf+j2pzzlKXUZiTdEMGTa/EEHHdSZHVP26WMQ3vWud9X9IL4fe+yxFfcNHKjDD37wg+sZqvTxXvWqV3FIdctb3rL68pe/XH/v0nGG9IWxfKUvRnnHTO0/7rjjqite8Yp1eRQMawzt/7iomZKMeS3mtG3hwAMPrEdS2IcJ58Mf/vC2w9zWk8A5USDpeWoLd9hyC4azPOEhjeQs813XaZ2T7gde6h73uMdVP/nJT842gtYlGN7xjnesPvWpT9W+ARnRSpsLd14aH/3oR9eX4Otf/3p1k5vcpP4+Jg4vJnlJ+fe//11lFPhRj3pU/XDfd999K74nlGIiL4TlgFGOmfapYDiNkPslIAEJSEACElhbAn0EQ8Q8ZsNldkWZZx/BkPcgXLq89a1vrR760IfW0bFQxPUVs5XKMEvBcMw735g4ZfnL77wzYu1IQCx8//vfvzQ9E27MUsF6jsA0UI4dE6dOYMS/MXmNiTOpaC9/+ctr92wc03zfp75d97rXraO/5CUvqRDrCGPi1BHX8h9TgjFESBgiGCZO2yeiGtZ8GBsgCL7jHe9YOuz+979/LdJjQfid73ynutjFLra076UvfWnt8ogNuGS6zGUus7Sv+aVvX5h+GDO0SIvpyF1ul5rpl78VDEsaE773FQxpLJj/zvH4deAGSOCGRFDE/xUXmUaE+epMuW368/ra175WH4MCzXRLLtSXvvSl2qcXNxjhL3/5S+0LDGsXvl/60peu08EK5oIXvGCyrT8//OEPV//73//qm+KSl7xk3XhRFgINHhaSKOV0xKnk+Bi4xz3ucZYOe33wmf8wTcUXBMejoN/0pjetbnWrW1X//e9/K8oOg9vd7nZllInfqcw8ZGhYyB8fkHe9613r9CdNST7hhBPqeJjuIgKg1NMQ4WcsFkjNjCkvU7PJC4e6XAPy23777ZcafeL8/e9/r5V45unzsGveYDwIPvnJT9bJo9Zf+MIXPhtXbkyEE/wc4I8ATvgxOPe5z11fM/bBnbqB5Scm7l2NQ64RjQvnhiUWoknp54DCYObM9E8spGgguE5Mn6SeXOlKV6r9a0Rs4XhM6jFFx3cbFmCIMlxL6hOfCZwDll3URfzI4fONa9O0xMrxbZ/NesgxCET/+Mc/ah8nTJ2nrPzRUHJf7Ljjjktm2fixxMEy1zqCDi89MKMczfuIURysyKgfXD/qNXXjale7WlvxWrcN5Vkmwj0HN/zBYLVGOalruAWAb1cYez/0rSPky4gVAiT1g/uIekRbwwMOPxdloD7BgYCPmz6+L8rBkzItvjdfINj2t7/9rR7Z4zu+cPDhUQbqKCIfbWtM+8fEIU3qBdaE1BeEyYS8sDznOc+pnvvc59abaY9oyxiZmzRdOWl0fSoYdpFxuwQkIAEJSEACsyLQRzB84hOfWL3iFa9ozXKaYPinP/2poi9J4L0ckYVA/6tNgJyVYDjmnW9MHM6Ffi7+CQn0GbI2Ae+CWGsR2t5lS0GM+PjGHhOnzqDnP/rezGqkHzkmrzFxKBp9Nfq5BLSHuOm5xS1uUWsBXdcdH4/0B+lP0AcmjIlTR1yLf+gB9AXo+77nPe+p3XHN
SjDEupQpvLgwoq8fXabUk+COJlAG9BzuTQJWrHHBVPaf6SsP6QuTDhakTavXIf1ABcPyKk34Xl7gSRaGJIFVDMcjwDC/nYAYhr8strcFOqoIjAlM3WRhAPKiYmT0JhWZef4IXG2BYxDtMP8lUCGaPgx/9atf1eIA+xH9ELKagRVAEYiaU+8QLvHP0DwXxBBGCjA35gZB2JoWEFTwKxHT7vJ4zgMTeUyZc97lfm5upjm2BUQPRr2aYhoNE0JopkaWcRHXMNXNlO2SEcJifOYlDuJMhJOf/exntahVxoFd26phNCKYryP4lSuAJd3Seirb8D2AtWqTOfupJ4irCVi/MmrIFM5jjjmmHsXIvnwyrX6vvfaqf6a+Zl8+M80A3w4ImYyStAWuERZZXQJt4rTVQ/Yh2MKBxhDfdc2RSY7B9wkPngg9bGuG8pwQl+DMSE1bwI8ExzdF4LZjh/JMGohwjMDSqDcD9Rnz9PK6ccza3A9D6gj1lBE1pgO0haYfS0zqI9C+5S1vqfBLMy0gtJX3GaJwhNq2lywESdoNQtt+tsenTV5CxsQhHe4LxEIGGErn3rQ1tGHlQ5UBmqc97Wm11SOLsJQjgaTVNygY9iXlcRKQgAQkIAEJjCXQRzDknQxxIoHpjwyKEqYJhrzz40+Q939mfyQgsmFMkcA7Fa6w8s6W7X0+875XTkke8843Jg7lY5A//Wj8uvNeTMBIgQFt3uM53+biD7zH855If43ppY997GNHxakzm/KP6/eOMyzXEIPud7/71aznWT5Wy401ZemqJ/1BNBC0kGZI/WE7vDh+TJxmukN/x9LvZS97Wa3RIHi36Q1D0y01mqaffRbyoW6TD8J7n35os/9M/ULobAtlX5j9pbEFfb9MKx7aD1QwbKPdso0bP9PjJgmGiGRYMBFyo5x44om1mEQaiGqYRiPGcKNzY0c04riM2EQwpPHG3JlA5UKAQgi7xCUuUW/DnBUhCYsgGreMFiH2MdrADdisaFg3lcIW6VK2xz/+8bV4g9Ucqjeh6RyzHIUgj5122qm63OUuV8+zL52/9hUMo3qTF513BBSs1/AHWQotzRuY1bbSCFEOhBmYYFocYQsBkA5+phsyEsJoBoH0EBvh9/nPf762tGI7vxEQsBYsGY0RDEmP603ZKCM+QrBsLAMPW84bC65cu6aIgbCEOTMBiz/qDw9kHl4R17hmWOIRInDVP874RxwcyyLeICRyrQkRZXDYyjYeOgTy5/oxqoElF3/xH0c58KtA/tRLhGlCU2CqNzb+tdVDDolgmMN5MON8lnsJUZgARxpdHs7UGaw/s3oa5421HtaOWMsSMnrFd6wpeVlBwOLBltFP7pvmyk8c3wxDeRKfEU2E5whmlIu6h8jMlNZs5/x4YCWMvR+G1hEGCFIXWe2XBw+jhAiDaY+wcozAN0YwzDnlk5fTC13oQvXP1L3s45M6zEAEgbYx7W294cx/GbHLPTImDkmVo+M8TDOgwnQa6nKmHTPQE2tc6vrNbnazsjiDvisYDsLlwRKQgAQkIAEJjCDQRzBsJouRQxaCmyYYMqOH2XLMoKE/2xWYrcEA7KwEwzHvfGPicD5dgmH83cWoou3c846dmYZj4rSlm228p9MP5i99ukwNH5PXmDiUpU0wxGovGkWXv7xSUEP7QHQdGid6SZgM/aQPi4ZB3wxDKGYbzUIwpE+B0Ez/HGMDfHmWgf40/Wr0FQwwuIYYCtHnZWFI/KtjYVgaKTX7z/Sb+/SFyReDG/Kir8y9kJA6yu8+/UAFw5Cb8skNmQ5sm2BIBxTrNUZc0uFGeKJjy2o0WLUhUqHAxyqNLMsLgECWabwRDDmGePvvv38tFCEAMoUT0Yjwy1/+sp5mWv844x+VHoGIQGN+7Wtfu5dgSFlj9kpcGjlGhQiYOkcBj8kw54WpNUJOQqbz8buPYFjOvYcP5xhzZtIoVx8qBUNYb3LG1EmuCecK39KSkBW5YgGFIJtVtbDowywY
URDhk+m5CayKyugMgfS4udZWMCQfRuxy0yOYIK6mcY+pesoA71iZcizCKQ0v5SQONzQNT0ZhsPyjzFgYIo5St9hXClw0Vow8JQ5Wb0wlJpAW+xO6fBgi3iHwwJTGLYEGDJGO86AcXL9JodngZbXuUjCkMYvwSVpp6PheCjZl3WkueoLYHBNveGIhlvpLQ86Uax6shDbhqt5R/BvDkxckXpQIWE5msRd+M9UabuTN/UP5EdLKcxpyPwytI4iueTA3hd5ycKP0wUpZeZkk8EKZqRn1hp7/pgmGeXiTXEYcm0mnjUGAp+0cEydp5mHJw5o6EatD9nMtmD5AO8b1YVoBD2eEcgRn7iNeCGgzc28l3a5PBcMuMm6XgAQkIAEJSGBWBJZTMMxAagZuJ5V51oLhmHe+MXE4pz/+8Y/14DHfGVTPonfxdY0rqq4FL+Izkf4DmsGYOOTbDPTV6SviO5JA35i+IQv3xS/3mLzGxCF/+mgxXKGPiIiHBkK/jtA2Y47tCM3pA9LXRjAcGieWcqQ3NGBQtdlmm9V96xhHZCp5qTcMTZfjo12QDhpN+ltJC+tUtAn6eeQdI5bs55O4sI1vxa7+c9lvbPaFkx79TdxP4SYqRhmlqNu3H1jqVRjgNC1rk985+XO9M0ZKpq8PP+UMS8FwyqH1bqz1YjGGCIJlE+IdUy7LUKaLxRSWUYRSMGwKSwiTWHkRyIOFBcpOKxefi0ljhuVhW0UrxbBSVKsTPeMfft9QuQlYHiFylmbdVL7mcuh08lHHqbR9BMN0/ss86gyLfxEoyxuYDn6sC0sT6CJaLahm8QQqeRoCjuFaxBItcSg7Iw2cK5aHNL4lIxrACH+JM21KMiJkzPsTJ5Zv5epH2VfyjaNTLAJ54CAscR6xlkwcLNXib456wUhJBC7i0FghPJYhD7Jyyjz7uwTDrKyEKIlolEadOFhwMiqCENqsD+wvQ1s9ZH8EQxrPAw44oIxS+3bEepD7pLxukxrJMEawRTQvRWgSJy0s52DXx8pwKE+am/gxbTq6zcmV91esDMfeD0PrCGJfLP2on4in8a1B+bjG1HfuYV4IZxWGCIZdTfarX/3qur2jbnOPlC+CfePkfMrBFdKL1SdT9THpj7Uh9yoDDNxfvDBSfxIYNUcoD89sb/tUMGyj4jYJSEACEpCABGZJYDkFQ/pHDCgz84Tvk8JyCoZ93/nW5j2x7dwiruEW7OCDD247pJ4tx6w7ZpgxY2VMnCTMeaIDMG2WTwL9MawC6beW7+/sG5PXmDjk1RZKYalLMMQIAWMUAqIZ/bQIgH3jZAZUWxkmbUMfiV/ycrp7dIJSb5iUTtu+0ugCl1hPfvKTz3ZYptpnB0LrLrvsUl9HZkJyLQj0S2JQ0tV/ntQXJg00AAwbSIt+XWZTlf2xvv3A8roqGEK3I5TCXsch9WZECqbN0dlsU1+56Pi7Q5FHbGGUIOp8m2DYdMpPJqSBeJUOLnkikjBdDsEoowwpZ1tFK8UwBB/EgTKUYlhU69IKr6uyIIjik7CPYJiHDgo604LbAg8ljitv4KjzTBWkYWkL+HOLWJfFSyISYpnXFJHa0igZjREMm34LyIOGAQvIpt8P9jEtGcGVEMEQ3xcsAIFFVawP6wOKf9nOwwQz5ghcXVMAwq9pMdglGOYaJEvSxcoMCy3Mt1nApU9oq4fEi2AYXx/NtNrKNamRzPF77rnn0lTqZppYlPGg4H5BCJoUhvIsy1ZaDTfzYMoy91YW8Bh7P4ypI7xk8BJF4N7iAYa1LvfU2Idw8/yav8sHVJtlJ8Ib09EJXS+C1HEewLEwHBOnLBejtbgKQKzHapmRONwixD8qbJimgIuCjBLi/5LBH9o5zmNSPSvzUjAsafhdAhKQgAQkIIHlILBcgiHuYuLHGSulfO86h1kLhmPe+cbE6ToftqdPMEkwjFVX+llj4rAYJroAs8EwfiDQZ8GikPS7+l5j8hoTpy5Qy7+y71zODCsPjZUq2+gH
oZfwDk7oG6ecFVlH7PkPQyv6yvRjEXyj1cxCMIxRA0VhNmQ5+zHFw/AgrtPoj9PHL0NpzBBRvqv/XPY3o9WUaWW2G/dhFnLM/qH9QAXDkJvyWQqGiGIsWlEGLPwQ8ZrWXDkGf2GozUwf7QptgmHXVE8qBiMXTLFtBjr+VA4+CW0Vrbyhp4lhqYQ0WljepLNeJ974F1PcPoJhLNew6MMHW1vgZsZSshQM0xB3sSEdVn6NXw38UGA2zmhMLJPa8mpuG8KobdGTNq4RDJvWfeTdJhgyCpIRpWb5mr95iCBipOFnlALHss0wVDDE+hJxLVadZXpcFyxcWegmU/bL/eX3tnrI/giGCFj4w2iGCICl2XRXI0lZ8xDF8o5Rm7aQ6d9lvWo7jm1DeZYPQqa5djmmzUMDsQ6Be+z9MKaO4GMR4Y2HUTMwAMF9zkJLpeVy87ihv6cJhliXRtRn6i+ryDdDfH9QRu65MXGaaTZ/l/dh3BPEirP06ZoR0r5tioJhk7S/JSABCUhAAhKYNYHlEgxxS8TMnAhh08o9a8FwzDvfmDiTzit9qEkGB7hFwj0SxgnMihsTJz67KQvvxohcmca7COXrKkNpcIQxUKbVlseXohgz++hrxF1b3zg5vkx32vdSFMQYoJwxV+5j5e8xIauEt82YS3roR1kzIDM4sy+fzPBkJlqMi7r6z119YdIhTozLolEkfT6H9gMVDEt6E76XgmGbD8MJUc/ic5DOJRZaVCqsyZhDzxRBrAXbBMOIQF3p07llCiE3H1Nwy+lymTbcVtHGiGFYgDHVepLIkodDH8Ew064nCX+YdGPxU+aZeJNuyLIxwtIPP3KIWn079/CexggfCPGDmJtxWpyhgmFMlxFIysUx2uoDFqZMC+4rcCEIxZcf6bUJc2U+iDgI1EzPhG8WzeCYTNuMr8AyXr631UP29RUMEZWzKnZXI4llWkaL2qbapywRniaJ3zl2KM9yann8mCat8jMvFHnxSr0eej+MqSMpB+bzaT9wQszgQAIvK4xOzSpMEwxLVwtdQmumCTMYwgJHY+JMOh98XOIGoXxQczxtB1PGy+kLpThNu4sV4qSgYDiJjvskIAEJSEACEpgFgeUSDOPTvMsKrFn29Am7Zjw1jy9/5922fO8a8843Jk5ZjuZ3/MLj07rslzaPwVUU/XreoXmXHhMH9zjMYCFgAIPPeQwNJvWzOHZMXmPikFdbYN2DuM/qMtwo1xrAkhLBcGicHN9Whq5tsS5kP3pAGejjRkPJPmYx0v/uE+jvbbnllvWhiNRoFG0h/Qn2dc2mwiCMGahYQdI36+o/d/WFSTvrXUyyhOW4vv1ABUNo9QhrIxhGKWY0Aku3piUWv0m/TTBsm+7GlFo630yrLX1ncZNiEZOpuGmg2yraNGGrHCGIGFauNMW8+IhlJb6IIH0Ew0xfLhfsKNPie8xpy4YZ6yesHbmJEUzbrKDKJds5f8qO8Eg46aSTlqYX1hvO/EfjzAgD0zIRRktG3CiIS2UoH0JhVMaZhYUhJsRMgZzkXJfz4drjbBb/BEMFrpxTl2DItAMaNaYeRIwjDucMp5hWt01tT9p8ttVDts9SMCS93G8ZmWFbM+RFZBLXxBnKs2wr2vxYki4smOLKsfFzMfZ+GFpHuF8YPeOzaS7PSyCiXITDcsGj8Bj7OU0wLAX4rodtVuaLyDomzqTy80LGIjm0SQi/8Q2DpSriemnlSjp5KewaJSzzUjAsafhdAhKQgAQkIIHlILAcgmEGw9tcZXWdw6wFwzHvfGPidJ0P20v3XPSV8eFeBhZLieCUd8YxcUiTgXEGqxF/CPR7EZx4Ty/7//XOM/+NyWtMnDLP5vf069qm3HIsrrSY6VUabYyJ08x32u/Mgpx2XPZjhUh97xPSh+NYXKGl/9CMG5dHk47LTMr4we/qP08SDNE8MLpqLrxJvqQ3tB+oYNi8kh2/SxFgiIVheZERseLMMtmkAeZ3X8EwglmU56SVz1jh
pFEvy4BVGCbN04StNsGQTnFEszYhs5wG3EcwjPUg5W5beh2BAR8FjNKUgmG5cm7JLOfPKAEdfgS7LC5STjPcZ5996gY3x/OJQIJPOUIs00pfHW3TZeMkljjLJRhmhIA88HUZH4f8Jhx77LFLU17DcKjA9f8ptVsY0qCwgAcBgRTRuwxl/ky3jyhbHpPvbfWQfXlItDFmf4TMPhaGHJ8HEd+p53Gky28CDwAsewmIsfgQnBTG8IywxT3IqFNzRDDTOsiXFwIs5sbeD0PrCEIhQimBe6zpRqEc+ePeY9GZWYRpgiF5xK8j9YwVxkuBmmkK2267bV0UXm4yMDImTtv5YGlJm0poWobizoB2oxzpLutzH7+oCoZt1N0mAQlIQAISkMAsCSyHYJhptekj9SnvrAVD8hzzzjcmTtf5TesLZxCf+IiV9HHGxCnzx4ch/vHwKU+gT8wUZXyfNwXLMXmNiVOWr/l9t912q11iUU4WNcninByHoMr1QFcpDTvGxGnmO+03s4iwqGsLvPcj1FFmFlQlIPw2+29tcdkWg6kubSbxyD/9KqwI0WzKUApzWZug7G9ExyFOl2CY7ZwL+TWtMePujTT69gPLcnWtY0F65+Qw81WShwiGgIsVCstZI3BlwY2mNQ8rKWGGSsj0xDZhjqWxsR4kYO5LnFRoBBL8BSCA5UZsq2hjBEPy22OPPeolw/nOFGUqOoIDnXsWXqEBIPQRDJnSx3RsrPqo1AhOlB1Bg0U/EATwAUFgf3wKEA8rQKYNsh21HjNtAvHwD8B0bEJpNs+UXhY4ILDiLubCCBKItjzUYilXLlYS608EDPLZ5AynrOSPTwpYJCyXYEhemKLjLw0RFFENZgTKieUTU4ThQCNM3RojcJFeRC582/HAIy38AcYnA+XAgjWiMWVDRMEqi9BmhVnvOPNfWz1k1xjBsGxwqTdMxaZB5HqmoSRtBDvuuYwQYQWJdSH3B8x4mE9z2jyGZyni4XuENgNrPiw1uYZMOyaUDxZ4jr0fhtQR7tE8rGgjuIaZTsuDg4VHYMpIJvWaAMNcZx5g+PIbGvoIhvGTStqIufjNhNvhhx9etzWI5lw32q9YR46J0yw7wujmm29eD060vQxHRI3VNvG57/A7mYGZZprN3wqGTSL+loAEJCABCUhg1gRmLRhiMbXhhhvWfbyuWVpt57AcguGYd74xcegnZBCZ93h8xCeUvsOx4Npxxx3rXfQ9836MIQAGAQlj4iRuPk844YS6P4HVYfrbvMez+GHpL31MXmPiYO1If4HAucZAg7437n0I8KCfEdGWAXiYEXBvtfXWW9ffx8SZdI3qRAf8m+TDsE8+0QuYecfU50kh1n/0Z9Aj6MMSEG7RUuLyK/daV/+5qy/MzDUsHvljunkzlPH69gMVDJsUO35zY2Yq8VDBMKo5SVM5EGaofHRSCSjYsaLD2okbcJJgiAUdjRg3F4E0t9hii7rxoMInYPGHv4m2ijZWMESUomFErGsLdKixlJq0gnEZj5uCRioNH+fCQykrR+dYtkcwZBsiHzdYyZDVobOSFMc0/T8isFGuxCFNrOewRExACMRJbULpQ4JtiAPcNJSXh0EEzeUSDMkTsQTLqjCiviDwUIcSYB7RdIzARTqpc0kzDzuE1KTNPuoa3I444oilMk3yu5f02uoh+8YIhghvlCFMSCe+QviONSKm3AmIX4wsldc6C1rkmK7PsTyz6nHShRt1MGVGeMWqDQvKhLH3w9A6Eou55IvgiDichxTbGcnEuTWBxVFiHY11JFOCh4Y+giGWerRZ5X2c9jH5ZYpHfo+Jk7h8UpcYzEHk5ZN7iUGLMnDNcB7MJxasvJhR36hPTFXGH+i0oGA4jZD7JSABCUhAAhJYWwKzFgx578JAZNIilW1lXg7BcMw735g4LJjJQDKBgXQYJGA1hxFHAv1JQt7v+Y6VWvl+PyYO6bQF3qcpD8JQ+oKxRuP4MXmNicNClQhehOb5NvtA9HnK
PlhcC9WRz/w3NM6ka1Sm2+f7JMFwWj4IuWgXhC6/jWUZcPWFoJq+Dv2cpoZR9nW6+s9tfWEMfuinwbptZmLKMbQfqGAYclM+y84u1ibNVZInRScuptzEKwNWa1xUhL0s5hBlOjdh15RJlutm4Qas/JoBB5fsQwQglBUtfuamCYalD4aYVCcfGl6ENCwdI1ByLlgDIepxw09zspm0+MQfIgr3IYccUm6uBR+EKiwBm4IhByL8MapCR79spBH1GAmi898M3KQwxgy4DDRksEQ0K8UCnLFy85UjS8RjtInrmYdEGE3jSgNJvFznsgxwyDLxWErSgCRw4yNkxgoy27HkZOQM0TVhmsCVKbswZ4plAg0iAlFEI+oPQhQBH5AIJE2hmDIiiDNNPlauSa/52VYPOSajTl1TkmP5WE5DJR6CH9aQeVhSBuplAqIy912E9WzHihU/FrE6zPauz7E8SY9z4oWpFMCpM9RpRqBiJVfmPfZ+GFJHsGakvuOvLwJ6yoCwiTXhTjvtlE1zEwzJ8OSTT65HxZorOCP6Yv2X6dRLhRsZJ/FJk3pCXeYeiPVl9ucTK18GRMoXHtpnHFqXbUaOb34qGDaJ+FsCEpCABCQggVkTmLVgiLjBuzSzyXD11Dcsh2BI3vN4T5wmEuGSiX4Zs03KgLEF76+lWJj9Y+IkbtsnU0Ppn2HBh//20rJtTF5D40SroGxNwZA+H31MFogp++j0gXh35r272W8cGqe8RkPF7CZPxDv6m216Q5lPUzwmncMOO2zJZReLg8a6splH+RtrQjhglVr2KzBswoKUhUwTuvrP7G/2hem70V+mr4srp64wtB+oYNhFchm24xMPQYkpvFhVrb/++ku5MI0SIY6FRLD06RuIQ1yEMNJlmfFpUyz7pt3nOMRQQpx70njSQPCJtd6QwA2BpR7nj3DWlwMKOyILDxAa6JRlUt7cKFwLzOyZMt4lEiQNzpOGlDIhNDUbuRy33J88HLBSQ5jF9JsRiXkGhGT8W1IOFu1AaO17neZZzjIvFu7ItcMH5Lq4dixfTxmoZ9z7fQSmsffDkDrCsYyMYZ5O4F5o+kMpWc7zO+fPPcp9jVjYJq42yzMmDqOBtM3bbLPN0iBLM93yNw9N2g3aqLiXKPd3fVcw7CLjdglIQAISkIAEZkVgjGDYlTf9JdzS4PKH6ZJ93l+70hqyPYsTlr6jm/HHvPONidPMt/zNrD/6ZfSF6Jc1/YKXx+b7mDiJ2/VJP7Wt/zsmrzFxusrFdnQKRDH6jFji9alDfeNQP0kTQweMcc6Jgb4O/XpmL7F46dqEo446qrZcxDq2j2FM336gguHaXJVVFpcpnYgdNCSl38BgKE1ym1N7c4yfEpCABFYjAQXD1XjVPWcJSEACEpDAfAnMUjCcb8nX5NZHMFxztN9WIwHELnz04c6NWUDx8b8aWSz3OSsYLjfhFZZ+/NxhsovvrkyFZaoqU5GZHsrUPj67lnxfYUg8HQlIQAJTCSgYTkXkARKQgAQkIAEJrCWBlSQYshowfU8s+Lbaaqu1JGP0lUSAmVtY5eFia9999+1lubiSzn8e54Keg0EYU6gR8QkItVgcr7Qwk1WSVxqUsefD1F9WBo3fuGY6CIkHHHDAkpDY3O9vCUhAAquRgILharzqnrMEJCABCUhgvgRWkmAYcm2+5bLPz9VJgBmPxxxzTIU/f8PyEGguREMuCobLw3rFpYq/L1ZFYgEOfA4ScBS62Wab1b4L5+1Xb8UB9oQkIIEVR0DBcMVdUk9IAhKQgAQksHAEVoJgyKKgRx555BJb/POxUJ9BAhKYH4H9999/aTFUcuU+ZCHYPn4o51fK2eSkheFsOJqKBCQgAQmMJKBgOBKc0SQgAQlIQAIS6E1gJQiGvU/WAyUgAQnMgICC4QwgmoQEJCABCYwjwKp8ux+487jIxpKABCQgAQlIQAI9CSgY9gTlYRKQgATOJKBgaFWQgAQkIIF1RgB/H3sdumd1/CnH
rbMymLEEJCABCUhAAiubwEYbbFw9e7u9V/ZJenYSkIAEZkxAwXDGQE1OAhKQgASGEfj80YdUb//aflV1+rB4Hi0BCUhAAhKQgASmElivqh5y812r2266/dRDPUACEpCABNYQUDBcw8JvEpCABCSwjgh85siDqkOOPqj63V9+U51+2v/WUSnMVgISkIAEJCCBlUJgvXOfq9rwoleott90h+p2191hpZyW5yEBCUhgbgQUDOeG2owkIAEJSEACEpCABCQgAQlIQAISkIAEJLD4BBQMF/8aWUIJSEACEpCABCQgAQlIQAISkIAEJCABCcyNgILh3FCbkQQkIAEJSEACEpCABCQgAQlIQAISkIAEFp+AguHiXyNLKAEJSEACEpCABCQgAQlIQAISkIAEJCCBuRFQMJwbajOSgAQkIAEJSEACEpCABCQgAQlIQAISkMDiE1AwXPxrZAklIAEJSEACEpCABCQgAQlIQAISkIAEJDA3AgqGc0NtRhKQgAQkIAEJSEACEpCABCQgAQlIQAISWHwCCoaLf40soQQkIAEJSEACEpCABCQgAQlIQAISkIAE5kZAwXBuqM1IAhKQgAQkIAEJSEACEpCABCQgAQlIQAKLT0DBcPGvkSWUgAQkIAEJSEACEpCABCQgAQlIQAISkMDcCCgYzg21GUlAAhKQgAQkIAEJSEACEpCABCQgAQlIYPEJKBgu/jWyhBKQgAQkIAEJSEACEpCABCQgAQlIQAISmBsBBcO5oTYjCUhAAhKQgAQkIAEJSEACEpCABCQgAQksPgEFw8W/RpZQAhKQgAQkIAEJSEACEpCABCQgAQlIQAJzI6BgODfUZiQBCUhAAhKQgAQkIAEJSEACEpCABCQggcUnoGC4+NfIEkpAAhKQgAQkIAEJSEACEpCABCQgAQlIYG4EFAznhtqMJCABCUhAAhKQgAQkIAEJSEACEpCABCSw+AQUDBf/GllCCUhAAhKQgAQkIAEJSEACEpCABCQgAQnMjYCC4dxQm5EEJCABCUhAAhKQgAQkIAEJSEACEpCABBafgILh4l8jSygBCUhAAhKQgAQkIAEJSEACEpCABCQggbkRUDCcG2ozkoAEJCABCUhAAhKQgAQkIAEJSEACEpDA4hNQMFz8a2QJJSABCUhAAhKQgAQkIAEJSEACEpCABCQwNwIKhnNDbUYSkIAEJCABCUhAAhKQgAQkIAEJSEACElh8AgqGi3+NLKEEJCABCUhAAhKQgAQkIAEJSEACEpCABOZGQMFwbqjNSAISkIAEJCABCUhAAhKQgAQkIAEJSEACi09AwXDxr5EllIAEJCABCUhAAhKQgAQkIAEJSEACEpDA3AgoGM4NtRlJQAISkIAEJCABCUhAAhKQgAQkIAEJSGDxCSgYLv41soQSkIAEJCABCUhAAhKQgAQkIAEJSEACEpgbAQXDuaE2IwlIQAISkIAEJCABCUhAAhKQgAQkIAEJLD4BBcPFv0aWUAISkIAEJCABCUhAAhKQgAQkIAEJSEACcyOgYDg31GYkAQlIQAISkIAEJCABCUhAAhKQgAQkIIHFJ6BguPjXyBJKQAISkIAEJCABCUhAAhKQgAQkIAEJSGBuBBQM54bajCQgAQlIQAISkIAEJCABCUhAAhKQgAQksPgE/g8AAP//mVdIaQAAOBtJREFU7Z0HmCxF1YYbryKKYg6omHMCcxazCOacA6KYA2ZRMaNgwpwwiznngFnMijnnHFBQMSP/ffv3uxyKntnZuXfnbu++53l2e6anq7r6rerqqq9OVW9z7EbrNAlIQAISkIAEJCABCUhAAhKQgAQkIAEJSEACGwlso2BoOZCABCQgAQlIQAISkIAEJCABCUhAAhKQgARCQMEwJNxKQAISkIAEJCABCUhAAhKQgAQkIAEJSEACehhaBiQgAQlIQAISkIAEJCABCUhAAhKQgAQkIIHjCOhheBwLP0lAAhKQgAQkIAEJSEACEpCABCQgAQlIYN0T
UDBc90VAABKQgAQkIAEJSEACEpCABCQgAQlIQAISOI6AguFxLPwkAQlIQAISkIAEJCABCUhAAhKQgAQkIIF1T0DBcN0XAQFIQAISkIAEJCABCUhAAhKQgAQkIAEJSOA4AgqGx7HwkwQkIAEJSEACEpCABCQgAQlIQAISkIAE1j0BBcN1XwQEIAEJSGD1EvjQt47u3vbVf3Y/+eOxqzeRpkwCEphK4LW3PdHU3/1RAhKQwKIJbNiwodtuu+26bbfddtGn9nwSkIAERkNAwXA0WWVCJSABCawvAoiFz/zoP9bXRXu1EliDBBQM12CmekkSWCMEtt9+e0XDNZKXXoYEJLDlCSgYbnmmxigBCUhAAptJ4Jhjjunu+8aj9CzcTI4Gl8BqIKBguBpywTRIQAJDBPA03GGHHYZ+cp8EJCCBdU9AwXDdFwEBSEACElh9BP7zn/90N3zRUasvYaZIAhJYNgEFw2UjM4AEJLBAAqc5zWkWeDZPJQEJSGA8BBQMx5NXplQCEpDAuiKw+/OOWFfX68VKYK0SUDBcqznrdUlgbRBQMFwb+ehVSEACW56AguGWZ2qMEpCABCSwBQgoGG4BiEYhgVVAQMFwFWSCSZCABCYSUDCciMYfJCCBdU5AwXCdFwAvXwISkMBqJaBguFpzxnRJYHkEFAyXx8ujJSCBxRJQMFwsb88mAQmMh4CC4XjyypRKQAISWFcEFAzXVXZ7sWuYgILhGs5cL00Ca4CAguEayEQvQQISWBECCoYrgtVIJbAYAn//+9+73/zmN93Zzna27iQnOcliTupZJLBMAj/4wQ+6Y489tjvrWc/anfzkJ585tILhzKg8UAKrmoCC4arOHhMngXVPYFGC4Sc/+cnul7/8Zbfrrrt2O+6441bn/pa3vKX797//vSkd5zvf+bpLXepSm777QQISmJ1A7qeb3/zm3YlPfOKZA77rXe/qjj766E3Hn/GMZ+yucY1rbPq+tT9sEcHwL3/5y7JeR3+Tm9yke+tb37q1r33h5//iF7/YXeYyl+nPS+d5tRhvI43Y9LnPfa677GUvu1qSNsp0vP/97++ud73rdac85Sm7P//5zytyDZ/+9Ke7gw46qHvTm97Ux/+gBz2oe9rTnrYi51qPkV7hClfoPvvZz3YvetGLurvf/e6jQ/DXv/6122233brf//733bvf/e6OBmC1P/3pT93+++9fd53g861udasTNBr/+c9/ds985jO7T3ziE91vf/vbvq64xz3u0e28884nCJ8dr3/967vb3OY2/f3wrW99qxe389tSWwXDpQj5uwTGQUDBcBz5ZColsF4JzCoYfvWrX+1e97rXdYcffnj361//urvoRS/aXf7yl+/biic96Umn4mOQ/0xnOlNHvxnR8CxnOUt//Ec+8pGOvsMXvvCFvt9wkYtcpBcLbn/7208UHehTko6vf/3rfXvs4he/eN8m22uvvbqTnexkU9NRf9xhhx369GTfLW5xi+6Nb3xjvvb99Q996EP99bJzl1126a55zWt2CCKTjD7+csNMiutLX/pS94Y3vKFj+4c//KG7wAUu0PO+293u1rcrh8LNE2Yonln2UQboK1AefvzjH3fnOte5ekb0HZK/bTzzhGnjyHfa5S996Uv7PsvXvva17rSnPW13wQtesLvTne7Uc8pxdfvf//63z9d3vvOd3Te/+c1uw4YNHeXnWte6Vkfbf5tttqmHz/15nrRNOtmXv/zljv7ENHvEIx7RTbuP4XTggQd2j3rUo7o73OEO06Ka67foPOc///l7ruQF9/os2spOO+3U/eIXv9h03mtf+9rdBz/4wU3ft/aHrSIYXve61+0rxnkuHoHklre8Zd/p/PnPfz5PFCsehsrse9/7XvfqV7+6o7KP0clmRAlbrYIhIsnlLne5JNntHATe9773dbvvvvuKCYZU9Be60IX6MkbyGIXYe++9u8c//vFzpNYgQwQYXeXh9IIXvKBDEBub3fOe9+xe+MIX
9sn+xje+0dH4rFbrorq/fj7kkEN6oS/7KHc8wGjYIoYzMk49h/GQHBqR/uEPf9id97zn7Y9h1O2mN71p/3nWfwqGs5LyOAmsbgIKhqs7f0ydBNY7gWlCQ9jst99+E9vaCC60c9LmSZi6Rfi69a1v3cVx5l//+lf3kIc8pHv2s59dD9v0+cIXvnB36KGHdmc+85k37ePDU57ylA5xZMgQKxAfEa5msQiGOB7QrmOA+YY3vGFH2uhvv+Md7xiM5gY3uEH35je/udt22203/T5PmE2BBz48/elP7x784AcP/NL17dDPf/7zvThWD5gnTA2/nM8IOginiEKt0U5GeGXwvto8YWr4+vmnP/1pn1cIhUN23/ve9wRlC9EaHQZP1yG72tWu1r3nPe9Z1mygoXjmSdtQPNn3xCc+sXv0ox+dr4Nbznn2s5998Ld//OMfvXMD/RbuN9hsabvXve7V9xuf85zn9H3HOGNVbeVXv/pVP9uKcyMwn/Oc5+yT8ZKXvKQfLKDvSf9rzQuGj3zkI7s73/nO/cVP+seUNKamzWNjFgwZBdp333277bbb7nijN/Nw2JJhqodhLdRb8hzrKa6VFgwPO+yw7kpXulKP9DOf+czEEaT1xHxLX+uYBUM8CmnIxYYEw+c///ndve99777s3PGOd8yhx9te5zrX6c5znvNs2kcjh0bGuc997u4rX/lK71XOw5uHOI1KGsLVmOJy1atetR/1RHRFfF2uKRgul5jHS2B1ElAwXJ35YqokIIH/J7CUYJjZEhy9xx579B6Fpz71qXtBLwP2TCFE4JtkeOYx6Pre9763n4n0vOc9r7vPfe7TH45nH15hpzjFKXoh7rnPfW6/H+EAAfBEJzpR/x2hh7YVhkiJtxReix/+8Ie7JzzhCf1+0sH3WTzFIhjSR8VbMvbkJz+577PynbTd5S536eN7+ctfvqkPy3VXEWeeMDlfu423FvsRsfbZZ5/e2xKvuAyII45++9vf3sRmnjDteWf9fsQRR/SiLGIh4iCeaxe72MU62tyInNn/ox/9qDv96U/fRztPmGnpQdhlKit2wAEH9G16PB1xWMJbFUM3qd6gCGUpWzgX3Pa2t+2XtuK4eJYiRpOXm2PzpG3a+ehnkEbukUmOTehPQ961CKrcZxFJV0IwJL+5lzA8YU91qlNtcpJgZlb6U1UwpGy0wn7qmTUvGK70FL4xC4bTboSt+ZuC4Zalv9KC4UrHv2VpjDO2sQqGTBNmdLiOdg4JhvFApIHB6PYsxujzM57xjL5RlBFfvLwZzRuafh8xkQYdAuNy1i5MehQMQ8KtBMZNQMFw3Pln6iWw1glMEwyZYYE4SNuKgVMGZuv6ZOmbwujjH//4JkGvMvv+97/f0R5iVhDTkfHGw3OQOBFtEHkiChKuehHiFZWlZa5//ev3HmCsXc500ogUhGFgFi8nbFaHgiHBkKmkOLdgiIWIGEkbM+Twkoy4hMcax84Tpj/BhH/MSHnb297WL+WFo0TlHVGFoHCNV+c8YSacfsnd1ZMRr7BLXOISm8IgUGWpnqc+9andQx/60P63ecJsirT5wNR4podjiIaUixh5cclLXrJjGSBENETeGOLy7373u94Drh3IT9+HsrU5szjnTVvSOLRFcENgm7Vcs/QSs/0Iw/VWWwnB8BWveEUvqjPV+VWvelU93fE+Kxj+T1WdVTCkMDNKgDGawppvrX3sYx/r1+BiTQjcsul0Umk861nP6juoBx98cB+EkYcznOEMHeu6kRGsE4iLJ4WdiptRE27Y2He/+91+dIffWWCSOeaE4SGw/fbb57DjbSn8VAikgYoRRRj1vHpK4rbNAwAVm8JJpZ20saXwMuKDUQFjlQP7WNfgU5/6VH9DHHPMMX1lcLOb3ex4D4Q+4P/+Md+dufG4ZcPpile8Yj/t+Tvf+U7HiwZ4OKXSquHq56UEw3AlDJ5tdU2GpJf1IhjJYtSLtTzqg5c84+GIYMCo3JBRuZIvhIMT
xhoMjNZUY/QIF15c4G90oxvVnzrWbUNMwzhPFSh4MQicGHlhhIdRObhc+tKXPsEIXK53qXLEQ4o4KRfkFQ+LG9/4xn3eTZqSTD5RBih7jILg/s+x7ZTR413Y/74QBg9QWKc854GNNxgNGuIOZ8oWI0yMZpIvlEGMMk+FCwsqUhohPHTIt3ZkhvLFsdxfhOde4jv3A2WLePOQ5FjuWZhwHzFSyahWGh39yaf8wwX+b3/7W7+GBm7clBv+EMG4/8lTvNuGbDnXlPCci2uBJ2WKvKAOYOQqD82hKck0lkgX+c59Rhm98pWv3KcxDaqcgy0u8m9/+9t71uQRjUXuI3i2x9M44n5kZIo8XY6RLu4JGg4ve9nLuj333LMPPiQYsk4pZeMDH/jAzOdhHUIaaaSRch7LCDY8U34++tGPblqsl3K2VB2UuNqtgmFLxO8SGCcBBcNx5puplsB6IVD7Le0101aOhxDtbDwFW6OdQ1+GpagQ/1pjhhleW/HKQ+yLR1/r3UfYI488clNfCkGS9jT9HAZosSHRgz4o3myIkI95zGO6xz3ucf2x0/4NCYb0G3KNrRhGXFUQY61C1r6bJwxxpY/IZ/rVWQcywhbrNCJQVmN9btrSWBXL5glT413qM+UAMY0+KO1++ou0u2njt0Z+MUWd9j79emyeMJQDGGMIo+lzIUrhbUe/iOV/WsMjE882+rv02TDW1ad/gdF/jBbR79j4j7Xw40RAH7JON88xs2znSdu0eGu6+Zx7YFoY+vrx7GyPG7p3cgx9MLQE9Aa0A+oFNCWWEaB8TbL0q+gHZ/k58p9+Hx6/9Gvpc5EunDYwPD0px2gA9HOxiOF6GPY4/v8f7s0oshijNVVMogLJtEs8W1izjYVchyyd3owsIFqSMekwx/sFUYdFSOlMDxkCCB3dKogxcvKABzyge/GLXzwUZFPFz4+pdNsDEZ4Qiuq6YVnDEDGBgoghXvCyhdaonHhAsTZiNSoCKoTWKJgci9jxsIc9rB+lao+p36cJhoiy4c40RwpyhDimILLobPVmSrxVVKgu95PWF0B4wlUYZZ4KmUqY80VUTrw5ju/cyPXmpcLmhsZqhfKa17xm4uKmHM811obCUuWIm5+RPx7+rVHWmH6Ja3rKHcewdgLu1HEdb8MRhmmiEV/a3/nOIsKIx0PGgxrhG+GP8yLeUZYYXcJYG4F8pAJkQVsYt0ZlxQOkipevfe1r+8YPFRd/GSWrYbn/YDIk+vMgo+yf7nSnq0EGP2fBVypY3OGzPl49mDxuheLlXhPxIU6Tz+FTz0EZ5OFKg6gVDGmk3PWudx3MRwRA8pcBkNiTnvSkfrpIvtct9QKNgDy8+S11SC07Ncy0zyzmy/3IfcPgRcTIVjCs9zvlhHvoJz/5SV9u6v3Unut2t7tdv64G92SmPFOP5TxpXOCKTxli0IR1PDLdpo1vlu8KhrNQ8hgJrH4CCoarP49MoQTWM4HaD2g55GWG7McxhQHm1vDkeuUrXzko4FQhL/0g+nW0qzHa0G37n0HYOLHQj6ENhtMKnmPYkJDH/kzdnCRkcUy1tDuraImQwdRV2qIIVmnnJRzpjcclTjz3v//9e/FjuWGIjz5JnB8YhKf/SjsVjzmWtqEPiQNJNfoVmZaKcwxi7jxhapzTPuOswQwbhFucGBB5kl+T2rlZ+od4k7/zhKENH+eZKkbTP0PPgMPQ9OF4orYehnnPAv0TlpGrljBV5Ky/z/p53rRNih9HFJxaIo5yb/zsZz/r9RrK7ySjH4L2E+P6uG8mCYbcm2goQ31k4mjfTZF4I6CTPsoj+Vz7WpQf7pe2HCc8+UB+YAqGoVK2jJTgyYMwwM1H5x1h4aijjupHXejM0qlGmUdApCJGRc8cdG4C7IEPfGCvoEfoYSQC4BiVHSMfiC2s7cCIC0aBQHxCjMMLMaNBCHC4DMfw/orXGhU7ogDqMJUb6cPoqCMi3O9+9+vFs4igqPpU7Hgjct6lBEPSivjG
9SCeIKIiBGFZILf/svFfFSJgRAeeAopQx80Q2xzBMA8M4mIUA9EpC3hm9IDfYMJNiLgKxwg9EYH/+Mc/bhKMGL1AAK4GR64X43oRa2CAIbRltKk+PPkNYYk8j7EmGxV0fUhWsZK8pswgrCEMR7xDKOZBmZGUpcpRRgk5L3mMtxWeVYwqIJ7FquhD/mdtEVhe5SpX6XlRThFHsUkVUeLDI4zro5JMucg9wIgQD9YIhpTvLFRMBYbgTWWFR2UMIYcHB6OcWQ+E3+rUhwiGCcP1MiIFK+6T6ubN9SKu05CCLw9WjLQx9XUpi2CY43gwUj/gAUhjCaOewBswojW/LfeaEJrJ8wjdMIQblTz3db2mKhhSFhHC8iChHHNv04CLtzP5ighIma2NQfbTKILbxzaWExa0xag3qD9iabjVspPfpm0z1YVwNLgY9EjDpBUMEUvz0MILmjIVSx4+/OEP38Q4vzENmTyv93DOS77QiEJApK6i7DEAxD2WdCSe5WwVDJdDy2MlsHoJKBiu3rwxZRKQQHc8x4GWRxWoahu5Hhehjn1xCsnvcWigXUTfCKNvRH+HdmEcZHI826wbzeeIg4TNgC195SGxJMvBVM8y4phkaXdWwRDnABwf4vAyFBanBISQ9JvnCUO8Q4Lh0PmyjzY47UxEmmnpy/Fs5wmD0ARvOKSfBlM0CcQg+pJY1qPsv5R/Nf8YSEd0XW4YNJFJgmE51fE+kjb6vvTHsDZ9dZ1JXlpDHxZdgz5bBvgJn6ntx4t8M78slbZJ0SOM0l+i70a/q/a16ePiSYugvlR/Ix6ekwTDlGnSgX5CX4kZkvSRo/nUKfBJb7w56SPFkasVDOnjoh8wIy79Y+oDZvChNcXbc90Ihogy5zjHOcJwcEvFGTdRKks66xiVLV5rLMKPeELnlQohHngcA2SO4zxM0awWoYd9hEXsQMhKAYqqTmd4//33r0F791AKRK186rQ6brD6RirEGVzJeXDgmYXoEst5WgFoFsEQHtwMsRRCvnNO1nCoAhziE4JD1nZgFIu1MBDTsHkFw7p2BmmgwswIE1OGyWMEF+KHZRgjqnB+hEtubMQLfkOUQfBtWZHGCJPkKeo+glDWo0BciWsvwi7iZIyKjZGdWASncIcTZYd0Eg7RuI4gpgIiPGlAcMSmlaO69gDefpSxCJqEJT0IxxhlEE9HLFNcW4GICoWKApEJbsS3lE1aw7DeS8SBcE6lnwfU1a9+9V6sQtyhbMf9mWNpjMAIVlWcroIhQiz3Z66XMFwXxrVGqOp3bPyXBW8Ryyj7S1nyj+MiNidMGlt858HN1HtsnmvKWnyEb9ea4cHAVOB4HlbBMPcE19repzQguE6MOoqpCNQzTB1Puc79wzEZvCAuGn25fw466KB+mQIE0Ty4OX6aUS/Q2ERQ5kFD4wtLnK1gWFkmXspEFUopG3huZooxx0UA5eGMUEsjE7d6xGgEfkZf4UUjg/hofLJF4IYXDR+m7GTEPOeetlUwnEbH3yQwHgIKhuPJK1MqgfVIoPYP2uuv/a62P8ixzD7Byy0D0XWJFn6P80mdocH+Scb56I8idNGXoh2Hw0bEBMLFa62NA9GCAd6hfnJ7LN+HBMOsc83MIQSnIYs3GjP3mNk3TxjipU0aRxP6SEMzkmjnIqjQ749TDO1eBK/2DdJJ6zxhCEve4aRA+z0OAnCg74AzBm3r6ujC7EC831qrIjN9W/oA9HOwWcOwRjhlK2sQImDRtxoyygb9Wtrbac/zndmc1RCzKcO8LGfI4jE69Nu8+2ZN26T4qxaSY+g/5X5jHzOsJs0GTZhpgiHaRvrL0RESDsGXvg/n4xycK0Z5yVqk8T7lt1YwjEds1REoX1XzIVzucZzUEJ1Xi22zseAcu7mJAeDQKMekeOmU12m/1QsMsQKxCaMSaacfzioYIsDgWRjLdGQqEFw/42GT3/fbb79+ejEVMx44WNYfqMJXjmeLizpvCKUSQMRJR3hewbCKVjkP
IgDz4rFwi8DGPgpxW7lSMSGUYfMIhqwlF284xECEj2p4GvKAQAyg8o5nXo6hosrURoSUuDZHUKEyY5Qmlnn/sMw036wFQoWWtOSV6oiPeHHWvIqnE3GGSS1XcXPPObPNA68+WKtg2JajPIgJn/xIXNmmQqIyi2CYN6ORZoSXPDQIg7CCAEOFs9tuuyWaidtZBMMwTyRxl+Y7Dx4YtobgE+9PpqgiClfBEOGnppvwEaUY3Yn3buJNOR0SiXNM3UYwRIilEVCN6a6MwlDX4AVIw2GeayKfI64zdYJRptYijLG/CoZpVGV6dxsuQmQq+TqKV0VOwtEA4D4jLawNuGHDhja6mb+zHg71VwZcEjB50wqGESs5jjzCk5Np0dw31I1cH1Zd5PnOo4KRbdKN5WENUxo/PGxzX3PfcB8woJGpJn2gjf8ov7OUc45XMAw1txIYNwEFw3Hnn6mXwFonME0w5Nppt8SzqQp/tJ1oR+U3jsWLKDNhaE8jCtBnQmjKTC2OGzLe+kvfJGJVHdiOmEC4Sd13Bp7xLuN8CBhLWdq21cMw4h/retPXHTI805hNQn+CfsU8YYbiHdpHH6BdDx1Blb5gdX6oYZcbhr4rjkPMookQxbUhVmVKcOKn74uYh00S/6pTBX1UnC2WGybOMznvtG36AvUYroXp4un38BszNXGQqeW1hsF5hb7BUvdDDbPU51nTNimeOIfQ76BPgXCL9oBzB3pEPEC5pmnrv6d/PuRhWGcx4nDDzLHcw6SLviH3LzNH09dhP2tswqztgykYQqexKhiyFtu0zCIolW71XKHSQySIUMgx8Vjhc7VZBMNZBArcbxEGEZKY4kylg1URKsIfHexJSnwfqPmXcK1CvZSHIaJR1qZIlLxUJOJqlOi99967V7injfxEhFuuYEgFjEiJcWMiirGtRuXDzYZQQEU6ZNmf6YvkMSIto0hViOCaspBwRCrii9jCTYnAiEV0w/OJyoNyF9GO0S3KXsQajs+r4xn5oUIfMjzmsphuHvARDIfKUdhTWR166KFDUfZ5w3Fwi2DIiAT7YgjhpBX3Z8TR5QhGswiGeFMivsdy3/AdbnWdvRwDS/IUi1AawZD9rUcvxyEM85AdWpSYe4qRrSGOhG0tguGk0a32vprnmri/4wHN/RgRu6aFsoqABqcIhjS6MoqJ5x8jTa1RxkgTxoOCuiULWrMPEZdRZvKckab6IOL3eYzpIMRHWeNeiuc2cU0SDGkUUvfBgfu9tTQC2d+OYHNdlGUezgiEnJu1MeFFXDy8U+c89rGP7acJcA9Tf3JOpspgjJ7P0hhRMOxx+U8CoyegYDj6LPQCJLCmCSzVJqF9U9f4pm2LKMdMKIz2ZTzlqpgXsYSBXdpFk4z2FWvT17XC2wHWtMmJo56jxkm/i6WAJrXb67F8HhIMEWEYPJ4mGMZrMjOn5gnTpmXad5YNYhYbgiocMxNomkg0SxjapoistPcx2tO08xHV6otNa9qqY07rEJDjsu4e32mf41wUZ55Zw7TeZ4l7aEubnP4YAjZ6Cg4uWHUgYRYifS2Ow2iTo8mQNpYQou+NUa7r8k/9zs34N0vapkXPrDj6Ycwui+ia45mlxb3HNdUZcvm9bqcJhhyXlzvymXKACHi1jbPv0BEmibd5twKCYl4URHgFQyg0VgXDWd+S3ETRvxadDI9R6eYV8tnHNiLBUEUYoWfS1M4IAKjGqdRr3HwmDVQeeCRGkWe9Pl6EMKu1wkbCLSUYoly3ldOQYIggi6BD5QzvIUunP533oWOyrxbq7Mu2nfbL/pw/x0zb1vNnui4VES65iGR5sLUPpfr2MBiw/h4iMzcwYgNCFC7jEapyvYwORZjLw2xSeSDdiI8RTeKJNa0cRbSc9tr0LI5MWiMYIrzwMBqaZspxjAYyhWAWT91ZBEMeqNWLNpxrmtp8o/HBQwMLxzROEMiG3sIVwZD7Eo/cavMKhoygZlptja+9r+a5Jhp7EQmH1qHI+SK4
RzDk4c6DZlbLC3l4yLHEAvd2a5QhGpARzNvfl/rOGrCIzTRE6mh3wk0SDPP7pC2NXwR5bMirdChc1g9lwAPhlBH0iK6MsuatbrlPqYPzUqqh+LJPwTAk3Epg3AQUDMedf6ZeAmudwFKCIddP25qZE3GsCBOcJGjn0K6r7WX6V/TrEDNoQw8NNhMH/Q/aRIkXkYc2dCtSMPsma52xbnzrdUdcmUVS08H+STYkGMbhoopNbfh4fbEMDTO65gnTxjnrd/rzrF9OXz4ejkuFHQpT3zpN/wh29C+X6ovVvjnOIziRtFaX0cIhgzZ5HIBmDZPj27hn+U47Oy8tTX+npqld+ok48RilnY5NE2L7Azbj31DaNiO6foYo/amlyvxSgiHlAbGdPnBrxI3DE/dp+lc4hvBSXjSpTDtPuKqt4NzhlOSNZLaEYFgXPQU2nXpe7BDRLhkwi2BYRaqEY1QCoSsvTEExprJDlEQkPPzww/sbK4IhAk+8vobWAEi8Q9tW2MgxmysY5mGTteGmCWG5KYZYJD3Z1kLNPioURJuMtrQVG4o7+cDNwyKj04zRgEw/ZNSDaaVYXOwz9TjTTGtcCCmIIYiCjIhwTRk9yPqDeBUyMpR19aroGuFvaIprzlMrT0ZTGP1JuCF2+W0a+1S4Q+IcD3jKIGWbc1OJxCaV+fye7SyCIaM5iKyxTA8eSlOOqe778Y5djYJhBPx5rolpBVkzldFJKvohQ1jk9wiGdfoza1fE23AoLPvwkE7jk8EHXoyCyE/ek38xxHNEa8r3cq0uV0A8rWUEkTxHbOc+iTdte2z9zj0XETP3af29/VzXRMxAT9b74dztGo2I49XLuI2vflcwrDT8LIHxElAwHG/emXIJrAcCabPNcq14OyHCMMjOWt70QbLOdRXZWMKF6cWtU0Q9B55dWZeONhOzbBAl0wetx9b1slvHgByX9wDQz2XQeikbEgwzy2tanyEOA3hQ4qk2T5hJacNxhzYu52+XJksYONHW5hj6PHgTLjcM/Z4sJ0a8iEUMgGcmUs7VblneLMtxTVqmKP1UwnIeRKblhsnx7fn5Tj+ceOkb03dtrTo+xQkka6tPm/mVfjnLLOGYMY/Nk7Z5zpMw9Fkp9xhelJOm/UcbGZqSnLjYolfgMUgfiHUE6RfFKOuUeSw8uffRDKpVbUXB8H9kNlcwjCcS0XHz45KNDb3yexbBsK5710e08V+8vvjOCA0iUjVeHIIQEMGQ3yIaTHrLKxVa1m3jpkqnf6UFw7yJCsGOCjJKd64Hrza8e7Ah0SvHZVsLNQ8uHm7VxRflHPGEShnDFZw166ZNiWbKNxUq6yvWGzcPMoQ+pjYj2BAvD986TZ3z5M3CjB7BlJfOIBJRmXPdiL2kDUERsa2deowowRqMkzhxjvraezjwgI4oOFSOSAMVA+WEkYWWPXFm+gHXFQ9DHmZ48CHixYuPYxGBKfNwx4ampfc/lH+zCIaMktSHYC3/k7zGqiiWNTlWs2A4zzXxkMy9MSRSg7nWZxEM69oWYVOypP/Ig5t8puFIw5N4uAfIhwjaHMgxrBfJmpzYpCnY/Y9T/iFCtksYTDm890bmXqJ8MyBC/RrPvxquiugRAOvv9TPT1KknuVZeapQHNssLMJWi9QRnnRtG5zIiXeMa+qxgOETFfRIYHwEFw/HlmSmWwHoisJRgiNMHxsDz0LEs0YIwQJueNh6WWRW8iJK2V2s4ZGS9fURFRKZpXmW03yNmDfVliT9910wVbs/Zfh8SDOlX4RiB4U3XDpKzDncGquNgME+YNi35XkXU9M3yW7ZpT/Kd9jcecRFeZw2DIEd/laV2aItnoJ01KeM1mvO1W/r8OKlMWg+d8OgZtR08T5j2vPkeYa++mTe/ZRtRN+8jwOMQ774qaufYbDOTjrY6x85j86Rt0nlYloy+N31nyvw0r1rKJHrCJJsmGFJm6LPTr2/vb2aZ0b+JcEjfDkE2a+tnebR6XuKL/qFg+D8ytYO9
3CnJEX6IKtNfs3Aq+9rO+byCIW87QglGxKkeL5wDi0hUBUM613SCCUOFWQUYwmS9Bn7HoybekCstGPLgyfTPIZdhKqisIbhcwbAW6ghTXCt5griGZcSMz0OCAkJqpsO2r3OPlyXMqGQZkaoPV+KMZbowxyIG4qFVX5iSipCH2iGHHNKv15DrJo7q+US5CbPEj7cf+U2Fj+CYxkDKwpBgGO9B4mivjX0ISwglPHRIN5VPFXDzEhiOjVVerDsRd/D83m6TL4k/v8Mm3nOtYFjfypT7LOGyxVsUjtiRRx7ZC2urWTCc95rSmGIqOmuMtKJvPBfhEMGQz3nTNfUCDZXWaJzR2CNe3o6Wac2U8fqGdcIh2O244459OZlU/tv4h77DYJJleQO8WamTEKtZNzH1E2/zxuuwtTQo2M9DMfVaexwPQ9bhZApz63GLKMrbxiijtb494IAD+kGMoZfktPHzXcFwiIr7JDA+AgqG48szUyyB9USgFQnaa0/bqW3vcBxTivNiDNp/tAPTRqUdhMdS6y1G+4p1rulHMXuKNdUjMLTnrt8z+wrRh35LdUKoAiQC3iyzSoYEw7qm+VBfKI4jpCtOCPOEqddVP2eqJ/uGltxhf2bbxVlknjDEE2MmIrPrENfIE4ypxjgNIea2fYUsxUP+4m1KnzRW35qNg0xm7M0TJnG22/TZcBygzLUeqelDEy5aQXWSoZ+aWX+Jm+WU6JtgcdDJb8vZzpO2SfEjAEawHhLeEYtZK50848WMlJdJNk0wrE4o9OVbJ6bqMQo7nCtYHiAzH9tzKhi2RDZ+r4IhXisZlRg4tN/FzYXnGZUlFR4uxHiBIXjwMgYEFjr1CDko86xnl/UEqmDDFFJGYlIJTxN6akZTwSIQYQgreIQdeOCB/feqTtd19Kg0qMx5qQAdYEY/sq5hFdOIJIIElQyVKt5F3MgRyzgmi9XWhVPrdFqOweo6CZmSjKrNzRGvPzylcD1HAKOyw3sntjmCIXGwlgMeoFgW9ETs4GHI2mQIbpwznkrkCYIDIgX5TKVZvau47rz8pI9047+6xln2seVYKomM+BAfQkQqxZo2js8aDXzGSCcPkrios+ZhRp+ogAif6aF1Adpp5Yg4uVa88UgPAh9lgwcJcfJwRkDB+J2yjGW0BW68kIRyjREfZS9vosZjK7/1Bwz8m0cwJBrKAoINxn2KpypiEJUt5RTPSSwjUXxezYIh6ZvnmqqQTF2Fqz55xX2FsMyit7EqGFahHO9XPFgp2zQyqJfSMEOwZ9pt3ppM3DyImJ4fq158tVHHCDXlnQc24vLmWBo3WZszcVVmuOVTf1EOeLghILKWJlbLQcLWLWWIhiTllRep1CnwHBdBP16z3M+77rprPy17FmGcOBQMoaBJYPwEFAzHn4degQTWMoGlBMN4jMGAfg/9CUQFHF9Y4oo+XHU6yRRlBowZOG6NdhD9DYy+YISa9ji+VyGRthntT4zBV5wASDuiEV5QCCe0O4lzqWsijiHBkP11vXqEmj322IPdfb8JkQSj71mnPc8ThjYnLDDi4qUWtEcZpKevRb+c39OGRnOgzxIvThyC6M/ME6Y/afOPfhnTUOmb0ZfFmJ1EP40ludKnrWubw4PjI+aSHphhtPd5MQY2Txi82hicxxCo0lerfRn6/Zw/L1PkPPRJKJOUBURd2uhMY+daMHQQ+vfoLxj9d3QNwmIIsIjk2FAe9T9M+DdP2qadJw4bXAt96HjloglQ/uPsMuSUU5M4TTCsy6Yh8sIzLwhFQMTrlHIIL/SYlHX65Fl6rZ5rkmBYz0N8hEXHivBP3UI/tL7Etca7tT5vs7ETd+zmnrwKhrPEFQ+cTDklTL2h+M5NmrfNUIBZtwyrXlT9jo3/EGjoiE4TesjcungsghuZieiGUQDiakqFj6cdFS2LaGauOsfxW0YeEo4Oce0sJx38jqVC3VKCIXEigjF1OGIa+2LcUNxccBkaGcpx2U4q1PyO5yTThjlP
FW95MHHDkvcYFTo3VhiyDxElAh3fY1l3gu/kA3k6yaik8qYnKkrE0RgCYETbSesxMG2TmzGcSCeib/KduFpRNfk3iR1lg4oi1w5vHvS1XBAv+yMYVvd6fuO6mRrL2pmJZ2jUkmNbm1cwpGJllIyyEyMdPJBjlFWEsTx0VrtgOM810RhgfRjKT4zyg1ibvMj+KhiyL1Pq8zvhalliAARPWBqR1DmIy4mTOgavP0Zhc5/UuoY403CrZSfnWu52kmDIgw9hMvnOuRDxaRwkrTQkuI40jNpzH3bYYf1gD/tbL/Acm0Yt18g9hjcn0z6oR7j+2gBOmHarYNgS8bsExklAwXCc+WaqJbBeCCwlrtGPoG+FCBOrfUfaUggWtKsZgGZdOY6lbzD0Es+sf5a4pm0RcSKYMUhNOmrbk75N+jnEk2nC0+LMb2l3MvCLx2MMJwzabzGuD0s7kc9VVOL7PGHqOwxqfO2yO7QduU6cTGL0aXh5R2bCzBMmcQ1tiY+BdGYbYpyf/MzSRrxkk3ZtjDTW8jE0LXy5Yar3KqIVeRur7NiHtyHloKYBEZa+XazOomIfeQy/Wp7wRMSZIFbPU/Movw9taxh+nyVtNUw9T52JR1xDZWGSMM/xsWmCIcdkybccj16FkxL9/hj5jaMQmhLlgT5dyl+OYTtJW0F2o/zU+widibURsTUtGNIBbafr9lc94R+FBuC84QibNCUvb1zimCo+IeLhxRPYcbVNQZs03Q0vH9yB01knXipAKm28Aek05zduuLjqIvYgXLWCEBUBb1RqR4W4uVkPMQUsAuk8gmFdJyJu36Qbw91933337T3/UjkwAhSPSQodIxF01qdZLdTxBqrH15GCuggq52RUAz7VyF/EAkS1IavX1FZK7fE8fOMN2h6bddIIU2+2Ng7ykvUSEcJSZjgGsQc+uDBXW6occSznZgSCslcNIQqRlDUyKVsRDDmGhxrprIId+6lwKJeMGA5VOhxTbZJgWD1ih9ypiYPFYFk7j6UDauOCNMAoXoc536yCIWWkXZg4gu4kMTfnyDYjc5Rb8qC1eO5WjzyOWe41EYYKG+861h7NvcN+Gn+IhNQx3L9Dixmz3il1TA1HWOoQHli10UldwEhoRho5LsbUZspfncaQhltbdhJmOdvE1XoYEgdCK6Nn3KfVeAjjHclfPHnr73wmLA1irh8vw4x0t8dRr1BfwDlGWaAur4M3+W1oq2A4RMV9EhgfAQXD8eWZKZbAeiJQ226Trps+Iu3DOuDMsQgyvCmYNg7GjCw8hBAWmCY8ZDgztH2IoePYhxMNXowxlg2ivdm+zZU2LIIQa8zPamkrtoIh4RFq6OfF0y5xcr2cOx5o2T9PmPS5CFtFIr7jvUn/m3ZjNfosOB3RD2/XtJsnTI176DPtXfKXP5wBog/QzqXPQF+89i9pw9NPQMdo+3XLDTNNMCQuZlAiNtU+HddAn4w+J+311uhH4hRTxVeOof+Ox2Y8+BJuWh7lmHY7T9qmnQcxGs7MBKvGPUdfbpYyv5RgiEMJTk0waHnCEb0I7784k9GfJk1DxvXHMaLVVvCSpM8YzYl+FP0pbE0LhkOgVus+Rn3wJELIwdOsLi5L5lIg6cCz/lZrTF9GtKMAsOhspkK3x22N73TiqaComBBDMu130osdtmQaYYorM0xxJYfrNMPTiKnCGIvNDrGeFn7e3+CC0MeDlodcvOjmjY9wlBkeHogrrF04SWSp50AwZao53Lh2RJpZwtU4tsRnKkOmLNDAYImAtWDzXBP5wXRyvP9maTCGE+WIhgnexZT7afUB9wYC/9FHH92PLHGudn2MxLvILWWQOo10zXr9jEKyfieNNETztjHUpp+lErjvGFGr9W173NB3BcMhKu6TwPgIKBiOL89MsQTWE4HltP8YlKf9zDRC2tARBsIL8Y22EusaRkTMb1tySx+EdNAeJR3LuYakY5pgmGNox9HPo69Ce3eW9us8YXK+dssSZrQjcRBA
LIxg1x5Xv88TpoYf+ky+T+o70sZHWKRPhyNRZvkMxZN984RJ2HbLVFfWIGRmEHk0aYZQDYfICVeEMsoPOsIky9Ju095CPCnsPGmbFBcetpRF7r1Zy+KkuCbtp28ES9KNoQtlHUW+MwjAb8yEXUrz4PjlmILhcmh57FQCmQ7MQXg91kLMvrrWGtN9h0YXOG5rWdYebF2rt1Z6PK8EJLA6CSgYrs58MVUSWC4BBcPlEvN4CUhgkQTmEdsWmb6VOtcsguFKndt4x0EAxxi8SpkKz4wybeUIKBiuHNt1FzMjSnimMTLAdGcWfmWLtx4LtSLIYbgjr5YbG5GTUQG2uOFivMGJykeTgAQkMERAwXCIivskMD4CCobjyzNTLIH1RGC9C4asjYeXGf1LZqlpEggB1m9HT8gandnvdssRYL1MvHJx+uIlSWvypSdbDpcxzUoAYZDFXicZLvCsLUjlvxqMdRSY8x/bc889u4MPPjhf3UpAAhI4AQEFwxMgcYcERklAwXCU2WaiJbBuCKx3wTAZ3b5gMvvdrl8CvCQRPSFvDV6/JFbuynfaaafjrY2vYLhyrNddzKxbwdujeRsy6yawRhijQrwBda+99jrBmhpbExCLlPKCB9a92H333bt99tnnBAvVbs30eW4JSGD1EVAwXH15YookMA8BBcN5qBlGAhJYFIH1KhjyskM8m2K77LJL/zKRfHcrAQmsPAFekMlM0djOO+/cazn5vrW322x8EcSxWzsRnl8CEpCABCTQElAwbIn4XQLjJKBgOM58M9USWC8E1qtguF7y1+uUgATmJ6BgOD87Q0pAAhKQwAoRYK3WG77oqBWK3WglIIFFElAwXCRtzyUBCSyXgILhcol5vAQksF4IKBiul5z2OiUgAQmMiMAxxxzT3feNR3U/+aNO8CPKNpMqgUECCoaDWNwpAQmsAgIbNmzoeFuwJgEJSEACJySgYHhCJu6RgAQkIIFVQOBdX/pd94z3/noVpMQkSEACm0Pgrfc5++YEN6wEJCCBFSOw/fbbd9tuu+2KxW/EEpCABMZMQMFwzLln2iUgAQmscQJv/9yvurd+4YjuZ0f8qzv2v8es8av18iSwNgm8/QHnW5sX5lVJQAKjJYBn4XbbbadYONocNOESkMAiCCgYLoKy55CABCQgAQlIQAISkIAEJCABCUhAAhKQwEgIKBiOJKNMpgQkIAEJSEACEpCABCQgAQlIQAISkIAEFkFAwXARlD2HBCQgAQlIQAISkIAEJCABCUhAAhKQgARGQkDBcCQZZTIlIAEJSEACEpCABCQgAQlIQAISkIAEJLAIAgqGi6DsOSQgAQlIQAISkIAEJCABCUhAAhKQgAQkMBICCoYjySiTKQEJSEACEpCABCQgAQlIQAISkIAEJCCBRRBQMFwEZc8hAQlIQAISkIAEJCABCUhAAhKQgAQkIIGREFAwHElGmUwJSEACEpCABCQgAQlIQAISkIAEJCABCSyCgILhIih7DglIQAISkIAEJCABCUhAAhKQgAQkIAEJjISAguFIMspkSkACEpCABCQgAQlIQAISkIAEJCABCUhgEQQUDBdB2XNIQAISkIAEJCABCUhAAhKQgAQkIAEJSGAkBBQMR5JRJlMCEpCABCQgAQlIQAISkIAEJCABCUhAAosgoGC4CMqeQwISkIAEJCABCUhAAhKQgAQkIAEJSEACIyGgYDiSjDKZEpCABCQgAQlIQAISkIAEJCABCUhAAhJYBAEFw0VQ9hwSkIAEJCABCUhAAhKQgAQkIAEJSEACEhgJAQXDkWSUyZSABCQgAQlIQAISkIAEJCABCUhAAhKQwCIIKBgugrLnkIAEJCABCUhAAhKQgAQkIAEJSEACEpDASAgoGI4ko0ymBCQgAQlIQAISkIAEJCABCUhAAhKQgAQWQUDBcBGUPYcEJCABCUhAAhKQgAQkIAEJSEACEpCABEZCQMFwJBllMiUgAQlIQAISkIAEJCABCUhAAhKQ
gAQksAgCCoaLoOw5JCABCUhAAhKQgAQkIAEJSEACEpCABCQwEgIKhiPJKJMpAQlIQAISkIAEJCABCUhAAhKQgAQkIIFFEFAwXARlzyEBCUhAAhKQgAQkIAEJSEACEpCABCQggZEQUDAcSUaZTAlIQAISkIAEJCABCUhAAhKQgAQkIAEJLIKAguEiKHsOCUhAAhKQgAQkIAEJSEACEpCABCQgAQmMhICC4UgyymRKQAISkIAEJCABCUhAAhKQgAQkIAEJSGARBBQMF0HZc0hAAhKQgAQkIAEJSEACEpCABCQgAQlIYCQEFAxHklEmUwISkIAEJCABCUhAAhKQgAQkIAEJSEACiyCgYLgIyp5DAhKQgAQkIAEJSEACEpCABCQgAQlIQAIjIaBgOJKMMpkSkIAEJCABCUhAAhKQgAQkIAEJSEACElgEAQXDRVD2HBKQgAQkIAEJSEACEpCABCQgAQlIQAISGAkBBcORZJTJlIAEJCABCUhAAhKQgAQkIAEJSEACEpDAIggoGC6CsueQgAQkIAEJSEACEpCABCQgAQlIQAISkMBICCgYjiSjTKYEJCABCUhAAhKQgAQkIAEJSEACEpCABBZBQMFwEZQ9hwQkIAEJSEACEpCABCQgAQlIQAISkIAERkJAwXAkGWUyJSABCUhAAhKQgAQkIAEJSEACEpCABCSwCAIKhoug7DkkIAEJSEACEpCABCQgAQlIQAISkIAEJDASAgqGI8kokykBCUhAAhKQgAQkIAEJSEACEpCABCQggUUQUDBcBGXPIQEJSEACEpCABCQgAQlIQAISkIAEJCCBkRD4PyL3CjJY3KKcAAAAAElFTkSuQmCC"
-    }
-   },
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "# Show tqdm progress bars for all primrary index creation operations\n",
-    "\n",
-    "When creating an index, you can optionally set the `show_progress` flag from the `from_documents` index creation call to see tqdm progress bars for the slowest parts of the indexing process (e.g parsing nodes from a document, creating embeddings...etc.)\n",
-    "\n",
-    "`KeywordTableIndex.from_documents(documents=documents, show_progress=True)`\n",
-    "\n",
-    "![CleanShot%202023-06-25%20at%2011.59.55@2x.png](attachment:CleanShot%202023-06-25%20at%2011.59.55@2x.png)\n",
-    "\n",
-    "Install and upgrade `ipywidgets` if the tqdm progress bars don't look like the image above.\n",
-    "\n",
-    "`pip install ipywidgets --upgrade`\n",
-    "\n",
-    "`jupyter nbextension enable --py widgetsnbextension`\n",
-    "\n",
-    "run `jupyter notebook` from the root directory to have access to the `paul_graham` data in the `/examples` folder."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 13,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "from llama_index import (\n",
-    "    VectorStoreIndex,\n",
-    "    SimpleDirectoryReader,\n",
-    "    get_response_synthesizer,\n",
-    "    DocumentSummaryIndex,\n",
-    "    LLMPredictor,\n",
-    "    ServiceContext,\n",
-    "    KeywordTableIndex,\n",
-    "    KnowledgeGraphIndex,\n",
-    "    ListIndex,\n",
-    "    TreeIndex,\n",
-    ")\n",
-    "import os\n",
-    "import openai\n",
-    "from llama_index.llms import OpenAI, MockLLM\n",
-    "from llama_index.storage.storage_context import StorageContext\n",
-    "from llama_index.graph_stores import SimpleGraphStore"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 3,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# Set environment variable\n",
-    "os.environ[\"OPENAI_API_KEY\"] = \"OPENAI_API_KEY_HERE\"\n",
-    "openai.api_key = os.getenv(\"OPENAI_API_KEY\")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 5,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# Load documents\n",
-    "documents = SimpleDirectoryReader(\"../../../examples/data/paul_graham\").load_data()"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "### VectorStoreIndex"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 7,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "import nest_asyncio\n",
-    "\n",
-    "nest_asyncio.apply()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 8,
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "\n",
-      "VectorStoreIndex with show_progress=True\n",
-      "\n"
-     ]
+    "cells": [
+        {
+            "attachments": {
+                "CleanShot%202023-06-25%20at%2011.59.55@2x.png": {
+                    "image/png": "iVBORw0KGgoAAAANSUhEUgAABQwAAACoCAYAAAClr6NdAAAMQmlDQ1BJQ0MgUHJvZmlsZQAASImVVwdYU8kWnluSkEBoAQSkhN4EkRpASggtgPQiiEpIAoQSYkJQsaOLCq5dRMCGrooodkAsKGJnUWzYFwsqyrpYsCtvkgC67ivfm++bO//958x/zpw7c+8dANROcESiHFQdgFxhvjgm2J8+PimZTnoKcIABZWAPiByuRMSMigoHsAy1fy/vbgBE1l61l2n9s/+/Fg0eX8IFAImCOI0n4eZCfBAAvIorEucDQJTxZlPzRTIMK9ASwwAhXiTDGQpcJcNpCrxXbhMXw4K4FQAlFQ5HnAGA6mXI0wu4GVBDtQ9iRyFPIARAjQ6xT25uHg/iVIitoY0IYpk+I+0HnYy/aaYNa3I4GcNYMRd5UQoQSEQ5nOn/Zzr+d8nNkQ75sIRVJVMcEiObM8zbzey8MBlWgbhXmBYRCbEmxB8EPLk9xCglUxoSr7BHDbgSFswZ0IHYkccJCIPYAOIgYU5E+CCfli4IYkMMVwg6TZDPjoNYF+JFfElg7KDNJnFezKAvtD5dzGIO8uc4Yrlfma/70ux45qD+60w+e1AfUy3MjEuEmAKxeYEgIQJiVYgdJNmxYYM2YwszWRFDNmJpjCx+c4hj+MJgf4U+VpAuDooZtC/JlQzNF9uUKWBHDOL9+ZlxIYr8YK1cjjx+OBfsMl/IjB/S4UvGhw/NhccPCFTMHXvGF8bHDup8EOX7xyjG4hRRTtSgPW7KzwmW8aYQu0gKYgfH4gn5cEEq9PF0UX5UnCJOvDCLExqliAdfDsIBCwQAOpDCmgbyQBYQtPc29MI7RU8Q4AAxyAB8uCsVzNCIRHmPEF5jQSH4EyI+kAyP85f38kEB5L8Os4qrPUiX9xbIR2SDJxDngjCQA++l8lHCYW8J4DFkBP/wzoGVC+PNgVXW/+/5IfY7w4RM+CAjHfJIVxuyJAYSA4ghxCCiDa6P++BeeDi8+sHqhDNwj6F5fLcnPCF0EB4SrhO6CLcmC4rEP0U5DnRB/aDBXKT9mAvcEmq64v64N1SHyrgOrg/scRfoh4n7Qs+ukGUNxi3LCv0n7b/N4IenMWhHdiSj5BFkP7L1zyNVbVVdh1Vkuf4xP4pY04bzzRru+dk/64fs82Ab9rMltgg7gJ3FTmLnsaNYA6BjzVgj1oYdk+Hh1fVYvrqGvMXI48mGOoJ/+Bt6srJMShxrHXscvyj68vnTZO9owMoTTRcLMjLz6Uz4ReDT2UKuwyi6k6OTMwCy74vi9fUmWv7dQHTavnPz/wDAu3lgYODIdy60GYB97nD7H/7OWTPgp0MZgHOHuVJxgYLDZRcCfEuowZ2mB4yAGbCG83ECbsAL+IFAEAoiQRxIApNg9JlwnYvBVDATzAPFoBQsB2tABdgItoAdYDfYDxrAUXASnAEXwWVwHdyBq6cbvAB94B34jCAICaEiNEQPMUYsEDvECWEgPkggEo7EIElIKpKBCBEpMhOZj5QiK5EKZDNSg+xDDiMnkfNIB3ILeYD0IK+RTyiGqqBaqCFqiY5GGSgTDUPj0IloBjoFLUQXoEvRcrQa3YXWoyfRi+h1tAt9gfZjAFPGdDATzB5jYCwsEkvG0jExNhsrwcqwaqwOa4LP+SrWhfViH3EiTsPpuD1cwSF4PM7Fp+Cz8SV4Bb4Dr8db8av4A7wP/0agEgwIdgRPApswnpBBmEooJpQRthEOEU7DvdRNeEckEnWIVkR3uBeTiFnEGcQlxPXEPcQTxA7iI2I/iUTSI9mRvEmRJA4pn1RMWkfaRWomXSF1kz4oKSsZKzkpBSklKwmVipTKlHYqHVe6ovRU6TNZnWxB9iRHknnk6eRl5K3kJvIlcjf5M0WDYkXxpsRRsijzKOWUOsppyl3KG2VlZVNlD+VoZYHyXOVy5b3K55QfKH9U0VSxVWGpp
KhIVZaqbFc5oXJL5Q2VSrWk+lGTqfnUpdQa6inqfeoHVZqqgypblac6R7VStV71iupLNbKahRpTbZJaoVqZ2gG1S2q96mR1S3WWOkd9tnql+mH1TvV+DZrGGI1IjVyNJRo7Nc5rPNMkaVpqBmryNBdobtE8pfmIhtHMaCwalzaftpV2mtatRdSy0mJrZWmVau3Watfq09bUdtFO0J6mXal9TLtLB9Ox1GHr5Ogs09mvc0Pn0wjDEcwR/BGLR9SNuDLive5IXT9dvm6J7h7d67qf9Oh6gXrZeiv0GvTu6eP6tvrR+lP1N+if1u8dqTXSayR3ZMnI/SNvG6AGtgYxBjMMthi0GfQbGhkGG4oM1xmeMuw10jHyM8oyWm103KjHmGbsYywwXm3cbPycrk1n0nPo5fRWep+JgUmIidRks0m7yWdTK9N40yLTPab3zChmDLN0s9VmLWZ95sbm48xnmtea37YgWzAsMi3WWpy1eG9pZZloudCywfKZla4V26rQqtbqrjXV2td6inW19TUbog3DJttmvc1lW9TW1TbTttL2kh1q52YnsFtv1zGKMMpjlHBU9ahOexV7pn2Bfa39Awcdh3CHIocGh5ejzUcnj14x+uzob46ujjmOWx3vjNEcEzqmaEzTmNdOtk5cp0qna85U5yDnOc6Nzq9c7Fz4LhtcbrrSXMe5LnRtcf3q5u4mdqtz63E3d091r3LvZGgxohhLGOc8CB7+HnM8jnp89HTzzPfc7/mXl71XttdOr2djrcbyx24d+8jb1Jvjvdm7y4fuk+qzyafL18SX41vt+9DPzI/nt83vKdOGmcXcxXzp7+gv9j/k/57lyZrFOhGABQQHlAS0B2oGxgdWBN4PMg3KCKoN6gt2DZ4RfCKEEBIWsiKkk23I5rJr2H2h7qGzQlvDVMJiwyrCHobbhovDm8ah40LHrRp3N8IiQhjREAki2ZGrIu9FWUVNiToSTYyOiq6MfhIzJmZmzNlYWuzk2J2x7+L845bF3Ym3jpfGtySoJaQk1CS8TwxIXJnYNX70+FnjLybpJwmSGpNJyQnJ25L7JwROWDOhO8U1pTjlxkSridMmnp+kPyln0rHJapM5kw+kElITU3emfuFEcqo5/WnstKq0Pi6Lu5b7gufHW83r4XvzV/Kfpnunr0x/luGdsSqjJ9M3syyzV8ASVAheZYVkbcx6nx2ZvT17ICcxZ0+uUm5q7mGhpjBb2JpnlDctr0NkJyoWdU3xnLJmSp84TLxNgkgmShrzteCPfJvUWvqL9EGBT0FlwYepCVMPTNOYJpzWNt12+uLpTwuDCn+bgc/gzmiZaTJz3swHs5izNs9GZqfNbpljNmfBnO65wXN3zKPMy573e5Fj0cqit/MT5zctMFwwd8GjX4J/qS1WLRYXdy70WrhxEb5IsKh9sfPidYu/lfBKLpQ6lpaVflnCXXLh1zG/lv86sDR9afsyt2UblhOXC5ffWOG7YsdKjZWFKx+tGreqfjV9dcnqt2smrzlf5lK2cS1lrXRtV3l4eeM683XL132pyKy4XulfuafKoGpx1fv1vPVXNvhtqNtouLF046dNgk03Nwdvrq+2rC7bQtxSsOXJ1oStZ39j/FazTX9b6bav24Xbu3bE7Gitca+p2Wmwc1ktWiut7dmVsuvy7oDdjXX2dZv36Owp3Qv2Svc+35e678b+sP0tBxgH6g5aHKw6RDtUUo/UT6/va8hs6GpMauw4HHq4pcmr6dARhyPbj5ocrTymfWzZccrxBccHmgub+0+ITvSezDj5qGVyy51T409da41ubT8ddvrcmaAzp84yzzaf8z539Lzn+cMXGBcaLrpdrG9zbTv0u+vvh9rd2usvuV9qvOxxualjbMfxK75XTl4NuHrmGvvaxesR1ztuxN+42ZnS2XWTd/PZrZxbr24X3P58Z+5dwt2Se+r3yu4b3K/+w+aPPV1uXcceBDxoexj78M4j7qMXjyWPv3QveEJ9UvbU+GnNM
6dnR3uCei4/n/C8+4Xoxefe4j81/qx6af3y4F9+f7X1je/rfiV+NfB6yRu9N9vfurxt6Y/qv/8u993n9yUf9D7s+Mj4ePZT4qenn6d+IX0p/2rztelb2Le7A7kDAyKOmCP/FcBgRdPTAXi9HQBqEgA0eD6jTFCc/+QFUZxZ5Qj8J6w4I8qLGwB18P89uhf+3XQCsHcrPH5BfbUUAKKoAMR5ANTZebgOndXk50pZIcJzwKaor2m5aeDfFMWZ84e4f26BTNUF/Nz+C6nzfE+Jwo3bAAAAimVYSWZNTQAqAAAACAAEARoABQAAAAEAAAA+ARsABQAAAAEAAABGASgAAwAAAAEAAgAAh2kABAAAAAEAAABOAAAAAAAAAJAAAAABAAAAkAAAAAEAA5KGAAcAAAASAAAAeKACAAQAAAABAAAFDKADAAQAAAABAAAAqAAAAABBU0NJSQAAAFNjcmVlbnNob3Q7WdvJAAAACXBIWXMAABYlAAAWJQFJUiTwAAAB12lUWHRYTUw6Y29tLmFkb2JlLnhtcAAAAAAAPHg6eG1wbWV0YSB4bWxuczp4PSJhZG9iZTpuczptZXRhLyIgeDp4bXB0az0iWE1QIENvcmUgNi4wLjAiPgogICA8cmRmOlJERiB4bWxuczpyZGY9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkvMDIvMjItcmRmLXN5bnRheC1ucyMiPgogICAgICA8cmRmOkRlc2NyaXB0aW9uIHJkZjphYm91dD0iIgogICAgICAgICAgICB4bWxuczpleGlmPSJodHRwOi8vbnMuYWRvYmUuY29tL2V4aWYvMS4wLyI+CiAgICAgICAgIDxleGlmOlBpeGVsWURpbWVuc2lvbj4xNjg8L2V4aWY6UGl4ZWxZRGltZW5zaW9uPgogICAgICAgICA8ZXhpZjpQaXhlbFhEaW1lbnNpb24+MTI5MjwvZXhpZjpQaXhlbFhEaW1lbnNpb24+CiAgICAgICAgIDxleGlmOlVzZXJDb21tZW50PlNjcmVlbnNob3Q8L2V4aWY6VXNlckNvbW1lbnQ+CiAgICAgIDwvcmRmOkRlc2NyaXB0aW9uPgogICA8L3JkZjpSREY+CjwveDp4bXBtZXRhPgo/j0nSAAAAHGlET1QAAAACAAAAAAAAAFQAAAAoAAAAVAAAAFQAADBwb3xS7QAAMDxJREFUeAHsnQe49ETZhoMVC/YGimIvFLFiBQuIgr1jRUVFQewdu4iKvSD2ihUbKoIFOyo2EBGxK4qooCh2QX7u/DznG0Kym+Tbs99yzj3Xdc7uJpmSO5NJ5pl33lnv9DNCZZCABCQgAQlIQAISkIAEJCABCUhAAhKQgAQkcAaB9RQMrQcSkIAEJCABCUhAAhKQgAQkIAEJSEACEpBACCgYhoSfEpCABCQgAQlIQAISkIAEJCABCUhAAhKQgBaG1gEJSEACEpCABCQgAQlIQAISkIAEJCABCUhgDQEtDNew8JsEJCABCUhAAhKQgAQkIAEJSEACEpCABFY9AQXDVV8FBCABCUhAAhKQgAQkIAEJSEACEpCABCQggTUEFAzXsPCbBCQgAQlIQAISkIAEJCABCUhAAhKQgARWPQEFw1VfBQQgAQlIQAISkIAEJCABCUhAAhKQgAQkIIE1BBQM17DwmwQkIAEJSEACEpCABCQgAQlIQAISkIAEVj0BBcNVXwUEIAEJSEACEpCABCQgAQlIQAISkIAEJCCBNQQUDNew8JsEJCABCUhAAhKQgAQkIAEJSEACEpCABFY9AQXDVV8FBCABCUhAAhKQgAQkIAEJSEACEpCABCQggTUEFAzXsPCbBCQgAQlIQAISkIAEJCABCUhAAhKQgARWPQEFw1VfBQQgAQlIQAISkIAEJCABCUhAAhKQgAQkIIE1BBQM17DwmwQkIAEJSEACEpCABCQgAQlIQAISkIAEVj0BBcNVXwUEIAEJSGDdEzjsF1+uPvfTT
1fHn3Lcui+MJZCABCQgAQlIYEUQ2GiDjattr3aH6mZX3npFnI8nIQEJSGCeBBQM50nbvCQgAQlI4GwEEAvf9b03nW27GyQgAQlIQAISkMAsCDzoeo9QNJwFSNOQgARWFQEFw1V1uT1ZCUhAAotF4LTTTqv2OnRPLQsX67JYGglIQAISkMCKIoCl4bO323tFnZMnIwEJSGC5CSgYLjdh05eABCQggU4Cp556arX7gTt37neHBCQgAQlIQAISmAWB/e7+nlkkYxoSkIAEVg0BBcNVc6k9UQlIQAKLSWDXjzxgMQtmqSQgAQlIQAISWDEEFAxXzKX0RCQggTkRUDCcE2izkYAEJCCBdgIKhu1c3CoBCUhAAhKQwOwIKBjOjqUpSUACq4OAguHquM6epQQkIIGFJaBguLCXxoJJQAISkIAEVgwBBcMVcyk9EQlIYE4EFAznBNpsJCABCUignYCCYTsXt0pAAhKQgAQkMDsCCoazY2lKEpDA6iCgYLg6rrNnKQEJSGDZCfz5z3+uTjzxxOoCF7hAdYUrXKF3fgqGvVF5oAQkIAEJSEACIwnMSzDkfeiQQw6pLnWpS1XbbrvtyNK2R/vqV79a/eY3v1naedGLXrS6wx3usPTbLxKQwDACX/nKV6rf/va31TbbbFNtuOGGvSJ/85vfrH7xi1+c5dj73Oc+1XrrrXeWbSvhx0wEw1NOOaW6yEUuMpHHFltsUW222WbVk570pOp617vexGPnufO0006rLn7xi1ecAxf+xje+8TyzX6u8Dj744PoBscEGG1R//etf1yotI69bAufkerhuya3J/Zx+P+yyyy4VD6yPfvSj1XWuc501J9b49u1vf7t63/veVx111FHV73//+4q2lXaL+Ah1bWFMnMMOO6x65zvfWRGXhycvvLvvvnt1nvOcpy2L6p///Gd1wxvesPrhD39YPfWpT61e/OIXtx7XtlHBsI2K2yQgAQlIQAISmCWBMYLhv/71r+rmN795deqpp1ZHHnlkr+K88pWvrJ7whCdUj3nMY6rXvOY1Z4vzve99r7rvfe9bv1u9/vWvP9v+SRvufe97Vx/60IeWDmn2A8e8842Js1SAli8f+chHqs9+9rPVEUccUe/dcsstq9ve9rbVPe95z5aj/3/TmDidiU3ZMSavMXG6ivG73/2ueuMb31jzQXS68pWvXMHoEY94RLXRRhu1RhsTpzWhgRu///3vV/e6170qhOnDDz+8V2yOO+CAA3ode+tb3/psgvfxxx9ffeELX6j/vvWtb1Ubb7xx3dfhfkK3SZhF/5n+y2Uve9laC0I07OKfPPP5yEc+snrTm96Un/Un5TnXuc51lm0r4cfcBMMS1pvf/Oa6c1tuW1ffafzPe97z1tl/4xvfqLbaaqt1VZTB+X7605+udthhh6r5oBickBGmErjmNa9Z/fjHP67e/e53Vw94wOxXdJ1lPaSRvfzlL1+fEw+hTTbZZOr5rYQDzsn3w09/+tPq6le/en0Zvvvd73YOqiDCPf3pT2+9XNe4xjUqRFNeOsowJs7nPve5arvttquTQZDkZYHAyy1iZVvYY489qte+9rXV9a9//Qqx8fznP3/bYa3bFAxbsbhRAhKQgAQkIIEZEhgjGPLu/6AHPaguxemnnz61NBxzrWtdq+43dL3TPexhD6ve9ra3VXe5y12qj33sY1PTLA+IYHi3u92tFjLXX3/9arfddqsPGfPONyZOWZ7y+3/+85+K8n384x8vNy99v9Od7lQLSec73/mWto2JsxR54JcxeY2JM6lYn/nMZ2rhFGOlZqBP/8EPfrC6/e1vf5ZdY+KcJYGRPxDLb3CDG9TGAEP0BvoD9Av6hGc/+9nV8573vKVDEdOx9Ovigwh9latcpT6+q/88pC/8gQ98oO7fcD8hCvcN9LmOPvro6uSTT65e+MIX1tEUDCfQKy0Mn/GMZ1Q777zz0tGAQxH/4he/WD3/+c9f2v7rX/+6VouXNqyjL1S0u
9/97nXujAZd9apXXUclGZ7tOVkgGX626zbGPATDWdXDspH8+c9/fjYBad2SXL7cz4n3A+3PoYceWj3wgQ+s/vCHP9Rwul4usT7ceuut62MQ8fbcc896RAxx7wUveEG9/Ta3uU3F75jDj4lDQlgsMqK33377VYygUad4+aWt/8EPflBtuummdX7594lPfKK6853vXP/80Y9+VHG/DAkKhkNoeawEJCABCUhAAmMIDBEMeed573vfW+26665LWfURDPPudaMb3ehsFlm8T+29997V6173ujrNtREM3/CGN5ylbMmXhJf7PXEJSOPLi170ouqZz3xmvRWrtIc85CH1O+nb3/72WghjB3rAs571rKWYY+IsRR74ZUxeY+J0Feukk06q+2XULQS4ffbZp9p8883rd2tmYWY7/TemsxPGxOnKf+j2pzzlKXUZiTdEMGTa/EEHHdSZHVP26WMQ3vWud9X9IL4fe+yxFfcNHKjDD37wg+sZqvTxXvWqV3FIdctb3rL68pe/XH/v0nGG9IWxfKUvRnnHTO0/7rjjqite8Yp1eRQMawzt/7iomZKMeS3mtG3hwAMPrEdS2IcJ58Mf/vC2w9zWk8A5USDpeWoLd9hyC4azPOEhjeQs813XaZ2T7gde6h73uMdVP/nJT842gtYlGN7xjnesPvWpT9W+ARnRSpsLd14aH/3oR9eX4Otf/3p1k5vcpP4+Jg4vJnlJ+fe//11lFPhRj3pU/XDfd999K74nlGIiL4TlgFGOmfapYDiNkPslIAEJSEACElhbAn0EQ8Q8ZsNldkWZZx/BkPcgXLq89a1vrR760IfW0bFQxPUVs5XKMEvBcMw735g4ZfnL77wzYu1IQCx8//vfvzQ9E27MUsF6jsA0UI4dE6dOYMS/MXmNiTOpaC9/+ctr92wc03zfp75d97rXraO/5CUvqRDrCGPi1BHX8h9TgjFESBgiGCZO2yeiGtZ8GBsgCL7jHe9YOuz+979/LdJjQfid73ynutjFLra076UvfWnt8ogNuGS6zGUus7Sv+aVvX5h+GDO0SIvpyF1ul5rpl78VDEsaE773FQxpLJj/zvH4deAGSOCGRFDE/xUXmUaE+epMuW368/ra175WH4MCzXRLLtSXvvSl2qcXNxjhL3/5S+0LDGsXvl/60peu08EK5oIXvGCyrT8//OEPV//73//qm+KSl7xk3XhRFgINHhaSKOV0xKnk+Bi4xz3ucZYOe33wmf8wTcUXBMejoN/0pjetbnWrW1X//e9/K8oOg9vd7nZllInfqcw8ZGhYyB8fkHe9613r9CdNST7hhBPqeJjuIgKg1NMQ4WcsFkjNjCkvU7PJC4e6XAPy23777ZcafeL8/e9/r5V45unzsGveYDwIPvnJT9bJo9Zf+MIXPhtXbkyEE/wc4I8ATvgxOPe5z11fM/bBnbqB5Scm7l2NQ64RjQvnhiUWoknp54DCYObM9E8spGgguE5Mn6SeXOlKV6r9a0Rs4XhM6jFFx3cbFmCIMlxL6hOfCZwDll3URfzI4fONa9O0xMrxbZ/NesgxCET/+Mc/ah8nTJ2nrPzRUHJf7Ljjjktm2fixxMEy1zqCDi89MKMczfuIURysyKgfXD/qNXXjale7WlvxWrcN5Vkmwj0HN/zBYLVGOalruAWAb1cYez/0rSPky4gVAiT1g/uIekRbwwMOPxdloD7BgYCPmz6+L8rBkzItvjdfINj2t7/9rR7Z4zu+cPDhUQbqKCIfbWtM+8fEIU3qBdaE1BeEyYS8sDznOc+pnvvc59abaY9oyxiZmzRdOWl0fSoYdpFxuwQkIAEJSEACsyLQRzB84hOfWL3iFa9ozXKaYPinP/2poi9J4L0ckYVA/6tNgJyVYDjmnW9MHM6Ffi7+CQn0GbI2Ae+CWGsR2t5lS0GM+PjGHhOnzqDnP/rezGqkHzkmrzFxKBp9Nfq5BLSHuOm5xS1uUWsBXdcdH4/0B+lP0AcmjIlTR1yLf+gB9
AXo+77nPe+p3XHNSjDEupQpvLgwoq8fXabUk+COJlAG9BzuTQJWrHHBVPaf6SsP6QuTDhakTavXIf1ABcPyKk34Xl7gSRaGJIFVDMcjwDC/nYAYhr8strcFOqoIjAlM3WRhAPKiYmT0JhWZef4IXG2BYxDtMP8lUCGaPgx/9atf1eIA+xH9ELKagRVAEYiaU+8QLvHP0DwXxBBGCjA35gZB2JoWEFTwKxHT7vJ4zgMTeUyZc97lfm5upjm2BUQPRr2aYhoNE0JopkaWcRHXMNXNlO2SEcJifOYlDuJMhJOf/exntahVxoFd26phNCKYryP4lSuAJd3Seirb8D2AtWqTOfupJ4irCVi/MmrIFM5jjjmmHsXIvnwyrX6vvfaqf6a+Zl8+M80A3w4ImYyStAWuERZZXQJt4rTVQ/Yh2MKBxhDfdc2RSY7B9wkPngg9bGuG8pwQl+DMSE1bwI8ExzdF4LZjh/JMGohwjMDSqDcD9Rnz9PK6ccza3A9D6gj1lBE1pgO0haYfS0zqI9C+5S1vqfBLMy0gtJX3GaJwhNq2lywESdoNQtt+tsenTV5CxsQhHe4LxEIGGErn3rQ1tGHlQ5UBmqc97Wm11SOLsJQjgaTVNygY9iXlcRKQgAQkIAEJjCXQRzDknQxxIoHpjwyKEqYJhrzz40+Q939mfyQgsmFMkcA7Fa6w8s6W7X0+875XTkke8843Jg7lY5A//Wj8uvNeTMBIgQFt3uM53+biD7zH855If43ppY997GNHxakzm/KP6/eOMyzXEIPud7/71aznWT5Wy401ZemqJ/1BNBC0kGZI/WE7vDh+TJxmukN/x9LvZS97Wa3RIHi36Q1D0y01mqaffRbyoW6TD8J7n35os/9M/ULobAtlX5j9pbEFfb9MKx7aD1QwbKPdso0bP9PjJgmGiGRYMBFyo5x44om1mEQaiGqYRiPGcKNzY0c04riM2EQwpPHG3JlA5UKAQgi7xCUuUW/DnBUhCYsgGreMFiH2MdrADdisaFg3lcIW6VK2xz/+8bV4g9Ucqjeh6RyzHIUgj5122qm63OUuV8+zL52/9hUMo3qTF513BBSs1/AHWQotzRuY1bbSCFEOhBmYYFocYQsBkA5+phsyEsJoBoH0EBvh9/nPf762tGI7vxEQsBYsGY0RDEmP603ZKCM+QrBsLAMPW84bC65cu6aIgbCEOTMBiz/qDw9kHl4R17hmWOIRInDVP874RxwcyyLeICRyrQkRZXDYyjYeOgTy5/oxqoElF3/xH0c58KtA/tRLhGlCU2CqNzb+tdVDDolgmMN5MON8lnsJUZgARxpdHs7UGaw/s3oa5421HtaOWMsSMnrFd6wpeVlBwOLBltFP7pvmyk8c3wxDeRKfEU2E5whmlIu6h8jMlNZs5/x4YCWMvR+G1hEGCFIXWe2XBw+jhAiDaY+wcozAN0YwzDnlk5fTC13oQvXP1L3s45M6zEAEgbYx7W294cx/GbHLPTImDkmVo+M8TDOgwnQa6nKmHTPQE2tc6vrNbnazsjiDvisYDsLlwRKQgAQkIAEJjCDQRzBsJouRQxaCmyYYMqOH2XLMoKE/2xWYrcEA7KwEwzHvfGPicD5dgmH83cWoou3c846dmYZj4rSlm228p9MP5i99ukwNH5PXmDiUpU0wxGovGkWXv7xSUEP7QHQdGid6SZgM/aQPi4ZB3wxDKGYbzUIwpE+B0Ez/HGMDfHmWgf40/Wr0FQwwuIYYCtHnZWFI/KtjYVgaKTX7z/Sb+/SFyReDG/Kir8y9kJA6yu8+/UAFw5Cb8skNmQ5sm2BIBxTrNUZc0uFGeKJjy2o0WLUhUqHAxyqNLMsLgECWabwRDDmGePvvv38tFCEAMoUT0Yjwy1/+sp5mWv844x+VHoGIQGN+7Wtfu5dgSFlj9kpcGjlGhQiYOkcBj8kw54WpNUJOQqbz8buPYFjOvYcP5xhzZtIoVx8qBUNYb3LG1EmuCecK39KSkBW5Y
gGFIJtVtbDowywYURDhk+m5CayKyugMgfS4udZWMCQfRuxy0yOYIK6mcY+pesoA71iZcizCKQ0v5SQONzQNT0ZhsPyjzFgYIo5St9hXClw0Vow8JQ5Wb0wlJpAW+xO6fBgi3iHwwJTGLYEGDJGO86AcXL9JodngZbXuUjCkMYvwSVpp6PheCjZl3WkueoLYHBNveGIhlvpLQ86Uax6shDbhqt5R/BvDkxckXpQIWE5msRd+M9UabuTN/UP5EdLKcxpyPwytI4iueTA3hd5ycKP0wUpZeZkk8EKZqRn1hp7/pgmGeXiTXEYcm0mnjUGAp+0cEydp5mHJw5o6EatD9nMtmD5AO8b1YVoBD2eEcgRn7iNeCGgzc28l3a5PBcMuMm6XgAQkIAEJSGBWBJZTMMxAagZuJ5V51oLhmHe+MXE4pz/+8Y/14DHfGVTPonfxdY0rqq4FL+Izkf4DmsGYOOTbDPTV6SviO5JA35i+IQv3xS/3mLzGxCF/+mgxXKGPiIiHBkK/jtA2Y47tCM3pA9LXRjAcGieWcqQ3NGBQtdlmm9V96xhHZCp5qTcMTZfjo12QDhpN+ltJC+tUtAn6eeQdI5bs55O4sI1vxa7+c9lvbPaFkx79TdxP4SYqRhmlqNu3H1jqVRjgNC1rk985+XO9M0ZKpq8PP+UMS8FwyqH1bqz1YjGGCIJlE+IdUy7LUKaLxRSWUYRSMGwKSwiTWHkRyIOFBcpOKxefi0ljhuVhW0UrxbBSVKsTPeMfft9QuQlYHiFylmbdVL7mcuh08lHHqbR9BMN0/ss86gyLfxEoyxuYDn6sC0sT6CJaLahm8QQqeRoCjuFaxBItcSg7Iw2cK5aHNL4lIxrACH+JM21KMiJkzPsTJ5Zv5epH2VfyjaNTLAJ54CAscR6xlkwcLNXib456wUhJBC7i0FghPJYhD7Jyyjz7uwTDrKyEKIlolEadOFhwMiqCENqsD+wvQ1s9ZH8EQxrPAw44oIxS+3bEepD7pLxukxrJMEawRTQvRWgSJy0s52DXx8pwKE+am/gxbTq6zcmV91esDMfeD0PrCGJfLP2on4in8a1B+bjG1HfuYV4IZxWGCIZdTfarX/3qur2jbnOPlC+CfePkfMrBFdKL1SdT9THpj7Uh9yoDDNxfvDBSfxIYNUcoD89sb/tUMGyj4jYJSEACEpCABGZJYDkFQ/pHDCgz84Tvk8JyCoZ93/nW5j2x7dwiruEW7OCDD247pJ4tx6w7ZpgxY2VMnCTMeaIDMG2WTwL9MawC6beW7+/sG5PXmDjk1RZKYalLMMQIAWMUAqIZ/bQIgH3jZAZUWxkmbUMfiV/ycrp7dIJSb5iUTtu+0ugCl1hPfvKTz3ZYptpnB0LrLrvsUl9HZkJyLQj0S2JQ0tV/ntQXJg00AAwbSIt+XWZTlf2xvv3A8roqGEK3I5TCXsch9WZECqbN0dlsU1+56Pi7Q5FHbGGUIOp8m2DYdMpPJqSBeJUOLnkikjBdDsEoowwpZ1tFK8UwBB/EgTKUYlhU69IKr6uyIIjik7CPYJiHDgo604LbAg8ljitv4KjzTBWkYWkL+HOLWJfFSyISYpnXFJHa0igZjREMm34LyIOGAQvIpt8P9jEtGcGVEMEQ3xcsAIFFVawP6wOKf9nOwwQz5ghcXVMAwq9pMdglGOYaJEvSxcoMCy3Mt1nApU9oq4fEi2AYXx/NtNrKNamRzPF77rnn0lTqZppYlPGg4H5BCJoUhvIsy1ZaDTfzYMoy91YW8Bh7P4ypI7xk8BJF4N7iAYa1LvfU2Idw8/yav8sHVJtlJ8Ib09EJXS+C1HEewLEwHBOnLBejtbgKQKzHapmRONwixD8qbJimgIuCjBLi/5LBH9o5zmNSPSvzUjAsafhdAhKQgAQkIIHlILBcgiHuYuLHGSulfO86h1kLhmPe+cbE6ToftqdPMEkwjFVX+llj4rAYJroAs8EwfiDQZ8GikPS7+l5j8hoTpy5Qy7+y7
1zODCsPjZUq2+gHoZfwDk7oG6ecFVlH7PkPQyv6yvRjEXyj1cxCMIxRA0VhNmQ5+zHFw/AgrtPoj9PHL0NpzBBRvqv/XPY3o9WUaWW2G/dhFnLM/qH9QAXDkJvyWQqGiGIsWlEGLPwQ8ZrWXDkGf2GozUwf7QptgmHXVE8qBiMXTLFtBjr+VA4+CW0Vrbyhp4lhqYQ0WljepLNeJ974F1PcPoJhLNew6MMHW1vgZsZSshQM0xB3sSEdVn6NXw38UGA2zmhMLJPa8mpuG8KobdGTNq4RDJvWfeTdJhgyCpIRpWb5mr95iCBipOFnlALHss0wVDDE+hJxLVadZXpcFyxcWegmU/bL/eX3tnrI/giGCFj4w2iGCICl2XRXI0lZ8xDF8o5Rm7aQ6d9lvWo7jm1DeZYPQqa5djmmzUMDsQ6Be+z9MKaO4GMR4Y2HUTMwAMF9zkJLpeVy87ihv6cJhliXRtRn6i+ryDdDfH9QRu65MXGaaTZ/l/dh3BPEirP06ZoR0r5tioJhk7S/JSABCUhAAhKYNYHlEgxxS8TMnAhh08o9a8FwzDvfmDiTzit9qEkGB7hFwj0SxgnMihsTJz67KQvvxohcmca7COXrKkNpcIQxUKbVlseXohgz++hrxF1b3zg5vkx32vdSFMQYoJwxV+5j5e8xIauEt82YS3roR1kzIDM4sy+fzPBkJlqMi7r6z119YdIhTozLolEkfT6H9gMVDEt6E76XgmGbD8MJUc/ic5DOJRZaVCqsyZhDzxRBrAXbBMOIQF3p07llCiE3H1Nwy+lymTbcVtHGiGFYgDHVepLIkodDH8Ew064nCX+YdGPxU+aZeJNuyLIxwtIPP3KIWn079/CexggfCPGDmJtxWpyhgmFMlxFIysUx2uoDFqZMC+4rcCEIxZcf6bUJc2U+iDgI1EzPhG8WzeCYTNuMr8AyXr631UP29RUMEZWzKnZXI4llWkaL2qbapywRniaJ3zl2KM9yann8mCat8jMvFHnxSr0eej+MqSMpB+bzaT9wQszgQAIvK4xOzSpMEwxLVwtdQmumCTMYwgJHY+JMOh98XOIGoXxQczxtB1PGy+kLpThNu4sV4qSgYDiJjvskIAEJSEACEpgFgeUSDOPTvMsKrFn29Am7Zjw1jy9/5922fO8a8843Jk5ZjuZ3/MLj07rslzaPwVUU/XreoXmXHhMH9zjMYCFgAIPPeQwNJvWzOHZMXmPikFdbYN2DuM/qMtwo1xrAkhLBcGicHN9Whq5tsS5kP3pAGejjRkPJPmYx0v/uE+jvbbnllvWhiNRoFG0h/Qn2dc2mwiCMGahYQdI36+o/d/WFSTvrXUyyhOW4vv1ABUNo9QhrIxhGKWY0Aku3piUWv0m/TTBsm+7GlFo630yrLX1ncZNiEZOpuGmg2yraNGGrHCGIGFauNMW8+IhlJb6IIH0Ew0xfLhfsKNPie8xpy4YZ6yesHbmJEUzbrKDKJds5f8qO8Eg46aSTlqYX1hvO/EfjzAgD0zIRRktG3CiIS2UoH0JhVMaZhYUhJsRMgZzkXJfz4drjbBb/BEMFrpxTl2DItAMaNaYeRIwjDucMp5hWt01tT9p8ttVDts9SMCS93G8ZmWFbM+RFZBLXxBnKs2wr2vxYki4smOLKsfFzMfZ+GFpHuF8YPeOzaS7PSyCiXITDcsGj8Bj7OU0wLAX4rodtVuaLyDomzqTy80LGIjm0SQi/8Q2DpSriemnlSjp5KewaJSzzUjAsafhdAhKQgAQkIIHlILAcgmEGw9tcZXWdw6wFwzHvfGPidJ0P20v3XPSV8eFeBhZLieCUd8YxcUiTgXEGqxF/CPR7EZx4Ty/7//XOM/+NyWtMnDLP5vf069qm3HIsrrSY6VUabYyJ08x32u/Mgpx2XPZjhUh97xPSh+NYXKGl/9CMG5dHk47LTMr4we/qP08SDNE8MLpqLrxJvqQ3tB+oYNi8kh2/SxFgiIVheZERseLMMtmkAeZ3X
8EwglmU56SVz1jhpFEvy4BVGCbN04StNsGQTnFEszYhs5wG3EcwjPUg5W5beh2BAR8FjNKUgmG5cm7JLOfPKAEdfgS7LC5STjPcZ5996gY3x/OJQIJPOUIs00pfHW3TZeMkljjLJRhmhIA88HUZH4f8Jhx77LFLU17DcKjA9f8ptVsY0qCwgAcBgRTRuwxl/ky3jyhbHpPvbfWQfXlItDFmf4TMPhaGHJ8HEd+p53Gky28CDwAsewmIsfgQnBTG8IywxT3IqFNzRDDTOsiXFwIs5sbeD0PrCEIhQimBe6zpRqEc+ePeY9GZWYRpgiF5xK8j9YwVxkuBmmkK2267bV0UXm4yMDImTtv5YGlJm0poWobizoB2oxzpLutzH7+oCoZt1N0mAQlIQAISkMAsCSyHYJhptekj9SnvrAVD8hzzzjcmTtf5TesLZxCf+IiV9HHGxCnzx4ch/vHwKU+gT8wUZXyfNwXLMXmNiVOWr/l9t912q11iUU4WNcninByHoMr1QFcpDTvGxGnmO+03s4iwqGsLvPcj1FFmFlQlIPw2+29tcdkWg6kubSbxyD/9KqwI0WzKUApzWZug7G9ExyFOl2CY7ZwL+TWtMePujTT69gPLcnWtY0F65+Qw81WShwiGgIsVCstZI3BlwY2mNQ8rKWGGSsj0xDZhjqWxsR4kYO5LnFRoBBL8BSCA5UZsq2hjBEPy22OPPeolw/nOFGUqOoIDnXsWXqEBIPQRDJnSx3RsrPqo1AhOlB1Bg0U/EATwAUFgf3wKEA8rQKYNsh21HjNtAvHwD8B0bEJpNs+UXhY4ILDiLubCCBKItjzUYilXLlYS608EDPLZ5AynrOSPTwpYJCyXYEhemKLjLw0RFFENZgTKieUTU4ThQCNM3RojcJFeRC582/HAIy38AcYnA+XAgjWiMWVDRMEqi9BmhVnvOPNfWz1k1xjBsGxwqTdMxaZB5HqmoSRtBDvuuYwQYQWJdSH3B8x4mE9z2jyGZyni4XuENgNrPiw1uYZMOyaUDxZ4jr0fhtQR7tE8rGgjuIaZTsuDg4VHYMpIJvWaAMNcZx5g+PIbGvoIhvGTStqIufjNhNvhhx9etzWI5lw32q9YR46J0yw7wujmm29eD060vQxHRI3VNvG57/A7mYGZZprN3wqGTSL+loAEJCABCUhg1gRmLRhiMbXhhhvWfbyuWVpt57AcguGYd74xcegnZBCZ93h8xCeUvsOx4Npxxx3rXfQ9836MIQAGAQlj4iRuPk844YS6P4HVYfrbvMez+GHpL31MXmPiYO1If4HAucZAg7437n0I8KCfEdGWAXiYEXBvtfXWW9ffx8SZdI3qRAf8m+TDsE8+0QuYecfU50kh1n/0Z9Aj6MMSEG7RUuLyK/daV/+5qy/MzDUsHvljunkzlPH69gMVDJsUO35zY2Yq8VDBMKo5SVM5EGaofHRSCSjYsaLD2okbcJJgiAUdjRg3F4E0t9hii7rxoMInYPGHv4m2ijZWMESUomFErGsLdKixlJq0gnEZj5uCRioNH+fCQykrR+dYtkcwZBsiHzdYyZDVobOSFMc0/T8isFGuxCFNrOewRExACMRJbULpQ4JtiAPcNJSXh0EEzeUSDMkTsQTLqjCiviDwUIcSYB7RdIzARTqpc0kzDzuE1KTNPuoa3I444oilMk3yu5f02uoh+8YIhghvlCFMSCe+QviONSKm3AmIX4wsldc6C1rkmK7PsTyz6nHShRt1MGVGeMWqDQvKhLH3w9A6Eou55IvgiDichxTbGcnEuTWBxVFiHY11JFOCh4Y+giGWerRZ5X2c9jH5ZYpHfo+Jk7h8UpcYzEHk5ZN7iUGLMnDNcB7MJxasvJhR36hPTFXGH+i0oGA4jZD7JSABCUhAAhJYWwKzFgx578JAZNIilW1lXg7BcMw735g4LJjJQDKBgXQYJGA1hxFHAv1JQt7v+Y6VWvl+PyYO6bQF3qcpD8JQ+oKxRuP4MXmNicNCl
QhehOb5NvtA9HnKPlhcC9WRz/w3NM6ka1Sm2+f7JMFwWj4IuWgXhC6/jWUZcPWFoJq+Dv2cpoZR9nW6+s9tfWEMfuinwbptZmLKMbQfqGAYclM+y84u1ibNVZInRScuptzEKwNWa1xUhL0s5hBlOjdh15RJlutm4Qas/JoBB5fsQwQglBUtfuamCYalD4aYVCcfGl6ENCwdI1ByLlgDIepxw09zspm0+MQfIgr3IYccUm6uBR+EKiwBm4IhByL8MapCR79spBH1GAmi898M3KQwxgy4DDRksEQ0K8UCnLFy85UjS8RjtInrmYdEGE3jSgNJvFznsgxwyDLxWErSgCRw4yNkxgoy27HkZOQM0TVhmsCVKbswZ4plAg0iAlFEI+oPQhQBH5AIJE2hmDIiiDNNPlauSa/52VYPOSajTl1TkmP5WE5DJR6CH9aQeVhSBuplAqIy912E9WzHihU/FrE6zPauz7E8SY9z4oWpFMCpM9RpRqBiJVfmPfZ+GFJHsGakvuOvLwJ6yoCwiTXhTjvtlE1zEwzJ8OSTT65HxZorOCP6Yv2X6dRLhRsZJ/FJk3pCXeYeiPVl9ucTK18GRMoXHtpnHFqXbUaOb34qGDaJ+FsCEpCABCQggVkTmLVgiLjBuzSzyXD11Dcsh2BI3vN4T5wmEuGSiX4Zs03KgLEF76+lWJj9Y+IkbtsnU0Ppn2HBh//20rJtTF5D40SroGxNwZA+H31MFogp++j0gXh35r272W8cGqe8RkPF7CZPxDv6m216Q5lPUzwmncMOO2zJZReLg8a6splH+RtrQjhglVr2KzBswoKUhUwTuvrP7G/2hem70V+mr4srp64wtB+oYNhFchm24xMPQYkpvFhVrb/++ku5MI0SIY6FRLD06RuIQ1yEMNJlmfFpUyz7pt3nOMRQQpx70njSQPCJtd6QwA2BpR7nj3DWlwMKOyILDxAa6JRlUt7cKFwLzOyZMt4lEiQNzpOGlDIhNDUbuRy33J88HLBSQ5jF9JsRiXkGhGT8W1IOFu1AaO17neZZzjIvFu7ItcMH5Lq4dixfTxmoZ9z7fQSmsffDkDrCsYyMYZ5O4F5o+kMpWc7zO+fPPcp9jVjYJq42yzMmDqOBtM3bbLPN0iBLM93yNw9N2g3aqLiXKPd3fVcw7CLjdglIQAISkIAEZkVgjGDYlTf9JdzS4PKH6ZJ93l+70hqyPYsTlr6jm/HHvPONidPMt/zNrD/6ZfSF6Jc1/YKXx+b7mDiJ2/VJP7Wt/zsmrzFxusrFdnQKRDH6jFji9alDfeNQP0kTQweMcc6Jgb4O/XpmL7F46dqEo446qrZcxDq2j2FM336gguHaXJVVFpcpnYgdNCSl38BgKE1ym1N7c4yfEpCABFYjAQXD1XjVPWcJSEACEpDAfAnMUjCcb8nX5NZHMFxztN9WIwHELnz04c6NWUDx8b8aWSz3OSsYLjfhFZZ+/NxhsovvrkyFZaoqU5GZHsrUPj67lnxfYUg8HQlIQAJTCSgYTkXkARKQgAQkIAEJrCWBlSQYshowfU8s+Lbaaqu1JGP0lUSAmVtY5eFia9999+1lubiSzn8e54Keg0EYU6gR8QkItVgcr7Qwk1WSVxqUsefD1F9WBo3fuGY6CIkHHHDAkpDY3O9vCUhAAquRgILharzqnrMEJCABCUhgvgRWkmAYcm2+5bLPz9VJgBmPxxxzTIU/f8PyEGguREMuCobLw3rFpYq/L1ZFYgEOfA4ScBS62Wab1b4L5+1Xb8UB9oQkIIEVR0DBcMVdUk9IAhKQgAQksHAEVoJgyKKgRx555BJb/POxUJ9BAhKYH4H9999/aTFUcuU+ZCHYPn4o51fK2eSkheFsOJqKBCQgAQmMJKBgOBKc0SQgAQlIQAIS6E1gJQiGvU/WAyUgAQnMgICC4QwgmoQEJCABCYwjwKp8ux+487jIxpKABCQgAQlIQAI9CSgY9gTlYRKQgATOJKBgaFWQgAQkIIF1R
gB/H3sdumd1/CnHrbMymLEEJCABCUhAAiubwEYbbFw9e7u9V/ZJenYSkIAEZkxAwXDGQE1OAhKQgASGEfj80YdUb//aflV1+rB4Hi0BCUhAAhKQgASmElivqh5y812r2266/dRDPUACEpCABNYQUDBcw8JvEpCABCSwjgh85siDqkOOPqj63V9+U51+2v/WUSnMVgISkIAEJCCBlUJgvXOfq9rwoleott90h+p2191hpZyW5yEBCUhgbgQUDOeG2owkIAEJSEACEpCABCQgAQlIQAISkIAEJLD4BBQMF/8aWUIJSEACEpCABCQgAQlIQAISkIAEJCABCcyNgILh3FCbkQQkIAEJSEACEpCABCQgAQlIQAISkIAEFp+AguHiXyNLKAEJSEACEpCABCQgAQlIQAISkIAEJCCBuRFQMJwbajOSgAQkIAEJSEACEpCABCQgAQlIQAISkMDiE1AwXPxrZAklIAEJSEACEpCABCQgAQlIQAISkIAEJDA3AgqGc0NtRhKQgAQkIAEJSEACEpCABCQgAQlIQAISWHwCCoaLf40soQQkIAEJSEACEpCABCQgAQlIQAISkIAE5kZAwXBuqM1IAhKQgAQkIAEJSEACEpCABCQgAQlIQAKLT0DBcPGvkSWUgAQkIAEJSEACEpCABCQgAQlIQAISkMDcCCgYzg21GUlAAhKQgAQkIAEJSEACEpCABCQgAQlIYPEJKBgu/jWyhBKQgAQkIAEJSEACEpCABCQgAQlIQAISmBsBBcO5oTYjCUhAAhKQgAQkIAEJSEACEpCABCQgAQksPgEFw8W/RpZQAhKQgAQkIAEJSEACEpCABCQgAQlIQAJzI6BgODfUZiQBCUhAAhKQgAQkIAEJSEACEpCABCQggcUnoGC4+NfIEkpAAhKQgAQkIAEJSEACEpCABCQgAQlIYG4EFAznhtqMJCABCUhAAhKQgAQkIAEJSEACEpCABCSw+AQUDBf/GllCCUhAAhKQgAQkIAEJSEACEpCABCQgAQnMjYCC4dxQm5EEJCABCUhAAhKQgAQkIAEJSEACEpCABBafgILh4l8jSygBCUhAAhKQgAQkIAEJSEACEpCABCQggbkRUDCcG2ozkoAEJCABCUhAAhKQgAQkIAEJSEACEpDA4hNQMFz8a2QJJSABCUhAAhKQgAQkIAEJSEACEpCABCQwNwIKhnNDbUYSkIAEJCABCUhAAhKQgAQkIAEJSEACElh8AgqGi3+NLKEEJCABCUhAAhKQgAQkIAEJSEACEpCABOZGQMFwbqjNSAISkIAEJCABCUhAAhKQgAQkIAEJSEACi09AwXDxr5EllIAEJCABCUhAAhKQgAQkIAEJSEACEpDA3AgoGM4NtRlJQAISkIAEJCABCUhAAhKQgAQkIAEJSGDxCSgYLv41soQSkIAEJCABCUhAAhKQgAQkIAEJSEACEpgbAQXDuaE2IwlIQAISkIAEJCABCUhAAhKQgAQkIAEJLD4BBcPFv0aWUAISkIAEJCABCUhAAhKQgAQkIAEJSEACcyOgYDg31GYkAQlIQAISkIAEJCABCUhAAhKQgAQkIIHFJ6BguPjXyBJKQAISkIAEJCABCUhAAhKQgAQkIAEJSGBuBBQM54bajCQgAQlIQAISkIAEJCABCUhAAhKQgAQksPgE/g8AAP//mVdIaQAAOBtJREFU7Z0HmCxF1YYbryKKYg6omHMCcxazCOacA6KYA2ZRMaNgwpwwiznngFnMijnnHFBQMSP/ffv3uxyKntnZuXfnbu++53l2e6anq7r6rerqqq9OVW9z7EbrNAlIQAISkIAEJCABCUhAAhKQgAQkIAEJSEACGwlso2BoOZCABCQgAQlIQAISkIAEJCABCUhAAhKQgARCQMEwJNxKQAISkIAEJCABCUhAAhKQgAQkIAEJSEACehhaBiQgAQlIQAISkIAEJCABCUhAAhKQgAQkIIHjCOhheBwLP0lAAhKQgAQkIAEJSEACE
pCABCQgAQlIYN0TUDBc90VAABKQgAQkIAEJSEACEpCABCQgAQlIQAISOI6AguFxLPwkAQlIQAISkIAEJCABCUhAAhKQgAQkIIF1T0DBcN0XAQFIQAISkIAEJCABCUhAAhKQgAQkIAEJSOA4AgqGx7HwkwQkIAEJSEACEpCABCQgAQlIQAISkIAE1j0BBcN1XwQEIAEJSGD1EvjQt47u3vbVf3Y/+eOxqzeRpkwCEphK4LW3PdHU3/1RAhKQwKIJbNiwodtuu+26bbfddtGn9nwSkIAERkNAwXA0WWVCJSABCawvAoiFz/zoP9bXRXu1EliDBBQM12CmekkSWCMEtt9+e0XDNZKXXoYEJLDlCSgYbnmmxigBCUhAAptJ4Jhjjunu+8aj9CzcTI4Gl8BqIKBguBpywTRIQAJDBPA03GGHHYZ+cp8EJCCBdU9AwXDdFwEBSEACElh9BP7zn/90N3zRUasvYaZIAhJYNgEFw2UjM4AEJLBAAqc5zWkWeDZPJQEJSGA8BBQMx5NXplQCEpDAuiKw+/OOWFfX68VKYK0SUDBcqznrdUlgbRBQMFwb+ehVSEACW56AguGWZ2qMEpCABCSwBQgoGG4BiEYhgVVAQMFwFWSCSZCABCYSUDCciMYfJCCBdU5AwXCdFwAvXwISkMBqJaBguFpzxnRJYHkEFAyXx8ujJSCBxRJQMFwsb88mAQmMh4CC4XjyypRKQAISWFcEFAzXVXZ7sWuYgILhGs5cL00Ca4CAguEayEQvQQISWBECCoYrgtVIJbAYAn//+9+73/zmN93Zzna27iQnOcliTupZJLBMAj/4wQ+6Y489tjvrWc/anfzkJ585tILhzKg8UAKrmoCC4arOHhMngXVPYFGC4Sc/+cnul7/8Zbfrrrt2O+6441bn/pa3vKX797//vSkd5zvf+bpLXepSm777QQISmJ1A7qeb3/zm3YlPfOKZA77rXe/qjj766E3Hn/GMZ+yucY1rbPq+tT9sEcHwL3/5y7JeR3+Tm9yke+tb37q1r33h5//iF7/YXeYyl+nPS+d5tRhvI43Y9LnPfa677GUvu1qSNsp0vP/97++ud73rdac85Sm7P//5zytyDZ/+9Ke7gw46qHvTm97Ux/+gBz2oe9rTnrYi51qPkV7hClfoPvvZz3YvetGLurvf/e6jQ/DXv/6122233brf//733bvf/e6OBmC1P/3pT93+++9fd53g861udasTNBr/+c9/ds985jO7T3ziE91vf/vbvq64xz3u0e28884nCJ8dr3/967vb3OY2/f3wrW99qxe389tSWwXDpQj5uwTGQUDBcBz5ZColsF4JzCoYfvWrX+1e97rXdYcffnj361//urvoRS/aXf7yl+/biic96Umn4mOQ/0xnOlNHvxnR8CxnOUt//Ec+8pGOvsMXvvCFvt9wkYtcpBcLbn/7208UHehTko6vf/3rfXvs4he/eN8m22uvvbqTnexkU9NRf9xhhx369GTfLW5xi+6Nb3xjvvb99Q996EP99bJzl1126a55zWt2CCKTjD7+csNMiutLX/pS94Y3vKFj+4c//KG7wAUu0PO+293u1rcrh8LNE2Yonln2UQboK1AefvzjH3fnOte5ekb0HZK/bTzzhGnjyHfa5S996Uv7PsvXvva17rSnPW13wQtesLvTne7Uc8pxdfvf//63z9d3vvOd3Te/+c1uw4YNHeXnWte6Vkfbf5tttqmHz/15nrRNOtmXv/zljv7ENHvEIx7RTbuP4XTggQd2j3rUo7o73OEO06Ka67foPOc///l7ruQF9/os2spOO+3U/eIXv9h03mtf+9rdBz/4wU3ft/aHrSIYXve61+0rxnkuHoHklre8Zd/p/PnPfz5PFCsehsrse9/7XvfqV7+6o7KP0clmRAlbrYIhIsnlLne5JNntHATe9773dbvvvvuKCYZU9Be60IX6MkbyGIXYe++9u8c//vFzpNYgQwQYXeXh9IIXvKBDE
Bub3fOe9+xe+MIX9sn+xje+0dH4rFbrorq/fj7kkEN6oS/7KHc8wGjYIoYzMk49h/GQHBqR/uEPf9id97zn7Y9h1O2mN71p/3nWfwqGs5LyOAmsbgIKhqs7f0ydBNY7gWlCQ9jst99+E9vaCC60c9LmSZi6Rfi69a1v3cVx5l//+lf3kIc8pHv2s59dD9v0+cIXvnB36KGHdmc+85k37ePDU57ylA5xZMgQKxAfEa5msQiGOB7QrmOA+YY3vGFH2uhvv+Md7xiM5gY3uEH35je/udt22203/T5PmE2BBz48/elP7x784AcP/NL17dDPf/7zvThWD5gnTA2/nM8IOginiEKt0U5GeGXwvto8YWr4+vmnP/1pn1cIhUN23/ve9wRlC9EaHQZP1yG72tWu1r3nPe9Z1mygoXjmSdtQPNn3xCc+sXv0ox+dr4Nbznn2s5998Ld//OMfvXMD/RbuN9hsabvXve7V9xuf85zn9H3HOGNVbeVXv/pVP9uKcyMwn/Oc5+yT8ZKXvKQfLKDvSf9rzQuGj3zkI7s73/nO/cVP+seUNKamzWNjFgwZBdp333277bbb7nijN/Nw2JJhqodhLdRb8hzrKa6VFgwPO+yw7kpXulKP9DOf+czEEaT1xHxLX+uYBUM8CmnIxYYEw+c///ndve99777s3PGOd8yhx9te5zrX6c5znvNs2kcjh0bGuc997u4rX/lK71XOw5uHOI1KGsLVmOJy1atetR/1RHRFfF2uKRgul5jHS2B1ElAwXJ35YqokIIH/J7CUYJjZEhy9xx579B6Fpz71qXtBLwP2TCFE4JtkeOYx6Pre9763n4n0vOc9r7vPfe7TH45nH15hpzjFKXoh7rnPfW6/H+EAAfBEJzpR/x2hh7YVhkiJtxReix/+8Ie7JzzhCf1+0sH3WTzFIhjSR8VbMvbkJz+577PynbTd5S536eN7+ctfvqkPy3VXEWeeMDlfu423FvsRsfbZZ5/e2xKvuAyII45++9vf3sRmnjDteWf9fsQRR/SiLGIh4iCeaxe72MU62tyInNn/ox/9qDv96U/fRztPmGnpQdhlKit2wAEH9G16PB1xWMJbFUM3qd6gCGUpWzgX3Pa2t+2XtuK4eJYiRpOXm2PzpG3a+ehnkEbukUmOTehPQ961CKrcZxFJV0IwJL+5lzA8YU91qlNtcpJgZlb6U1UwpGy0wn7qmTUvGK70FL4xC4bTboSt+ZuC4Zalv9KC4UrHv2VpjDO2sQqGTBNmdLiOdg4JhvFApIHB6PYsxujzM57xjL5RlBFfvLwZzRuafh8xkQYdAuNy1i5MehQMQ8KtBMZNQMFw3Pln6iWw1glMEwyZYYE4SNuKgVMGZuv6ZOmbwujjH//4JkGvMvv+97/f0R5iVhDTkfHGw3OQOBFtEHkiChKuehHiFZWlZa5//ev3HmCsXc500ogUhGFgFi8nbFaHgiHBkKmkOLdgiIWIGEkbM+Twkoy4hMcax84Tpj/BhH/MSHnb297WL+WFo0TlHVGFoHCNV+c8YSacfsnd1ZMRr7BLXOISm8IgUGWpnqc+9andQx/60P63ecJsirT5wNR4podjiIaUixh5cclLXrJjGSBENETeGOLy7373u94Drh3IT9+HsrU5szjnTVvSOLRFcENgm7Vcs/QSs/0Iw/VWWwnB8BWveEUvqjPV+VWvelU93fE+Kxj+T1WdVTCkMDNKgDGawppvrX3sYx/r1+BiTQjcsul0Umk861nP6juoBx98cB+EkYcznOEMHeu6kRGsE4iLJ4WdiptRE27Y2He/+91+dIffWWCSOeaE4SGw/fbb57DjbSn8VAikgYoRRRj1vHpK4rbNAwAVm8JJpZ20saXwMuKDUQFjlQP7WNfgU5/6VH9DHHPMMX1lcLOb3ex4D4Q+4P/+Md+dufG4ZcPpile8Yj/t+Tvf+U7HiwZ4OKXSquHq56UEw3AlDJ5tdU2GpJf1IhjJYtSLtTzqg5c84+GIY
MCo3JBRuZIvhIMTxhoMjNZUY/QIF15c4G90oxvVnzrWbUNMwzhPFSh4MQicGHlhhIdRObhc+tKXPsEIXK53qXLEQ4o4KRfkFQ+LG9/4xn3eTZqSTD5RBih7jILg/s+x7ZTR413Y/74QBg9QWKc854GNNxgNGuIOZ8oWI0yMZpIvlEGMMk+FCwsqUhohPHTIt3ZkhvLFsdxfhOde4jv3A2WLePOQ5FjuWZhwHzFSyahWGh39yaf8wwX+b3/7W7+GBm7clBv+EMG4/8lTvNuGbDnXlPCci2uBJ2WKvKAOYOQqD82hKck0lkgX+c59Rhm98pWv3KcxDaqcgy0u8m9/+9t71uQRjUXuI3i2x9M44n5kZIo8XY6RLu4JGg4ve9nLuj333LMPPiQYsk4pZeMDH/jAzOdhHUIaaaSRch7LCDY8U34++tGPblqsl3K2VB2UuNqtgmFLxO8SGCcBBcNx5puplsB6IVD7Le0101aOhxDtbDwFW6OdQ1+GpagQ/1pjhhleW/HKQ+yLR1/r3UfYI488clNfCkGS9jT9HAZosSHRgz4o3myIkI95zGO6xz3ucf2x0/4NCYb0G3KNrRhGXFUQY61C1r6bJwxxpY/IZ/rVWQcywhbrNCJQVmN9btrSWBXL5glT413qM+UAMY0+KO1++ou0u2njt0Z+MUWd9j79emyeMJQDGGMIo+lzIUrhbUe/iOV/WsMjE882+rv02TDW1ad/gdF/jBbR79j4j7Xw40RAH7JON88xs2znSdu0eGu6+Zx7YFoY+vrx7GyPG7p3cgx9MLQE9Aa0A+oFNCWWEaB8TbL0q+gHZ/k58p9+Hx6/9Gvpc5EunDYwPD0px2gA9HOxiOF6GPY4/v8f7s0oshijNVVMogLJtEs8W1izjYVchyyd3owsIFqSMekwx/sFUYdFSOlMDxkCCB3dKogxcvKABzyge/GLXzwUZFPFz4+pdNsDEZ4Qiuq6YVnDEDGBgoghXvCyhdaonHhAsTZiNSoCKoTWKJgci9jxsIc9rB+lao+p36cJhoiy4c40RwpyhDimILLobPVmSrxVVKgu95PWF0B4wlUYZZ4KmUqY80VUTrw5ju/cyPXmpcLmhsZqhfKa17xm4uKmHM811obCUuWIm5+RPx7+rVHWmH6Ja3rKHcewdgLu1HEdb8MRhmmiEV/a3/nOIsKIx0PGgxrhG+GP8yLeUZYYXcJYG4F8pAJkQVsYt0ZlxQOkipevfe1r+8YPFRd/GSWrYbn/YDIk+vMgo+yf7nSnq0EGP2fBVypY3OGzPl49mDxuheLlXhPxIU6Tz+FTz0EZ5OFKg6gVDGmk3PWudx3MRwRA8pcBkNiTnvSkfrpIvtct9QKNgDy8+S11SC07Ncy0zyzmy/3IfcPgRcTIVjCs9zvlhHvoJz/5SV9u6v3Unut2t7tdv64G92SmPFOP5TxpXOCKTxli0IR1PDLdpo1vlu8KhrNQ8hgJrH4CCoarP49MoQTWM4HaD2g55GWG7McxhQHm1vDkeuUrXzko4FQhL/0g+nW0qzHa0G37n0HYOLHQj6ENhtMKnmPYkJDH/kzdnCRkcUy1tDuraImQwdRV2qIIVmnnJRzpjcclTjz3v//9e/FjuWGIjz5JnB8YhKf/SjsVjzmWtqEPiQNJNfoVmZaKcwxi7jxhapzTPuOswQwbhFucGBB5kl+T2rlZ+od4k7/zhKENH+eZKkbTP0PPgMPQ9OF4orYehnnPAv0TlpGrljBV5Ky/z/p53rRNih9HFJxaIo5yb/zsZz/r9RrK7ySjH4L2E+P6uG8mCYbcm2goQ31k4mjfTZF4I6CTPsoj+Vz7WpQf7pe2HCc8+UB+YAqGoVK2jJTgyYMwwM1H5x1h4aijjupHXejM0qlGmUdApCJGRc8cdG4C7IEPfGCvoEfoYSQC4BiVHSMfiC2s7cCIC0aBQHxCjMMLMaNBCHC4DMfw/orXGhU7ogDqMJUb6cPoqCMi3O9+9+vFs4igqPpU7
Hgjct6lBEPSivjG9SCeIKIiBGFZILf/svFfFSJgRAeeAopQx80Q2xzBMA8M4mIUA9EpC3hm9IDfYMJNiLgKxwg9EYH/+Mc/bhKMGL1AAK4GR64X43oRa2CAIbRltKk+PPkNYYk8j7EmGxV0fUhWsZK8pswgrCEMR7xDKOZBmZGUpcpRRgk5L3mMtxWeVYwqIJ7FquhD/mdtEVhe5SpX6XlRThFHsUkVUeLDI4zro5JMucg9wIgQD9YIhpTvLFRMBYbgTWWFR2UMIYcHB6OcWQ+E3+rUhwiGCcP1MiIFK+6T6ubN9SKu05CCLw9WjLQx9XUpi2CY43gwUj/gAUhjCaOewBswojW/LfeaEJrJ8wjdMIQblTz3db2mKhhSFhHC8iChHHNv04CLtzP5ighIma2NQfbTKILbxzaWExa0xag3qD9iabjVspPfpm0z1YVwNLgY9EjDpBUMEUvz0MILmjIVSx4+/OEP38Q4vzENmTyv93DOS77QiEJApK6i7DEAxD2WdCSe5WwVDJdDy2MlsHoJKBiu3rwxZRKQQHc8x4GWRxWoahu5Hhehjn1xCsnvcWigXUTfCKNvRH+HdmEcZHI826wbzeeIg4TNgC195SGxJMvBVM8y4phkaXdWwRDnABwf4vAyFBanBISQ9JvnCUO8Q4Lh0PmyjzY47UxEmmnpy/Fs5wmD0ARvOKSfBlM0CcQg+pJY1qPsv5R/Nf8YSEd0XW4YNJFJgmE51fE+kjb6vvTHsDZ9dZ1JXlpDHxZdgz5bBvgJn6ntx4t8M78slbZJ0SOM0l+i70a/q/a16ePiSYugvlR/Ix6ekwTDlGnSgX5CX4kZkvSRo/nUKfBJb7w56SPFkasVDOnjoh8wIy79Y+oDZvChNcXbc90Ihogy5zjHOcJwcEvFGTdRKks66xiVLV5rLMKPeELnlQohHngcA2SO4zxM0awWoYd9hEXsQMhKAYqqTmd4//33r0F791AKRK186rQ6brD6RirEGVzJeXDgmYXoEst5WgFoFsEQHtwMsRRCvnNO1nCoAhziE4JD1nZgFIu1MBDTsHkFw7p2BmmgwswIE1OGyWMEF+KHZRgjqnB+hEtubMQLfkOUQfBtWZHGCJPkKeo+glDWo0BciWsvwi7iZIyKjZGdWASncIcTZYd0Eg7RuI4gpgIiPGlAcMSmlaO69gDefpSxCJqEJT0IxxhlEE9HLFNcW4GICoWKApEJbsS3lE1aw7DeS8SBcE6lnwfU1a9+9V6sQtyhbMf9mWNpjMAIVlWcroIhQiz3Z66XMFwXxrVGqOp3bPyXBW8Ryyj7S1nyj+MiNidMGlt858HN1HtsnmvKWnyEb9ea4cHAVOB4HlbBMPcE19repzQguE6MOoqpCNQzTB1Puc79wzEZvCAuGn25fw466KB+mQIE0Ty4OX6aUS/Q2ERQ5kFD4wtLnK1gWFkmXspEFUopG3huZooxx0UA5eGMUEsjE7d6xGgEfkZf4UUjg/hofLJF4IYXDR+m7GTEPOeetlUwnEbH3yQwHgIKhuPJK1MqgfVIoPYP2uuv/a62P8ixzD7Byy0D0XWJFn6P80mdocH+Scb56I8idNGXoh2Hw0bEBMLFa62NA9GCAd6hfnJ7LN+HBMOsc83MIQSnIYs3GjP3mNk3TxjipU0aRxP6SEMzkmjnIqjQ749TDO1eBK/2DdJJ6zxhCEve4aRA+z0OAnCg74AzBm3r6ujC7EC831qrIjN9W/oA9HOwWcOwRjhlK2sQImDRtxoyygb9Wtrbac/zndmc1RCzKcO8LGfI4jE69Nu8+2ZN26T4qxaSY+g/5X5jHzOsJs0GTZhpgiHaRvrL0RESDsGXvg/n4xycK0Z5yVqk8T7lt1YwjEds1REoX1XzIVzucZzUEJ1Xi22zseAcu7mJAeDQKMekeOmU12m/1QsMsQKxCaMSaacfzioYIsDgWRjLdGQqEFw/42GT3/fbb79+ejEVMx44WNYfqMJXjmeLizpvCKUSQ
MRJR3hewbCKVjkPIgDz4rFwi8DGPgpxW7lSMSGUYfMIhqwlF284xECEj2p4GvKAQAyg8o5nXo6hosrURoSUuDZHUKEyY5Qmlnn/sMw036wFQoWWtOSV6oiPeHHWvIqnE3GGSS1XcXPPObPNA68+WKtg2JajPIgJn/xIXNmmQqIyi2CYN6ORZoSXPDQIg7CCAEOFs9tuuyWaidtZBMMwTyRxl+Y7Dx4YtobgE+9PpqgiClfBEOGnppvwEaUY3Yn3buJNOR0SiXNM3UYwRIilEVCN6a6MwlDX4AVIw2GeayKfI64zdYJRptYijLG/CoZpVGV6dxsuQmQq+TqKV0VOwtEA4D4jLawNuGHDhja6mb+zHg71VwZcEjB50wqGESs5jjzCk5Np0dw31I1cH1Zd5PnOo4KRbdKN5WENUxo/PGxzX3PfcB8woJGpJn2gjf8ov7OUc45XMAw1txIYNwEFw3Hnn6mXwFonME0w5Nppt8SzqQp/tJ1oR+U3jsWLKDNhaE8jCtBnQmjKTC2OGzLe+kvfJGJVHdiOmEC4Sd13Bp7xLuN8CBhLWdq21cMw4h/retPXHTI805hNQn+CfsU8YYbiHdpHH6BdDx1Blb5gdX6oYZcbhr4rjkPMookQxbUhVmVKcOKn74uYh00S/6pTBX1UnC2WGybOMznvtG36AvUYroXp4un38BszNXGQqeW1hsF5hb7BUvdDDbPU51nTNimeOIfQ76BPgXCL9oBzB3pEPEC5pmnrv6d/PuRhWGcx4nDDzLHcw6SLviH3LzNH09dhP2tswqztgykYQqexKhiyFtu0zCIolW71XKHSQySIUMgx8Vjhc7VZBMNZBArcbxEGEZKY4kylg1URKsIfHexJSnwfqPmXcK1CvZSHIaJR1qZIlLxUJOJqlOi99967V7injfxEhFuuYEgFjEiJcWMiirGtRuXDzYZQQEU6ZNmf6YvkMSIto0hViOCaspBwRCrii9jCTYnAiEV0w/OJyoNyF9GO0S3KXsQajs+r4xn5oUIfMjzmsphuHvARDIfKUdhTWR166KFDUfZ5w3Fwi2DIiAT7YgjhpBX3Z8TR5QhGswiGeFMivsdy3/AdbnWdvRwDS/IUi1AawZD9rUcvxyEM85AdWpSYe4qRrSGOhG0tguGk0a32vprnmri/4wHN/RgRu6aFsoqABqcIhjS6MoqJ5x8jTa1RxkgTxoOCuiULWrMPEZdRZvKckab6IOL3eYzpIMRHWeNeiuc2cU0SDGkUUvfBgfu9tTQC2d+OYHNdlGUezgiEnJu1MeFFXDy8U+c89rGP7acJcA9Tf3JOpspgjJ7P0hhRMOxx+U8CoyegYDj6LPQCJLCmCSzVJqF9U9f4pm2LKMdMKIz2ZTzlqpgXsYSBXdpFk4z2FWvT17XC2wHWtMmJo56jxkm/i6WAJrXb67F8HhIMEWEYPJ4mGMZrMjOn5gnTpmXad5YNYhYbgiocMxNomkg0SxjapoistPcx2tO08xHV6otNa9qqY07rEJDjsu4e32mf41wUZ55Zw7TeZ4l7aEubnP4YAjZ6Cg4uWHUgYRYifS2Ow2iTo8mQNpYQou+NUa7r8k/9zs34N0vapkXPrDj6Ycwui+ia45mlxb3HNdUZcvm9bqcJhhyXlzvymXKACHi1jbPv0BEmibd5twKCYl4URHgFQyg0VgXDWd+S3ETRvxadDI9R6eYV8tnHNiLBUEUYoWfS1M4IAKjGqdRr3HwmDVQeeCRGkWe9Pl6EMKu1wkbCLSUYoly3ldOQYIggi6BD5QzvIUunP533oWOyrxbq7Mu2nfbL/pw/x0zb1vNnui4VES65iGR5sLUPpfr2MBiw/h4iMzcwYgNCFC7jEapyvYwORZjLw2xSeSDdiI8RTeKJNa0cRbSc9tr0LI5MWiMYIrzwMBqaZspxjAYyhWAWT91ZBEMeqNWLNpxrmtp8o/HBQwMLxzROEMiG3sIVwZD7Eo/cavMKhoygZlptja+9r+a5J
hp7EQmH1qHI+SK4RzDk4c6DZlbLC3l4yLHEAvd2a5QhGpARzNvfl/rOGrCIzTRE6mh3wk0SDPP7pC2NXwR5bMirdChc1g9lwAPhlBH0iK6MsuatbrlPqYPzUqqh+LJPwTAk3Epg3AQUDMedf6ZeAmudwFKCIddP25qZE3GsCBOcJGjn0K6r7WX6V/TrEDNoQw8NNhMH/Q/aRIkXkYc2dCtSMPsma52xbnzrdUdcmUVS08H+STYkGMbhoopNbfh4fbEMDTO65gnTxjnrd/rzrF9OXz4ejkuFHQpT3zpN/wh29C+X6ovVvjnOIziRtFaX0cIhgzZ5HIBmDZPj27hn+U47Oy8tTX+npqld+ok48RilnY5NE2L7Azbj31DaNiO6foYo/amlyvxSgiHlAbGdPnBrxI3DE/dp+lc4hvBSXjSpTDtPuKqt4NzhlOSNZLaEYFgXPQU2nXpe7BDRLhkwi2BYRaqEY1QCoSsvTEExprJDlEQkPPzww/sbK4IhAk+8vobWAEi8Q9tW2MgxmysY5mGTteGmCWG5KYZYJD3Z1kLNPioURJuMtrQVG4o7+cDNwyKj04zRgEw/ZNSDaaVYXOwz9TjTTGtcCCmIIYiCjIhwTRk9yPqDeBUyMpR19aroGuFvaIprzlMrT0ZTGP1JuCF2+W0a+1S4Q+IcD3jKIGWbc1OJxCaV+fye7SyCIaM5iKyxTA8eSlOOqe778Y5djYJhBPx5rolpBVkzldFJKvohQ1jk9wiGdfoza1fE23AoLPvwkE7jk8EHXoyCyE/ek38xxHNEa8r3cq0uV0A8rWUEkTxHbOc+iTdte2z9zj0XETP3af29/VzXRMxAT9b74dztGo2I49XLuI2vflcwrDT8LIHxElAwHG/emXIJrAcCabPNcq14OyHCMMjOWt70QbLOdRXZWMKF6cWtU0Q9B55dWZeONhOzbBAl0wetx9b1slvHgByX9wDQz2XQeikbEgwzy2tanyEOA3hQ4qk2T5hJacNxhzYu52+XJksYONHW5hj6PHgTLjcM/Z4sJ0a8iEUMgGcmUs7VblneLMtxTVqmKP1UwnIeRKblhsnx7fn5Tj+ceOkb03dtrTo+xQkka6tPm/mVfjnLLOGYMY/Nk7Z5zpMw9Fkp9xhelJOm/UcbGZqSnLjYolfgMUgfiHUE6RfFKOuUeSw8uffRDKpVbUXB8H9kNlcwjCcS0XHz45KNDb3yexbBsK5710e08V+8vvjOCA0iUjVeHIIQEMGQ3yIaTHrLKxVa1m3jpkqnf6UFw7yJCsGOCjJKd64Hrza8e7Ah0SvHZVsLNQ8uHm7VxRflHPGEShnDFZw166ZNiWbKNxUq6yvWGzcPMoQ+pjYj2BAvD986TZ3z5M3CjB7BlJfOIBJRmXPdiL2kDUERsa2deowowRqMkzhxjvraezjwgI4oOFSOSAMVA+WEkYWWPXFm+gHXFQ9DHmZ48CHixYuPYxGBKfNwx4ampfc/lH+zCIaMktSHYC3/k7zGqiiWNTlWs2A4zzXxkMy9MSRSg7nWZxEM69oWYVOypP/Ig5t8puFIw5N4uAfIhwjaHMgxrBfJmpzYpCnY/Y9T/iFCtksYTDm890bmXqJ8MyBC/RrPvxquiugRAOvv9TPT1KknuVZeapQHNssLMJWi9QRnnRtG5zIiXeMa+qxgOETFfRIYHwEFw/HlmSmWwHoisJRgiNMHxsDz0LEs0YIwQJueNh6WWRW8iJK2V2s4ZGS9fURFRKZpXmW03yNmDfVliT9910wVbs/Zfh8SDOlX4RiB4U3XDpKzDncGquNgME+YNi35XkXU9M3yW7ZpT/Kd9jcecRFeZw2DIEd/laV2aItnoJ01KeM1mvO1W/r8OKlMWg+d8OgZtR08T5j2vPkeYa++mTe/ZRtRN+8jwOMQ774qaufYbDOTjrY6x85j86Rt0nlYloy+N31nyvw0r1rKJHrCJJsmGFJm6LPTr2/vb2aZ0b+JcEjfDkE2a+tnebR6X
uKL/qFg+D8ytYO93CnJEX6IKtNfs3Aq+9rO+byCIW87QglGxKkeL5wDi0hUBUM613SCCUOFWQUYwmS9Bn7HoybekCstGPLgyfTPIZdhKqisIbhcwbAW6ghTXCt5griGZcSMz0OCAkJqpsO2r3OPlyXMqGQZkaoPV+KMZbowxyIG4qFVX5iSipCH2iGHHNKv15DrJo7q+US5CbPEj7cf+U2Fj+CYxkDKwpBgGO9B4mivjX0ISwglPHRIN5VPFXDzEhiOjVVerDsRd/D83m6TL4k/v8Mm3nOtYFjfypT7LOGyxVsUjtiRRx7ZC2urWTCc95rSmGIqOmuMtKJvPBfhEMGQz3nTNfUCDZXWaJzR2CNe3o6Wac2U8fqGdcIh2O244459OZlU/tv4h77DYJJleQO8WamTEKtZNzH1E2/zxuuwtTQo2M9DMfVaexwPQ9bhZApz63GLKMrbxiijtb494IAD+kGMoZfktPHzXcFwiIr7JDA+AgqG48szUyyB9USgFQnaa0/bqW3vcBxTivNiDNp/tAPTRqUdhMdS6y1G+4p1rulHMXuKNdUjMLTnrt8z+wrRh35LdUKoAiQC3iyzSoYEw7qm+VBfKI4jpCtOCPOEqddVP2eqJ/uGltxhf2bbxVlknjDEE2MmIrPrENfIE4ypxjgNIea2fYUsxUP+4m1KnzRW35qNg0xm7M0TJnG22/TZcBygzLUeqelDEy5aQXWSoZ+aWX+Jm+WU6JtgcdDJb8vZzpO2SfEjAEawHhLeEYtZK50848WMlJdJNk0wrE4o9OVbJ6bqMQo7nCtYHiAzH9tzKhi2RDZ+r4IhXisZlRg4tN/FzYXnGZUlFR4uxHiBIXjwMgYEFjr1CDko86xnl/UEqmDDFFJGYlIJTxN6akZTwSIQYQgreIQdeOCB/feqTtd19Kg0qMx5qQAdYEY/sq5hFdOIJIIElQyVKt5F3MgRyzgmi9XWhVPrdFqOweo6CZmSjKrNzRGvPzylcD1HAKOyw3sntjmCIXGwlgMeoFgW9ETs4GHI2mQIbpwznkrkCYIDIgX5TKVZvau47rz8pI9047+6xln2seVYKomM+BAfQkQqxZo2js8aDXzGSCcPkrios+ZhRp+ogAif6aF1Adpp5Yg4uVa88UgPAh9lgwcJcfJwRkDB+J2yjGW0BW68kIRyjREfZS9vosZjK7/1Bwz8m0cwJBrKAoINxn2KpypiEJUt5RTPSSwjUXxezYIh6ZvnmqqQTF2Fqz55xX2FsMyit7EqGFahHO9XPFgp2zQyqJfSMEOwZ9pt3ppM3DyImJ4fq158tVHHCDXlnQc24vLmWBo3WZszcVVmuOVTf1EOeLghILKWJlbLQcLWLWWIhiTllRep1CnwHBdBP16z3M+77rprPy17FmGcOBQMoaBJYPwEFAzHn4degQTWMoGlBMN4jMGAfg/9CUQFHF9Y4oo+XHU6yRRlBowZOG6NdhD9DYy+YISa9ji+VyGRthntT4zBV5wASDuiEV5QCCe0O4lzqWsijiHBkP11vXqEmj322IPdfb8JkQSj71mnPc8ThjYnLDDi4qUWtEcZpKevRb+c39OGRnOgzxIvThyC6M/ME6Y/afOPfhnTUOmb0ZfFmJ1EP40ludKnrWubw4PjI+aSHphhtPd5MQY2Txi82hicxxCo0lerfRn6/Zw/L1PkPPRJKJOUBURd2uhMY+daMHQQ+vfoLxj9d3QNwmIIsIjk2FAe9T9M+DdP2qadJw4bXAt96HjloglQ/uPsMuSUU5M4TTCsy6Yh8sIzLwhFQMTrlHIIL/SYlHX65Fl6rZ5rkmBYz0N8hEXHivBP3UI/tL7Etca7tT5vs7ETd+zmnrwKhrPEFQ+cTDklTL2h+M5NmrfNUIBZtwyrXlT9jo3/EGjoiE4TesjcungsghuZieiGUQDiakqFj6cdFS2LaGauOsfxW0YeEo4Oce0sJx38jqVC3VKCIXEigjF1OGIa+2LcUNxccBkaGcpx2
U4q1PyO5yTThjlPFW95MHHDkvcYFTo3VhiyDxElAh3fY1l3gu/kA3k6yaik8qYnKkrE0RgCYETbSesxMG2TmzGcSCeib/KduFpRNfk3iR1lg4oi1w5vHvS1XBAv+yMYVvd6fuO6mRrL2pmJZ2jUkmNbm1cwpGJllIyyEyMdPJBjlFWEsTx0VrtgOM810RhgfRjKT4zyg1ibvMj+KhiyL1Pq8zvhalliAARPWBqR1DmIy4mTOgavP0Zhc5/UuoY403CrZSfnWu52kmDIgw9hMvnOuRDxaRwkrTQkuI40jNpzH3bYYf1gD/tbL/Acm0Yt18g9hjcn0z6oR7j+2gBOmHarYNgS8bsExklAwXCc+WaqJbBeCCwlrtGPoG+FCBOrfUfaUggWtKsZgGZdOY6lbzD0Es+sf5a4pm0RcSKYMUhNOmrbk75N+jnEk2nC0+LMb2l3MvCLx2MMJwzabzGuD0s7kc9VVOL7PGHqOwxqfO2yO7QduU6cTGL0aXh5R2bCzBMmcQ1tiY+BdGYbYpyf/MzSRrxkk3ZtjDTW8jE0LXy5Yar3KqIVeRur7NiHtyHloKYBEZa+XazOomIfeQy/Wp7wRMSZIFbPU/Movw9taxh+nyVtNUw9T52JR1xDZWGSMM/xsWmCIcdkybccj16FkxL9/hj5jaMQmhLlgT5dyl+OYTtJW0F2o/zU+widibURsTUtGNIBbafr9lc94R+FBuC84QibNCUvb1zimCo+IeLhxRPYcbVNQZs03Q0vH9yB01knXipAKm28Aek05zduuLjqIvYgXLWCEBUBb1RqR4W4uVkPMQUsAuk8gmFdJyJu36Qbw91933337T3/UjkwAhSPSQodIxF01qdZLdTxBqrH15GCuggq52RUAz7VyF/EAkS1IavX1FZK7fE8fOMN2h6bddIIU2+2Ng7ykvUSEcJSZjgGsQc+uDBXW6occSznZgSCslcNIQqRlDUyKVsRDDmGhxrprIId+6lwKJeMGA5VOhxTbZJgWD1ih9ypiYPFYFk7j6UDauOCNMAoXoc536yCIWWkXZg4gu4kMTfnyDYjc5Rb8qC1eO5WjzyOWe41EYYKG+861h7NvcN+Gn+IhNQx3L9Dixmz3il1TA1HWOoQHli10UldwEhoRho5LsbUZspfncaQhltbdhJmOdvE1XoYEgdCK6Nn3KfVeAjjHclfPHnr73wmLA1irh8vw4x0t8dRr1BfwDlGWaAur4M3+W1oq2A4RMV9EhgfAQXD8eWZKZbAeiJQ226Trps+Iu3DOuDMsQgyvCmYNg7GjCw8hBAWmCY8ZDgztH2IoePYhxMNXowxlg2ivdm+zZU2LIIQa8zPamkrtoIh4RFq6OfF0y5xcr2cOx5o2T9PmPS5CFtFIr7jvUn/m3ZjNfosOB3RD2/XtJsnTI176DPtXfKXP5wBog/QzqXPQF+89i9pw9NPQMdo+3XLDTNNMCQuZlAiNtU+HddAn4w+J+311uhH4hRTxVeOof+Ox2Y8+BJuWh7lmHY7T9qmnQcxGs7MBKvGPUdfbpYyv5RgiEMJTk0waHnCEb0I7784k9GfJk1DxvXHMaLVVvCSpM8YzYl+FP0pbE0LhkOgVus+Rn3wJELIwdOsLi5L5lIg6cCz/lZrTF9GtKMAsOhspkK3x22N73TiqaComBBDMu130osdtmQaYYorM0xxJYfrNMPTiKnCGIvNDrGeFn7e3+CC0MeDlodcvOjmjY9wlBkeHogrrF04SWSp50AwZao53Lh2RJpZwtU4tsRnKkOmLNDAYImAtWDzXBP5wXRyvP9maTCGE+WIhgnexZT7afUB9wYC/9FHH92PLHGudn2MxLvILWWQOo10zXr9jEKyfieNNETztjHUpp+lErjvGFGr9W173NB3BcMhKu6TwPgIKBiOL89MsQTWE4HltP8YlKf9zDRC2tARBsIL8Y22EusaRkTMb1tySx+EdNAeJR3LuYakY5pgmGNox9HPo69Ce3eW9us8Y
XK+dssSZrQjcRBALIxg1x5Xv88TpoYf+ky+T+o70sZHWKRPhyNRZvkMxZN984RJ2HbLVFfWIGRmEHk0aYZQDYfICVeEMsoPOsIky9Ju095CPCnsPGmbFBcetpRF7r1Zy+KkuCbtp28ES9KNoQtlHUW+MwjAb8yEXUrz4PjlmILhcmh57FQCmQ7MQXg91kLMvrrWGtN9h0YXOG5rWdYebF2rt1Z6PK8EJLA6CSgYrs58MVUSWC4BBcPlEvN4CUhgkQTmEdsWmb6VOtcsguFKndt4x0EAxxi8SpkKz4wybeUIKBiuHNt1FzMjSnimMTLAdGcWfmWLtx4LtSLIYbgjr5YbG5GTUQG2uOFivMGJykeTgAQkMERAwXCIivskMD4CCobjyzNTLIH1RGC9C4asjYeXGf1LZqlpEggB1m9HT8gandnvdssRYL1MvHJx+uIlSWvypSdbDpcxzUoAYZDFXicZLvCsLUjlvxqMdRSY8x/bc889u4MPPjhf3UpAAhI4AQEFwxMgcYcERklAwXCU2WaiJbBuCKx3wTAZ3b5gMvvdrl8CvCQRPSFvDV6/JFbuynfaaafjrY2vYLhyrNddzKxbwdujeRsy6yawRhijQrwBda+99jrBmhpbExCLlPKCB9a92H333bt99tnnBAvVbs30eW4JSGD1EVAwXH15YookMA8BBcN5qBlGAhJYFIH1KhjyskM8m2K77LJL/zKRfHcrAQmsPAFekMlM0djOO+/cazn5vrW322x8EcSxWzsRnl8CEpCABCTQElAwbIn4XQLjJKBgOM58M9USWC8E1qtguF7y1+uUgATmJ6BgOD87Q0pAAhKQwAoRYK3WG77oqBWK3WglIIFFElAwXCRtzyUBCSyXgILhcol5vAQksF4IKBiul5z2OiUgAQmMiMAxxxzT3feNR3U/+aNO8CPKNpMqgUECCoaDWNwpAQmsAgIbNmzoeFuwJgEJSEACJySgYHhCJu6RgAQkIIFVQOBdX/pd94z3/noVpMQkSEACm0Pgrfc5++YEN6wEJCCBFSOw/fbbd9tuu+2KxW/EEpCABMZMQMFwzLln2iUgAQmscQJv/9yvurd+4YjuZ0f8qzv2v8es8av18iSwNgm8/QHnW5sX5lVJQAKjJYBn4XbbbadYONocNOESkMAiCCgYLoKy55CABCQgAQlIQAISkIAEJCABCUhAAhKQwEgIKBiOJKNMpgQkIAEJSEACEpCABCQgAQlIQAISkIAEFkFAwXARlD2HBCQgAQlIQAISkIAEJCABCUhAAhKQgARGQkDBcCQZZTIlIAEJSEACEpCABCQgAQlIQAISkIAEJLAIAgqGi6DsOSQgAQlIQAISkIAEJCABCUhAAhKQgAQkMBICCoYjySiTKQEJSEACEpCABCQgAQlIQAISkIAEJCCBRRBQMFwEZc8hAQlIQAISkIAEJCABCUhAAhKQgAQkIIGREFAwHElGmUwJSEACEpCABCQgAQlIQAISkIAEJCABCSyCgILhIih7DglIQAISkIAEJCABCUhAAhKQgAQkIAEJjISAguFIMspkSkACEpCABCQgAQlIQAISkIAEJCABCUhgEQQUDBdB2XNIQAISkIAEJCABCUhAAhKQgAQkIAEJSGAkBBQMR5JRJlMCEpCABCQgAQlIQAISkIAEJCABCUhAAosgoGC4CMqeQwISkIAEJCABCUhAAhKQgAQkIAEJSEACIyGgYDiSjDKZEpCABCQgAQlIQAISkIAEJCABCUhAAhJYBAEFw0VQ9hwSkIAEJCABCUhAAhKQgAQkIAEJSEACEhgJAQXDkWSUyZSABCQgAQlIQAISkIAEJCABCUhAAhKQwCIIKBgugrLnkIAEJCABCUhAAhKQgAQkIAEJSEACEpDASAgoGI4ko0ymBCQgAQlIQAISkIAEJCABCUhAAhKQgAQWQUDBcBGUPYcEJCABCUhAAhKQgAQkIAEJSEACEpCABEZCQMFwJBllMiUgAQlIQAISk
IAEJCABCUhAAhKQgAQksAgCCoaLoOw5JCABCUhAAhKQgAQkIAEJSEACEpCABCQwEgIKhiPJKJMpAQlIQAISkIAEJCABCUhAAhKQgAQkIIFFEFAwXARlzyEBCUhAAhKQgAQkIAEJSEACEpCABCQggZEQUDAcSUaZTAlIQAISkIAEJCABCUhAAhKQgAQkIAEJLIKAguEiKHsOCUhAAhKQgAQkIAEJSEACEpCABCQgAQmMhICC4UgyymRKQAISkIAEJCABCUhAAhKQgAQkIAEJSGARBBQMF0HZc0hAAhKQgAQkIAEJSEACEpCABCQgAQlIYCQEFAxHklEmUwISkIAEJCABCUhAAhKQgAQkIAEJSEACiyCgYLgIyp5DAhKQgAQkIAEJSEACEpCABCQgAQlIQAIjIaBgOJKMMpkSkIAEJCABCUhAAhKQgAQkIAEJSEACElgEAQXDRVD2HBKQgAQkIAEJSEACEpCABCQgAQlIQAISGAkBBcORZJTJlIAEJCABCUhAAhKQgAQkIAEJSEACEpDAIggoGC6CsueQgAQkIAEJSEACEpCABCQgAQlIQAISkMBICCgYjiSjTKYEJCABCUhAAhKQgAQkIAEJSEACEpCABBZBQMFwEZQ9hwQkIAEJSEACEpCABCQgAQlIQAISkIAERkJAwXAkGWUyJSABCUhAAhKQgAQkIAEJSEACEpCABCSwCAIKhoug7DkkIAEJSEACEpCABCQgAQlIQAISkIAEJDASAgqGI8kokykBCUhAAhKQgAQkIAEJSEACEpCABCQggUUQUDBcBGXPIQEJSEACEpCABCQgAQlIQAISkIAEJCCBkRD4PyL3CjJY3KKcAAAAAElFTkSuQmCC"
+                }
+            },
+            "cell_type": "markdown",
+            "metadata": {},
+            "source": [
+                "# Show tqdm progress bars for all primary index creation operations\n",
+                "\n",
+                "When creating an index, you can optionally set the `show_progress` flag from the `from_documents` index creation call to see tqdm progress bars for the slowest parts of the indexing process (e.g. parsing nodes from a document, creating embeddings, etc.).\n",
+                "\n",
+                "`KeywordTableIndex.from_documents(documents=documents, show_progress=True)`\n",
+                "\n",
+                "![CleanShot%202023-06-25%20at%2011.59.55@2x.png](attachment:CleanShot%202023-06-25%20at%2011.59.55@2x.png)\n",
+                "\n",
+                "Install and upgrade `ipywidgets` if the tqdm progress bars don't look like the image above.\n",
+                "\n",
+                "`pip install ipywidgets --upgrade`\n",
+                "\n",
+                "`jupyter nbextension enable --py widgetsnbextension`\n",
+                "\n",
+                "Run `jupyter notebook` from the root directory to have access to the `paul_graham` data in the `/examples` folder."
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 13,
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "from llama_index import (\n",
+                "    VectorStoreIndex,\n",
+                "    SimpleDirectoryReader,\n",
+                "    get_response_synthesizer,\n",
+                "    DocumentSummaryIndex,\n",
+                "    LLMPredictor,\n",
+                "    ServiceContext,\n",
+                "    KeywordTableIndex,\n",
+                "    KnowledgeGraphIndex,\n",
+                "    SummaryIndex,\n",
+                "    TreeIndex,\n",
+                ")\n",
+                "import os\n",
+                "import openai\n",
+                "from llama_index.llms import OpenAI, MockLLM\n",
+                "from llama_index.storage.storage_context import StorageContext\n",
+                "from llama_index.graph_stores import SimpleGraphStore"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 3,
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# Set environment variable\n",
+                "os.environ[\"OPENAI_API_KEY\"] = \"OPENAI_API_KEY_HERE\"\n",
+                "openai.api_key = os.getenv(\"OPENAI_API_KEY\")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 5,
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# Load documents\n",
+                "documents = SimpleDirectoryReader(\"../../../examples/data/paul_graham\").load_data()"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "metadata": {},
+            "source": [
+                "### VectorStoreIndex"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 7,
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "import nest_asyncio\n",
+                "\n",
+                "nest_asyncio.apply()"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 8,
+            "metadata": {},
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "\n",
+                        "VectorStoreIndex with show_progress=True\n",
+                        "\n"
+                    ]
+                },
+                {
+                    "name": "stderr",
+                    "output_type": "stream",
+                    "text": [
+                        "Parsing documents into nodes: 100%|██████████| 1/1 [00:00<00:00,  2.78it/s]\n",
+                        "Generating embeddings: 100%|██████████| 20/20 [00:01<00:00, 12.04it/s]\n"
+                    ]
+                },
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "\n",
+                        "VectorStoreIndex with show_progress=False\n",
+                        "\n",
+                        "\n",
+                        "VectorStoreIndex with show_progress=True, use_async=True\n",
+                        "\n"
+                    ]
+                },
+                {
+                    "name": "stderr",
+                    "output_type": "stream",
+                    "text": [
+                        "Parsing documents into nodes: 100%|██████████| 1/1 [00:00<00:00,  2.82it/s]\n",
+                        "Generating embeddings: 100%|██████████| 2/2 [00:01<00:00,  1.39it/s]\n"
+                    ]
+                },
+                {
+                    "data": {
+                        "text/plain": [
+                            "<llama_index.indices.vector_store.base.VectorStoreIndex at 0x105a5b370>"
+                        ]
+                    },
+                    "execution_count": 8,
+                    "metadata": {},
+                    "output_type": "execute_result"
+                }
+            ],
+            "source": [
+                "print(\"\\nVectorStoreIndex with show_progress=True\\n\")\n",
+                "VectorStoreIndex.from_documents(documents, show_progress=True)\n",
+                "\n",
+                "print(\"\\nVectorStoreIndex with show_progress=False\\n\")\n",
+                "VectorStoreIndex.from_documents(documents, show_progress=False)\n",
+                "\n",
+                "print(\"\\nVectorStoreIndex with show_progress=True, use_async=True\\n\")\n",
+                "VectorStoreIndex.from_documents(documents, show_progress=True, use_async=True)\n",
+                "\n",
+                "# print(\"\\nVectorStoreIndex with show_progress=True, use_async=False\\n\")\n",
+                "# VectorStoreIndex.from_documents(documents, show_progress=False, use_async=False)"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "metadata": {},
+            "source": [
+                "### DocumentSummaryIndex"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 9,
+            "metadata": {},
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "\n",
+                        "DocumentSummaryIndex with show_progress=True\n",
+                        "\n"
+                    ]
+                },
+                {
+                    "name": "stderr",
+                    "output_type": "stream",
+                    "text": [
+                        "Parsing documents into nodes: 100%|██████████| 1/1 [00:00<00:00,  2.09it/s]\n",
+                        "Summarizing documents:   0%|          | 0/1 [00:00<?, ?it/s]"
+                    ]
+                },
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "current doc id: 0e06a6b5-e808-4508-8051-63458a29c196\n"
+                    ]
+                },
+                {
+                    "name": "stderr",
+                    "output_type": "stream",
+                    "text": [
+                        "Summarizing documents: 100%|██████████| 1/1 [00:15<00:00, 15.24s/it]\n"
+                    ]
+                },
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "\n",
+                        "DocumentSummaryIndex with show_progress=False\n",
+                        "\n",
+                        "current doc id: 0e06a6b5-e808-4508-8051-63458a29c196\n"
+                    ]
+                },
+                {
+                    "data": {
+                        "text/plain": [
+                            "<llama_index.indices.document_summary.base.DocumentSummaryIndex at 0x13acbb4c0>"
+                        ]
+                    },
+                    "execution_count": 9,
+                    "metadata": {},
+                    "output_type": "execute_result"
+                }
+            ],
+            "source": [
+                "llm_chatgpt = OpenAI(temperature=0, model=\"gpt-3.5-turbo\")\n",
+                "\n",
+                "service_context = ServiceContext.from_defaults(llm=llm_chatgpt, chunk_size=1024)\n",
+                "\n",
+                "print(\"\\nDocumentSummaryIndex with show_progress=True\\n\")\n",
+                "response_synthesizer = get_response_synthesizer(\n",
+                "    response_mode=\"tree_summarize\", use_async=True, service_context=service_context\n",
+                ")\n",
+                "DocumentSummaryIndex.from_documents(\n",
+                "    documents,\n",
+                "    service_context=service_context,\n",
+                "    response_synthesizer=response_synthesizer,\n",
+                "    show_progress=True,\n",
+                ")\n",
+                "\n",
+                "print(\"\\nDocumentSummaryIndex with show_progress=False\\n\")\n",
+                "DocumentSummaryIndex.from_documents(\n",
+                "    documents,\n",
+                "    service_context=service_context,\n",
+                "    response_synthesizer=response_synthesizer,\n",
+                "    show_progress=False,\n",
+                ")"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "metadata": {},
+            "source": [
+                "### KeywordTableIndex"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 10,
+            "metadata": {},
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "\n",
+                        "KeywordTableIndex with show_progress=True, use_async=True\n",
+                        "\n"
+                    ]
+                },
+                {
+                    "name": "stderr",
+                    "output_type": "stream",
+                    "text": [
+                        "Parsing documents into nodes: 100%|██████████| 1/1 [00:00<00:00,  2.25it/s]\n",
+                        "Extracting keywords from nodes: 100%|██████████| 20/20 [00:54<00:00,  2.71s/it]\n"
+                    ]
+                },
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "\n",
+                        "KeywordTableIndex with show_progress=True, use_async=False\n",
+                        "\n"
+                    ]
+                },
+                {
+                    "name": "stderr",
+                    "output_type": "stream",
+                    "text": [
+                        "Parsing documents into nodes: 100%|██████████| 1/1 [00:00<00:00,  3.29it/s]\n",
+                        "Extracting keywords from nodes: 100%|██████████| 20/20 [00:46<00:00,  2.31s/it]\n"
+                    ]
+                },
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "\n",
+                        "KeywordTableIndex with show_progress=False, use_async=True\n",
+                        "\n",
+                        "\n",
+                        "KeywordTableIndex with show_progress=False, use_async=False\n",
+                        "\n"
+                    ]
+                },
+                {
+                    "data": {
+                        "text/plain": [
+                            "<llama_index.indices.keyword_table.base.KeywordTableIndex at 0x12fbbb250>"
+                        ]
+                    },
+                    "execution_count": 10,
+                    "metadata": {},
+                    "output_type": "execute_result"
+                }
+            ],
+            "source": [
+                "print(\"\\nKeywordTableIndex with show_progress=True, use_async=True\\n\")\n",
+                "KeywordTableIndex.from_documents(\n",
+                "    documents=documents, show_progress=True, use_async=True\n",
+                ")\n",
+                "\n",
+                "print(\"\\nKeywordTableIndex with show_progress=True, use_async=False\\n\")\n",
+                "KeywordTableIndex.from_documents(\n",
+                "    documents=documents, show_progress=True, use_async=False\n",
+                ")\n",
+                "\n",
+                "print(\"\\nKeywordTableIndex with show_progress=False, use_async=True\\n\")\n",
+                "KeywordTableIndex.from_documents(documents=documents, use_async=True)\n",
+                "\n",
+                "print(\"\\nKeywordTableIndex with show_progress=False, use_async=False\\n\")\n",
+                "KeywordTableIndex.from_documents(documents=documents)"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "metadata": {},
+            "source": [
+                "### KnowledgeGraphIndex"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 11,
+            "metadata": {},
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "\n",
+                        "KnowledgeGraphIndex with show_progress=True, use_async=False\n",
+                        "\n"
+                    ]
+                },
+                {
+                    "name": "stderr",
+                    "output_type": "stream",
+                    "text": [
+                        "Parsing documents into nodes: 100%|██████████| 1/1 [00:00<00:00,  1.86it/s]\n",
+                        "Processing nodes: 100%|██████████| 40/40 [00:30<00:00,  1.30it/s]\n"
+                    ]
+                },
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "\n",
+                        "KnowledgeGraphIndex with show_progress=True, use_async=True\n",
+                        "\n"
+                    ]
+                },
+                {
+                    "name": "stderr",
+                    "output_type": "stream",
+                    "text": [
+                        "Parsing documents into nodes: 100%|██████████| 1/1 [00:00<00:00,  2.09it/s]\n",
+                        "Processing nodes: 100%|██████████| 40/40 [00:27<00:00,  1.47it/s]\n"
+                    ]
+                },
+                {
+                    "data": {
+                        "text/plain": [
+                            "<llama_index.indices.knowledge_graph.base.KnowledgeGraphIndex at 0x2c907d460>"
+                        ]
+                    },
+                    "execution_count": 11,
+                    "metadata": {},
+                    "output_type": "execute_result"
+                }
+            ],
+            "source": [
+                "print(\"\\nKnowledgeGraphIndex with show_progress=True, use_async=False\\n\")\n",
+                "llm = OpenAI(temperature=0, model=\"text-davinci-002\")\n",
+                "service_context = ServiceContext.from_defaults(llm=llm, chunk_size=512)\n",
+                "graph_store = SimpleGraphStore()\n",
+                "storage_context = StorageContext.from_defaults(graph_store=graph_store)\n",
+                "KnowledgeGraphIndex.from_documents(\n",
+                "    documents,\n",
+                "    max_triplets_per_chunk=2,\n",
+                "    storage_context=storage_context,\n",
+                "    service_context=service_context,\n",
+                "    show_progress=True,\n",
+                "    use_async=False,\n",
+                ")\n",
+                "\n",
+                "print(\"\\nKnowledgeGraphIndex with show_progress=True, use_async=True\\n\")\n",
+                "llm = OpenAI(temperature=0, model=\"text-davinci-002\")\n",
+                "service_context = ServiceContext.from_defaults(llm=llm, chunk_size=512)\n",
+                "graph_store = SimpleGraphStore()\n",
+                "storage_context = StorageContext.from_defaults(graph_store=graph_store)\n",
+                "KnowledgeGraphIndex.from_documents(\n",
+                "    documents,\n",
+                "    max_triplets_per_chunk=2,\n",
+                "    storage_context=storage_context,\n",
+                "    service_context=service_context,\n",
+                "    show_progress=True,\n",
+                "    use_async=True,\n",
+                ")"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "metadata": {},
+            "source": [
+                "### SummaryIndex"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 12,
+            "metadata": {},
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "\n",
+                        "SummaryIndex with show_progress=True\n",
+                        "\n"
+                    ]
+                },
+                {
+                    "name": "stderr",
+                    "output_type": "stream",
+                    "text": [
+                        "Parsing documents into nodes: 100%|██████████| 1/1 [00:00<00:00,  1.86it/s]\n"
+                    ]
+                },
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "\n",
+                        "SummaryIndex with show_progress=False\n",
+                        "\n"
+                    ]
+                },
+                {
+                    "data": {
+                        "text/plain": [
+                            "<llama_index.indices.list.base.SummaryIndex at 0x12fbba3d0>"
+                        ]
+                    },
+                    "execution_count": 12,
+                    "metadata": {},
+                    "output_type": "execute_result"
+                }
+            ],
+            "source": [
+                "print(\"\\nSummaryIndex with show_progress=True\\n\")\n",
+                "SummaryIndex.from_documents(documents=documents, show_progress=True)\n",
+                "\n",
+                "print(\"\\nSummaryIndex with show_progress=False\\n\")\n",
+                "SummaryIndex.from_documents(documents=documents)"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "metadata": {},
+            "source": [
+                "### TreeIndex"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 14,
+            "metadata": {},
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "\n",
+                        "TreeIndex with show_progress=True,  use_async=True\n",
+                        "\n"
+                    ]
+                },
+                {
+                    "name": "stderr",
+                    "output_type": "stream",
+                    "text": [
+                        "Parsing documents into nodes: 100%|██████████| 1/1 [00:00<00:00,  1.80it/s]\n",
+                        "Generating summaries: 100%|██████████| 2/2 [00:00<00:00, 624.62it/s]\n"
+                    ]
+                },
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "\n",
+                        "TreeIndex with show_progress=True, use_async=False\n",
+                        "\n"
+                    ]
+                },
+                {
+                    "name": "stderr",
+                    "output_type": "stream",
+                    "text": [
+                        "Parsing documents into nodes: 100%|██████████| 1/1 [00:00<00:00,  2.59it/s]\n",
+                        "Generating summaries: 100%|██████████| 2/2 [00:00<00:00, 651.29it/s]\n"
+                    ]
+                },
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "\n",
+                        "TreeIndex with show_progress=False, use_async=True\n",
+                        "\n",
+                        "\n",
+                        "TreeIndex with show_progress=False, use_async=False\n",
+                        "\n"
+                    ]
+                },
+                {
+                    "data": {
+                        "text/plain": [
+                            "<llama_index.indices.tree.base.TreeIndex at 0x13a2f3070>"
+                        ]
+                    },
+                    "execution_count": 14,
+                    "metadata": {},
+                    "output_type": "execute_result"
+                }
+            ],
+            "source": [
+                "print(\"\\nTreeIndex with show_progress=True,  use_async=True\\n\")\n",
+                "llm = MockLLM(max_tokens=256)\n",
+                "service_context = ServiceContext.from_defaults(llm=llm)\n",
+                "TreeIndex.from_documents(\n",
+                "    documents, service_context=service_context, show_progress=True, use_async=True\n",
+                ")\n",
+                "\n",
+                "print(\"\\nTreeIndex with show_progress=True, use_async=False\\n\")\n",
+                "TreeIndex.from_documents(\n",
+                "    documents, service_context=service_context, show_progress=True, use_async=False\n",
+                ")\n",
+                "\n",
+                "print(\"\\nTreeIndex with show_progress=False, use_async=True\\n\")\n",
+                "TreeIndex.from_documents(documents, service_context=service_context, use_async=True)\n",
+                "\n",
+                "print(\"\\nTreeIndex with show_progress=False, use_async=False\\n\")\n",
+                "TreeIndex.from_documents(documents, service_context=service_context)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "metadata": {},
+            "outputs": [],
+            "source": []
+        }
+    ],
+    "metadata": {
+        "kernelspec": {
+            "display_name": "llama-index",
+            "language": "python",
+            "name": "python3"
+        },
+        "language_info": {
+            "codemirror_mode": {
+                "name": "ipython",
+                "version": 3
+            },
+            "file_extension": ".py",
+            "mimetype": "text/x-python",
+            "name": "python",
+            "nbconvert_exporter": "python",
+            "pygments_lexer": "ipython3",
+            "version": "3.9.6"
+        }
     },
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "Parsing documents into nodes: 100%|██████████| 1/1 [00:00<00:00,  2.78it/s]\n",
-      "Generating embeddings: 100%|██████████| 20/20 [00:01<00:00, 12.04it/s]\n"
-     ]
-    },
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "\n",
-      "VectorStoreIndex with show_progress=False\n",
-      "\n",
-      "\n",
-      "VectorStoreIndex with show_progress=True, use_async=True\n",
-      "\n"
-     ]
-    },
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "Parsing documents into nodes: 100%|██████████| 1/1 [00:00<00:00,  2.82it/s]\n",
-      "Generating embeddings: 100%|██████████| 2/2 [00:01<00:00,  1.39it/s]\n"
-     ]
-    },
-    {
-     "data": {
-      "text/plain": [
-       "<llama_index.indices.vector_store.base.VectorStoreIndex at 0x105a5b370>"
-      ]
-     },
-     "execution_count": 8,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "print(\"\\nVectorStoreIndex with show_progress=True\\n\")\n",
-    "VectorStoreIndex.from_documents(documents, show_progress=True)\n",
-    "\n",
-    "print(\"\\nVectorStoreIndex with show_progress=False\\n\")\n",
-    "VectorStoreIndex.from_documents(documents, show_progress=False)\n",
-    "\n",
-    "print(\"\\nVectorStoreIndex with show_progress=True, use_async=True\\n\")\n",
-    "VectorStoreIndex.from_documents(documents, show_progress=True, use_async=True)\n",
-    "\n",
-    "# print(\"\\nVectorStoreIndex with show_progress=True, use_async=False\\n\")\n",
-    "# VectorStoreIndex.from_documents(documents, show_progress=False, use_async=False)"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "### DocumentSummaryIndex"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 9,
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "\n",
-      "DocumentSummaryIndex with show_progress=True\n",
-      "\n"
-     ]
-    },
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "Parsing documents into nodes: 100%|██████████| 1/1 [00:00<00:00,  2.09it/s]\n",
-      "Summarizing documents:   0%|          | 0/1 [00:00<?, ?it/s]"
-     ]
-    },
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "current doc id: 0e06a6b5-e808-4508-8051-63458a29c196\n"
-     ]
-    },
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "Summarizing documents: 100%|██████████| 1/1 [00:15<00:00, 15.24s/it]\n"
-     ]
-    },
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "\n",
-      "DocumentSummaryIndex with show_progress=False\n",
-      "\n",
-      "current doc id: 0e06a6b5-e808-4508-8051-63458a29c196\n"
-     ]
-    },
-    {
-     "data": {
-      "text/plain": [
-       "<llama_index.indices.document_summary.base.DocumentSummaryIndex at 0x13acbb4c0>"
-      ]
-     },
-     "execution_count": 9,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "llm_chatgpt = OpenAI(temperature=0, model=\"gpt-3.5-turbo\")\n",
-    "\n",
-    "service_context = ServiceContext.from_defaults(llm=llm_chatgpt, chunk_size=1024)\n",
-    "\n",
-    "print(\"\\nDocumentSummaryIndex with show_progress=True\\n\")\n",
-    "response_synthesizer = get_response_synthesizer(\n",
-    "    response_mode=\"tree_summarize\", use_async=True, service_context=service_context\n",
-    ")\n",
-    "DocumentSummaryIndex.from_documents(\n",
-    "    documents,\n",
-    "    service_context=service_context,\n",
-    "    response_synthesizer=response_synthesizer,\n",
-    "    show_progress=True,\n",
-    ")\n",
-    "\n",
-    "print(\"\\nDocumentSummaryIndex with show_progress=False\\n\")\n",
-    "DocumentSummaryIndex.from_documents(\n",
-    "    documents,\n",
-    "    service_context=service_context,\n",
-    "    response_synthesizer=response_synthesizer,\n",
-    "    show_progress=False,\n",
-    ")"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "### KeywordTableIndex"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 10,
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "\n",
-      "KeywordTableIndex with show_progress=True, use_async=True\n",
-      "\n"
-     ]
-    },
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "Parsing documents into nodes: 100%|██████████| 1/1 [00:00<00:00,  2.25it/s]\n",
-      "Extracting keywords from nodes: 100%|██████████| 20/20 [00:54<00:00,  2.71s/it]\n"
-     ]
-    },
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "\n",
-      "KeywordTableIndex with show_progress=True, use_async=False\n",
-      "\n"
-     ]
-    },
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "Parsing documents into nodes: 100%|██████████| 1/1 [00:00<00:00,  3.29it/s]\n",
-      "Extracting keywords from nodes: 100%|██████████| 20/20 [00:46<00:00,  2.31s/it]\n"
-     ]
-    },
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "\n",
-      "KeywordTableIndex with show_progress=False, use_async=True\n",
-      "\n",
-      "\n",
-      "KeywordTableIndex with show_progress=False, use_async=False\n",
-      "\n"
-     ]
-    },
-    {
-     "data": {
-      "text/plain": [
-       "<llama_index.indices.keyword_table.base.KeywordTableIndex at 0x12fbbb250>"
-      ]
-     },
-     "execution_count": 10,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "print(\"\\nKeywordTableIndex with show_progress=True, use_async=True\\n\")\n",
-    "KeywordTableIndex.from_documents(\n",
-    "    documents=documents, show_progress=True, use_async=True\n",
-    ")\n",
-    "\n",
-    "print(\"\\nKeywordTableIndex with show_progress=True, use_async=False\\n\")\n",
-    "KeywordTableIndex.from_documents(\n",
-    "    documents=documents, show_progress=True, use_async=False\n",
-    ")\n",
-    "\n",
-    "print(\"\\nKeywordTableIndex with show_progress=False, use_async=True\\n\")\n",
-    "KeywordTableIndex.from_documents(documents=documents, use_async=True)\n",
-    "\n",
-    "print(\"\\nKeywordTableIndex with show_progress=False, use_async=False\\n\")\n",
-    "KeywordTableIndex.from_documents(documents=documents)"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "### KnowledgeGraphIndex"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 11,
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "\n",
-      "KnowledgeGraphIndex with show_progress=True, use_async=False\n",
-      "\n"
-     ]
-    },
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "Parsing documents into nodes: 100%|██████████| 1/1 [00:00<00:00,  1.86it/s]\n",
-      "Processing nodes: 100%|██████████| 40/40 [00:30<00:00,  1.30it/s]\n"
-     ]
-    },
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "\n",
-      "KnowledgeGraphIndex with show_progress=True, use_async=True\n",
-      "\n"
-     ]
-    },
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "Parsing documents into nodes: 100%|██████████| 1/1 [00:00<00:00,  2.09it/s]\n",
-      "Processing nodes: 100%|██████████| 40/40 [00:27<00:00,  1.47it/s]\n"
-     ]
-    },
-    {
-     "data": {
-      "text/plain": [
-       "<llama_index.indices.knowledge_graph.base.KnowledgeGraphIndex at 0x2c907d460>"
-      ]
-     },
-     "execution_count": 11,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "print(\"\\nKnowledgeGraphIndex with show_progress=True, use_async=False\\n\")\n",
-    "llm = OpenAI(temperature=0, model=\"text-davinci-002\")\n",
-    "service_context = ServiceContext.from_defaults(llm=llm, chunk_size=512)\n",
-    "graph_store = SimpleGraphStore()\n",
-    "storage_context = StorageContext.from_defaults(graph_store=graph_store)\n",
-    "KnowledgeGraphIndex.from_documents(\n",
-    "    documents,\n",
-    "    max_triplets_per_chunk=2,\n",
-    "    storage_context=storage_context,\n",
-    "    service_context=service_context,\n",
-    "    show_progress=True,\n",
-    "    use_async=False,\n",
-    ")\n",
-    "\n",
-    "print(\"\\nKnowledgeGraphIndex with show_progress=True, use_async=True\\n\")\n",
-    "llm = OpenAI(temperature=0, model=\"text-davinci-002\")\n",
-    "service_context = ServiceContext.from_defaults(llm=llm, chunk_size=512)\n",
-    "graph_store = SimpleGraphStore()\n",
-    "storage_context = StorageContext.from_defaults(graph_store=graph_store)\n",
-    "KnowledgeGraphIndex.from_documents(\n",
-    "    documents,\n",
-    "    max_triplets_per_chunk=2,\n",
-    "    storage_context=storage_context,\n",
-    "    service_context=service_context,\n",
-    "    show_progress=True,\n",
-    "    use_async=True,\n",
-    ")"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "### ListIndex"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 12,
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "\n",
-      "ListIndex with show_progress=True\n",
-      "\n"
-     ]
-    },
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "Parsing documents into nodes: 100%|██████████| 1/1 [00:00<00:00,  1.86it/s]\n"
-     ]
-    },
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "\n",
-      "ListIndex with show_progress=False\n",
-      "\n"
-     ]
-    },
-    {
-     "data": {
-      "text/plain": [
-       "<llama_index.indices.list.base.ListIndex at 0x12fbba3d0>"
-      ]
-     },
-     "execution_count": 12,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "print(\"\\nListIndex with show_progress=True\\n\")\n",
-    "ListIndex.from_documents(documents=documents, show_progress=True)\n",
-    "\n",
-    "print(\"\\nListIndex with show_progress=False\\n\")\n",
-    "ListIndex.from_documents(documents=documents)"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "### TreeIndex"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 14,
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "\n",
-      "TreeIndex with show_progress=True,  use_async=True\n",
-      "\n"
-     ]
-    },
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "Parsing documents into nodes: 100%|██████████| 1/1 [00:00<00:00,  1.80it/s]\n",
-      "Generating summaries: 100%|██████████| 2/2 [00:00<00:00, 624.62it/s]\n"
-     ]
-    },
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "\n",
-      "TreeIndex with show_progress=True, use_async=False\n",
-      "\n"
-     ]
-    },
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "Parsing documents into nodes: 100%|██████████| 1/1 [00:00<00:00,  2.59it/s]\n",
-      "Generating summaries: 100%|██████████| 2/2 [00:00<00:00, 651.29it/s]\n"
-     ]
-    },
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "\n",
-      "TreeIndex with show_progress=False, use_async=True\n",
-      "\n",
-      "\n",
-      "TreeIndex with show_progress=False, use_async=False\n",
-      "\n"
-     ]
-    },
-    {
-     "data": {
-      "text/plain": [
-       "<llama_index.indices.tree.base.TreeIndex at 0x13a2f3070>"
-      ]
-     },
-     "execution_count": 14,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "print(\"\\nTreeIndex with show_progress=True,  use_async=True\\n\")\n",
-    "llm = MockLLM(max_tokens=256)\n",
-    "service_context = ServiceContext.from_defaults(llm=llm)\n",
-    "TreeIndex.from_documents(\n",
-    "    documents, service_context=service_context, show_progress=True, use_async=True\n",
-    ")\n",
-    "\n",
-    "print(\"\\nTreeIndex with show_progress=True, use_async=False\\n\")\n",
-    "TreeIndex.from_documents(\n",
-    "    documents, service_context=service_context, show_progress=True, use_async=False\n",
-    ")\n",
-    "\n",
-    "print(\"\\nTreeIndex with show_progress=False, use_async=True\\n\")\n",
-    "TreeIndex.from_documents(documents, service_context=service_context, use_async=True)\n",
-    "\n",
-    "print(\"\\nTreeIndex with show_progress=False, use_async=False\\n\")\n",
-    "TreeIndex.from_documents(documents, service_context=service_context)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": []
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "llama-index",
-   "language": "python",
-   "name": "python3"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 3
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython3",
-   "version": "3.9.6"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 2
+    "nbformat": 4,
+    "nbformat_minor": 2
 }
diff --git a/docs/core_modules/model_modules/llms/usage_custom.md b/docs/core_modules/model_modules/llms/usage_custom.md
index f24a12d793c12055e9086b47694c168af2755404..1b905380f49f65854b7375f1eba5576f6c82785a 100644
--- a/docs/core_modules/model_modules/llms/usage_custom.md
+++ b/docs/core_modules/model_modules/llms/usage_custom.md
@@ -187,7 +187,7 @@ from llama_index import (
     ServiceContext, 
     SimpleDirectoryReader, 
     LangchainEmbedding, 
-    ListIndex
+    SummaryIndex
 )
 from llama_index.callbacks import CallbackManager
 from llama_index.llms import (
@@ -243,7 +243,7 @@ service_context = ServiceContext.from_defaults(
 
 # Load the your data
 documents = SimpleDirectoryReader('./data').load_data()
-index = ListIndex.from_documents(documents, service_context=service_context)
+index = SummaryIndex.from_documents(documents, service_context=service_context)
 
 # Query and print response
 query_engine = index.as_query_engine()
diff --git a/docs/core_modules/query_modules/retriever/retriever_modes.md b/docs/core_modules/query_modules/retriever/retriever_modes.md
index 67f9c79a88b2c77bb538547df7d58eff16598871..e9ae005256cca3c9d6a94f4185884fbc0080312e 100644
--- a/docs/core_modules/query_modules/retriever/retriever_modes.md
+++ b/docs/core_modules/query_modules/retriever/retriever_modes.md
@@ -8,9 +8,9 @@ Specifying `retriever_mode` has no effect (silently ignored).
 
 
 ## List Index
-* `default`: ListIndexRetriever 
-* `embedding`: ListIndexEmbeddingRetriever 
-* `llm`: ListIndexLLMRetriever
+* `default`: SummaryIndexRetriever 
+* `embedding`: SummaryIndexEmbeddingRetriever 
+* `llm`: SummaryIndexLLMRetriever
 
 ## Tree Index
 * `select_leaf`: TreeSelectLeafRetriever
diff --git a/docs/core_modules/query_modules/retriever/usage_pattern.md b/docs/core_modules/query_modules/retriever/usage_pattern.md
index c7120257a1a5174435b1b8a9d9674ee84e910bf8..9d15329fe04f40404fcf9d88f6ce86e6aaa9992f 100644
--- a/docs/core_modules/query_modules/retriever/usage_pattern.md
+++ b/docs/core_modules/query_modules/retriever/usage_pattern.md
@@ -18,13 +18,13 @@ nodes = retriever.retrieve('Who is Paul Graham?')
 ### Selecting a Retriever
 
 You can select the index-specific retriever class via `retriever_mode`. 
-For example, with a `ListIndex`:
+For example, with a `SummaryIndex`:
 ```python
 retriever = list_index.as_retriever(
     retriever_mode='llm',
 )
 ```
-This creates a [ListIndexLLMRetriever](/api_reference/query/retrievers/list.rst) on top of the list index.
+This creates a [SummaryIndexLLMRetriever](/api_reference/query/retrievers/list.rst) on top of the list index.
 
 See [**Retriever Modes**](/core_modules/query_modules/retriever/retriever_modes.md) for a full list of (index-specific) retriever modes
 and the retriever classes they map to.
@@ -55,9 +55,9 @@ You can use the low-level composition API if you need more granular control.
 
 To achieve the same outcome as above, you can directly import and construct the desired retriever class:
 ```python
-from llama_index.indices.list import ListIndexLLMRetriever
+from llama_index.indices.list import SummaryIndexLLMRetriever
 
-retriever = ListIndexLLMRetriever(
+retriever = SummaryIndexLLMRetriever(
     index=list_index,
     choice_batch_size=5,
 )
diff --git a/docs/core_modules/query_modules/router/usage_pattern.md b/docs/core_modules/query_modules/router/usage_pattern.md
index 060dab173614330cf7c7f379cd697b39c901b7a9..679bed9d3c4ad324f657f311e28c0eec3f8cd7fc 100644
--- a/docs/core_modules/query_modules/router/usage_pattern.md
+++ b/docs/core_modules/query_modules/router/usage_pattern.md
@@ -45,7 +45,7 @@ from llama_index.selectors.pydantic_selectors import PydanticSingleSelector, Pyd
 from llama_index.tools.query_engine import QueryEngineTool
 from llama_index import (
     VectorStoreIndex,
-    ListIndex,
+    SummaryIndex,
 )
 
 # define query engines
diff --git a/docs/core_modules/supporting_modules/cost_analysis/root.md b/docs/core_modules/supporting_modules/cost_analysis/root.md
index f1dedeabed23d5958507b980a40349e41e1ad60c..0265244107391ff5a814d0e8e64b7ffd959314d6 100644
--- a/docs/core_modules/supporting_modules/cost_analysis/root.md
+++ b/docs/core_modules/supporting_modules/cost_analysis/root.md
@@ -17,7 +17,7 @@ The cost of building and querying each index is a TODO in the reference document
 
 #### Indices with no LLM calls
 The following indices don't require LLM calls at all during building (0 cost):
-- `ListIndex`
+- `SummaryIndex`
 - `SimpleKeywordTableIndex` - uses a regex keyword extractor to extract keywords from each document
 - `RAKEKeywordTableIndex` - uses a RAKE keyword extractor to extract keywords from each document
 
@@ -29,12 +29,12 @@ The following indices do require LLM calls during build time:
 ### Query Time
 
 There will always be >= 1 LLM call during query time, in order to synthesize the final answer. 
-Some indices contain cost tradeoffs between index building and querying. `ListIndex`, for instance,
+Some indices contain cost tradeoffs between index building and querying. `SummaryIndex`, for instance,
 is free to build, but running a query over a list index (without filtering or embedding lookups), will
 call the LLM {math}`N` times.
 
 Here are some notes regarding each of the indices:
-- `ListIndex`: by default requires {math}`N` LLM calls, where N is the number of nodes.
+- `SummaryIndex`: by default requires {math}`N` LLM calls, where N is the number of nodes.
 - `TreeIndex`: by default requires {math}`\log (N)` LLM calls, where N is the number of leaf nodes. 
     - Setting `child_branch_factor=2` will be more expensive than the default `child_branch_factor=1` (polynomial vs logarithmic), because we traverse 2 children instead of just 1 for each parent node.
 - `KeywordTableIndex`: by default requires an LLM call to extract query keywords.
diff --git a/docs/end_to_end_tutorials/chatbots/building_a_chatbot.md b/docs/end_to_end_tutorials/chatbots/building_a_chatbot.md
index 0439767fbaf840fb8bbfe88efec0051432f4e279..f537c8c9ccaa7d00d99430f031b9e2fac3ac7d9f 100644
--- a/docs/end_to_end_tutorials/chatbots/building_a_chatbot.md
+++ b/docs/end_to_end_tutorials/chatbots/building_a_chatbot.md
@@ -87,7 +87,7 @@ Since we have access to documents of 4 years, we may not only want to ask questi
 To address this, we compose a "graph" which consists of a list index defined over the 4 vector indices. Querying this graph would first retrieve information from each vector index, and combine information together via the list index.
 
 ```python
-from llama_index import ListIndex, LLMPredictor, ServiceContext, load_graph_from_storage
+from llama_index import SummaryIndex, LLMPredictor, ServiceContext, load_graph_from_storage
 from llama_index.llms import OpenAI
 from llama_index.indices.composability import ComposableGraph
 
@@ -102,7 +102,7 @@ storage_context = StorageContext.from_defaults()
 # define a list index over the vector indices
 # allows us to synthesize information across each index
 graph = ComposableGraph.from_indices(
-    ListIndex,
+    SummaryIndex,
     [index_set[y] for y in years], 
     index_summaries=index_summaries,
     service_context=service_context,
diff --git a/docs/end_to_end_tutorials/question_and_answer.md b/docs/end_to_end_tutorials/question_and_answer.md
index 2275fd61b6cbcde92823b2e2c8c5e422c3f94782..09969b136856a0adcdc5018afe25b1f8bb91e3a8 100644
--- a/docs/end_to_end_tutorials/question_and_answer.md
+++ b/docs/end_to_end_tutorials/question_and_answer.md
@@ -42,7 +42,7 @@ In general, a list index would be suited for this use case. A list index by defa
 Empirically, setting `response_mode="tree_summarize"` also leads to better summarization results.
 
 ```python
-index = ListIndex.from_documents(documents)
+index = SummaryIndex.from_documents(documents)
 
 query_engine = index.as_query_engine(
     response_mode="tree_summarize"
@@ -72,13 +72,13 @@ Specifically, compose a list index over your subindices. A list index inherently
 it can synthesize information across your heterogeneous data sources.
 
 ```python
-from llama_index import VectorStoreIndex, ListIndex
+from llama_index import VectorStoreIndex, SummaryIndex
 from llama_index.indices.composability import ComposableGraph
 
 index1 = VectorStoreIndex.from_documents(notion_docs)
 index2 = VectorStoreIndex.from_documents(slack_docs)
 
-graph = ComposableGraph.from_indices(ListIndex, [index1, index2], index_summaries=["summary1", "summary2"])
+graph = ComposableGraph.from_indices(SummaryIndex, [index1, index2], index_summaries=["summary1", "summary2"])
 query_engine = graph.as_query_engine()
 response = query_engine.query("<query_str>")
 
diff --git a/docs/end_to_end_tutorials/question_and_answer/terms_definitions_tutorial.md b/docs/end_to_end_tutorials/question_and_answer/terms_definitions_tutorial.md
index 0f576b5ffc208e46b9ba3b904dae09fec2f8c215..1eee13a83dcb46e1f7656f024762da37f1613497 100644
--- a/docs/end_to_end_tutorials/question_and_answer/terms_definitions_tutorial.md
+++ b/docs/end_to_end_tutorials/question_and_answer/terms_definitions_tutorial.md
@@ -77,7 +77,7 @@ Now that we are able to define LLM settings and upload text, we can try using Ll
 We can add the following functions to both initialize our LLM, as well as use it to extract terms from the input text.
 
 ```python
-from llama_index import Document, ListIndex, LLMPredictor, ServiceContext, load_index_from_storage
+from llama_index import Document, SummaryIndex, LLMPredictor, ServiceContext, load_index_from_storage
 from llama_index.llms import OpenAI
 
 def get_llm(llm_name, model_temperature, api_key, max_tokens=256):
@@ -90,7 +90,7 @@ def extract_terms(documents, term_extract_str, llm_name, model_temperature, api_
     service_context = ServiceContext.from_defaults(llm=llm,
                                                    chunk_size=1024)
 
-    temp_index = ListIndex.from_documents(documents, service_context=service_context)
+    temp_index = SummaryIndex.from_documents(documents, service_context=service_context)
     query_engine = temp_index.as_query_engine(response_mode="tree_summarize")
     terms_definitions = str(query_engine.query(term_extract_str))
     terms_definitions = [x for x in terms_definitions.split("\n") if x and 'Term:' in x and 'Definition:' in x]
diff --git a/docs/end_to_end_tutorials/question_and_answer/unified_query.md b/docs/end_to_end_tutorials/question_and_answer/unified_query.md
index 0639e96d6ff230798d219dc00f2ac4efdbff4e8d..4cbd683f9affb0df508057a7233bae3a8bffec39 100644
--- a/docs/end_to_end_tutorials/question_and_answer/unified_query.md
+++ b/docs/end_to_end_tutorials/question_and_answer/unified_query.md
@@ -2,7 +2,7 @@
 
 LlamaIndex offers a variety of different [use cases](/end_to_end_tutorials/use_cases.md).
 
-For simple queries, we may want to use a single index data structure, such as a `VectorStoreIndex` for semantic search, or `ListIndex` for summarization.
+For simple queries, we may want to use a single index data structure, such as a `VectorStoreIndex` for semantic search, or `SummaryIndex` for summarization.
 
 For more complex queries, we may want to use a composable graph.
 
diff --git a/docs/end_to_end_tutorials/usage_pattern.md b/docs/end_to_end_tutorials/usage_pattern.md
index 0127fac91597c2eb3b45d941bc699f200ceca219..8b54e2ff9e9a22233b0a98031fc20a76b11bd7b6 100644
--- a/docs/end_to_end_tutorials/usage_pattern.md
+++ b/docs/end_to_end_tutorials/usage_pattern.md
@@ -109,7 +109,7 @@ storage_context = StorageContext.from_defaults()
 storage_context.docstore.add_documents(nodes)
 
 index1 = VectorStoreIndex(nodes, storage_context=storage_context)
-index2 = ListIndex(nodes, storage_context=storage_context)
+index2 = SummaryIndex(nodes, storage_context=storage_context)
 ```
 
 **NOTE**: If the `storage_context` argument isn't specified, then it is implicitly
@@ -346,15 +346,15 @@ In the following, we discuss some commonly used configurations in detail.
 ### Configuring retriever
 
 An index can have a variety of index-specific retrieval modes.
-For instance, a list index supports the default `ListIndexRetriever` that retrieves all nodes, and
-`ListIndexEmbeddingRetriever` that retrieves the top-k nodes by embedding similarity.
+For instance, a list index supports the default `SummaryIndexRetriever` that retrieves all nodes, and
+`SummaryIndexEmbeddingRetriever` that retrieves the top-k nodes by embedding similarity.
 
 For convienience, you can also use the following shorthand:
 
 ```python
-    # ListIndexRetriever
+    # SummaryIndexRetriever
     retriever = index.as_retriever(retriever_mode='default')
-    # ListIndexEmbeddingRetriever
+    # SummaryIndexEmbeddingRetriever
     retriever = index.as_retriever(retriever_mode='embedding')
 ```
 
@@ -397,7 +397,7 @@ Right now, we support the following options:
   chunk.
 
 ```python
-index = ListIndex.from_documents(documents)
+index = SummaryIndex.from_documents(documents)
 retriever = index.as_retriever()
 
 # default
diff --git a/docs/examples/agent/openai_agent_retrieval.ipynb b/docs/examples/agent/openai_agent_retrieval.ipynb
index 6bad885592f0d804fd90f3945aa6a63c29e8e4bc..3b08d2b429c6a3d8bdfe8870ff9f1ff412117879 100644
--- a/docs/examples/agent/openai_agent_retrieval.ipynb
+++ b/docs/examples/agent/openai_agent_retrieval.ipynb
@@ -1,307 +1,307 @@
 {
- "cells": [
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "99cea58c-48bc-4af6-8358-df9695659983",
-   "metadata": {
-    "tags": []
-   },
-   "source": [
-    "# Retrieval-Augmented OpenAI Agent"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "673df1fe-eb6c-46ea-9a73-a96e7ae7942e",
-   "metadata": {
-    "tags": []
-   },
-   "source": [
-    "In this tutorial, we show you how to use our `FnRetrieverOpenAI` implementation\n",
-    "to build an agent on top of OpenAI's function API and store/index an arbitrary number of tools. Our indexing/retrieval modules help to remove the complexity of having too many functions to fit in the prompt."
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "54b7bc2e-606f-411a-9490-fcfab9236dfc",
-   "metadata": {
-    "tags": []
-   },
-   "source": [
-    "## Initial Setup "
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "23e80e5b-aaee-4f23-b338-7ae62b08141f",
-   "metadata": {},
-   "source": [
-    "Let's start by importing some simple building blocks.  \n",
-    "\n",
-    "The main thing we need is:\n",
-    "1. the OpenAI API\n",
-    "2. a place to keep conversation history \n",
-    "3. a definition for tools that our agent can use."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 1,
-   "id": "9d47283b-025e-4874-88ed-76245b22f82e",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "/Users/suo/miniconda3/envs/llama/lib/python3.9/site-packages/deeplake/util/check_latest_version.py:32: UserWarning: A newer version of deeplake (3.6.7) is available. It's recommended that you update to the latest version using `pip install -U deeplake`.\n",
-      "  warnings.warn(\n"
-     ]
-    }
-   ],
-   "source": [
-    "import json\n",
-    "from typing import Sequence\n",
-    "\n",
-    "from llama_index.tools import BaseTool, FunctionTool"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "6fe08eb1-e638-4c00-9103-5c305bfacccf",
-   "metadata": {},
-   "source": [
-    "Let's define some very simple calculator tools for our agent."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 2,
-   "id": "3dd3c4a6-f3e0-46f9-ad3b-7ba57d1bc992",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "def multiply(a: int, b: int) -> int:\n",
-    "    \"\"\"Multiply two integers and returns the result integer\"\"\"\n",
-    "    return a * b\n",
-    "\n",
-    "\n",
-    "def add(a: int, b: int) -> int:\n",
-    "    \"\"\"Add two integers and returns the result integer\"\"\"\n",
-    "    return a + b\n",
-    "\n",
-    "\n",
-    "def useless(a: int, b: int) -> int:\n",
-    "    \"\"\"Toy useless function.\"\"\"\n",
-    "    pass\n",
-    "\n",
-    "\n",
-    "multiply_tool = FunctionTool.from_defaults(fn=multiply, name=\"multiply\")\n",
-    "useless_tools = [\n",
-    "    FunctionTool.from_defaults(fn=useless, name=f\"useless_{str(idx)}\")\n",
-    "    for idx in range(28)\n",
-    "]\n",
-    "add_tool = FunctionTool.from_defaults(fn=add, name=\"add\")\n",
-    "\n",
-    "all_tools = [multiply_tool] + [add_tool] + useless_tools\n",
-    "all_tools_map = {t.metadata.name: t for t in all_tools}"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "8170dd32-fa00-458c-aeb5-37292d0773c0",
-   "metadata": {},
-   "source": [
-    "## Building an Object Index\n",
-    "\n",
-    "We have an `ObjectIndex` construct in LlamaIndex that allows the user to use our index data structures over arbitrary objects.\n",
-    "The ObjectIndex will handle serialiation to/from the object, and use an underying index (e.g. VectorStoreIndex, ListIndex, KeywordTableIndex) as the storage mechanism. \n",
-    "\n",
-    "In this case, we have a large collection of Tool objects, and we'd want to define an ObjectIndex over these Tools.\n",
-    "\n",
-    "The index comes bundled with a retrieval mechanism, an `ObjectRetriever`. \n",
-    "\n",
-    "This can be passed in to our agent so that it can \n",
-    "perform Tool retrieval during query-time."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 3,
-   "id": "6704a755-7f05-43a3-8a56-f5f587ae4c40",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "# define an \"object\" index over these tools\n",
-    "from llama_index import VectorStoreIndex\n",
-    "from llama_index.objects import ObjectIndex, SimpleToolNodeMapping\n",
-    "\n",
-    "tool_mapping = SimpleToolNodeMapping.from_objects(all_tools)\n",
-    "obj_index = ObjectIndex.from_objects(\n",
-    "    all_tools,\n",
-    "    tool_mapping,\n",
-    "    VectorStoreIndex,\n",
-    ")"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "707d30b8-6405-4187-a9ed-6146dcc42167",
-   "metadata": {
-    "tags": []
-   },
-   "source": [
-    "## Our `FnRetrieverOpenAIAgent` Implementation "
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "798ca3fd-6711-4c0c-a853-d868dd14b484",
-   "metadata": {},
-   "source": [
-    "We provide a `FnRetrieverOpenAIAgent` implementation in LlamaIndex, which can take in an `ObjectRetriever` over a set of `BaseTool` objects.\n",
-    "\n",
-    "During query-time, we would first use the `ObjectRetriever` to retrieve a set of relevant Tools. These tools would then be passed into the agent; more specifically, their function signatures would be passed into the OpenAI Function calling API. "
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 4,
-   "id": "38ab3938-1138-43ea-b085-f430b42f5377",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "from llama_index.agent import FnRetrieverOpenAIAgent"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 5,
-   "id": "d852ece7-e5a1-4368-9d59-c7014e0b5b4d",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "agent = FnRetrieverOpenAIAgent.from_retriever(obj_index.as_retriever(), verbose=True)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 11,
-   "id": "33ea069f-819b-4ec1-a93c-fcbaacb362a1",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "=== Calling Function ===\n",
-      "Calling function: multiply with args: {\n",
-      "  \"a\": 212,\n",
-      "  \"b\": 122\n",
-      "}\n",
-      "Got output: 25864\n",
-      "========================\n"
-     ]
+    "cells": [
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "99cea58c-48bc-4af6-8358-df9695659983",
+            "metadata": {
+                "tags": []
+            },
+            "source": [
+                "# Retrieval-Augmented OpenAI Agent"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "673df1fe-eb6c-46ea-9a73-a96e7ae7942e",
+            "metadata": {
+                "tags": []
+            },
+            "source": [
+                "In this tutorial, we show you how to use our `FnRetrieverOpenAI` implementation\n",
+                "to build an agent on top of OpenAI's function API and store/index an arbitrary number of tools. Our indexing/retrieval modules help to remove the complexity of having too many functions to fit in the prompt."
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "54b7bc2e-606f-411a-9490-fcfab9236dfc",
+            "metadata": {
+                "tags": []
+            },
+            "source": [
+                "## Initial Setup "
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "23e80e5b-aaee-4f23-b338-7ae62b08141f",
+            "metadata": {},
+            "source": [
+                "Let's start by importing some simple building blocks.  \n",
+                "\n",
+                "The main thing we need is:\n",
+                "1. the OpenAI API\n",
+                "2. a place to keep conversation history \n",
+                "3. a definition for tools that our agent can use."
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 1,
+            "id": "9d47283b-025e-4874-88ed-76245b22f82e",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [
+                {
+                    "name": "stderr",
+                    "output_type": "stream",
+                    "text": [
+                        "/Users/suo/miniconda3/envs/llama/lib/python3.9/site-packages/deeplake/util/check_latest_version.py:32: UserWarning: A newer version of deeplake (3.6.7) is available. It's recommended that you update to the latest version using `pip install -U deeplake`.\n",
+                        "  warnings.warn(\n"
+                    ]
+                }
+            ],
+            "source": [
+                "import json\n",
+                "from typing import Sequence\n",
+                "\n",
+                "from llama_index.tools import BaseTool, FunctionTool"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "6fe08eb1-e638-4c00-9103-5c305bfacccf",
+            "metadata": {},
+            "source": [
+                "Let's define some very simple calculator tools for our agent."
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 2,
+            "id": "3dd3c4a6-f3e0-46f9-ad3b-7ba57d1bc992",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "def multiply(a: int, b: int) -> int:\n",
+                "    \"\"\"Multiply two integers and returns the result integer\"\"\"\n",
+                "    return a * b\n",
+                "\n",
+                "\n",
+                "def add(a: int, b: int) -> int:\n",
+                "    \"\"\"Add two integers and returns the result integer\"\"\"\n",
+                "    return a + b\n",
+                "\n",
+                "\n",
+                "def useless(a: int, b: int) -> int:\n",
+                "    \"\"\"Toy useless function.\"\"\"\n",
+                "    pass\n",
+                "\n",
+                "\n",
+                "multiply_tool = FunctionTool.from_defaults(fn=multiply, name=\"multiply\")\n",
+                "useless_tools = [\n",
+                "    FunctionTool.from_defaults(fn=useless, name=f\"useless_{str(idx)}\")\n",
+                "    for idx in range(28)\n",
+                "]\n",
+                "add_tool = FunctionTool.from_defaults(fn=add, name=\"add\")\n",
+                "\n",
+                "all_tools = [multiply_tool] + [add_tool] + useless_tools\n",
+                "all_tools_map = {t.metadata.name: t for t in all_tools}"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "8170dd32-fa00-458c-aeb5-37292d0773c0",
+            "metadata": {},
+            "source": [
+                "## Building an Object Index\n",
+                "\n",
+                "We have an `ObjectIndex` construct in LlamaIndex that allows the user to use our index data structures over arbitrary objects.\n",
+                "The ObjectIndex will handle serialiation to/from the object, and use an underying index (e.g. VectorStoreIndex, SummaryIndex, KeywordTableIndex) as the storage mechanism. \n",
+                "\n",
+                "In this case, we have a large collection of Tool objects, and we'd want to define an ObjectIndex over these Tools.\n",
+                "\n",
+                "The index comes bundled with a retrieval mechanism, an `ObjectRetriever`. \n",
+                "\n",
+                "This can be passed in to our agent so that it can \n",
+                "perform Tool retrieval during query-time."
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 3,
+            "id": "6704a755-7f05-43a3-8a56-f5f587ae4c40",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "# define an \"object\" index over these tools\n",
+                "from llama_index import VectorStoreIndex\n",
+                "from llama_index.objects import ObjectIndex, SimpleToolNodeMapping\n",
+                "\n",
+                "tool_mapping = SimpleToolNodeMapping.from_objects(all_tools)\n",
+                "obj_index = ObjectIndex.from_objects(\n",
+                "    all_tools,\n",
+                "    tool_mapping,\n",
+                "    VectorStoreIndex,\n",
+                ")"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "707d30b8-6405-4187-a9ed-6146dcc42167",
+            "metadata": {
+                "tags": []
+            },
+            "source": [
+                "## Our `FnRetrieverOpenAIAgent` Implementation "
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "798ca3fd-6711-4c0c-a853-d868dd14b484",
+            "metadata": {},
+            "source": [
+                "We provide a `FnRetrieverOpenAIAgent` implementation in LlamaIndex, which can take in an `ObjectRetriever` over a set of `BaseTool` objects.\n",
+                "\n",
+                "During query-time, we would first use the `ObjectRetriever` to retrieve a set of relevant Tools. These tools would then be passed into the agent; more specifically, their function signatures would be passed into the OpenAI Function calling API. "
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 4,
+            "id": "38ab3938-1138-43ea-b085-f430b42f5377",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "from llama_index.agent import FnRetrieverOpenAIAgent"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 5,
+            "id": "d852ece7-e5a1-4368-9d59-c7014e0b5b4d",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "agent = FnRetrieverOpenAIAgent.from_retriever(obj_index.as_retriever(), verbose=True)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 11,
+            "id": "33ea069f-819b-4ec1-a93c-fcbaacb362a1",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "=== Calling Function ===\n",
+                        "Calling function: multiply with args: {\n",
+                        "  \"a\": 212,\n",
+                        "  \"b\": 122\n",
+                        "}\n",
+                        "Got output: 25864\n",
+                        "========================\n"
+                    ]
+                },
+                {
+                    "data": {
+                        "text/plain": [
+                            "Response(response='212 multiplied by 122 is 25,864.', source_nodes=[], metadata=None)"
+                        ]
+                    },
+                    "execution_count": 11,
+                    "metadata": {},
+                    "output_type": "execute_result"
+                }
+            ],
+            "source": [
+                "agent.chat(\"What's 212 multiplied by 122? Make sure to use Tools\")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 12,
+            "id": "ec423b90-59cd-40ef-b497-a3842b3e7b58",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "=== Calling Function ===\n",
+                        "Calling function: add with args: {\n",
+                        "  \"a\": 212,\n",
+                        "  \"b\": 122\n",
+                        "}\n",
+                        "Got output: 334\n",
+                        "========================\n"
+                    ]
+                },
+                {
+                    "data": {
+                        "text/plain": [
+                            "Response(response='212 added to 122 is 334.', source_nodes=[], metadata=None)"
+                        ]
+                    },
+                    "execution_count": 12,
+                    "metadata": {},
+                    "output_type": "execute_result"
+                }
+            ],
+            "source": [
+                "agent.chat(\"What's 212 added to 122 ? Make sure to use Tools\")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "65bf017c-d381-414c-a4d4-d6a9d848e186",
+            "metadata": {},
+            "outputs": [],
+            "source": []
+        }
+    ],
+    "metadata": {
+        "kernelspec": {
+            "display_name": "Python 3 (ipykernel)",
+            "language": "python",
+            "name": "python3"
+        },
+        "language_info": {
+            "codemirror_mode": {
+                "name": "ipython",
+                "version": 3
+            },
+            "file_extension": ".py",
+            "mimetype": "text/x-python",
+            "name": "python",
+            "nbconvert_exporter": "python",
+            "pygments_lexer": "ipython3",
+            "version": "3.9.16"
+        }
     },
-    {
-     "data": {
-      "text/plain": [
-       "Response(response='212 multiplied by 122 is 25,864.', source_nodes=[], metadata=None)"
-      ]
-     },
-     "execution_count": 11,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "agent.chat(\"What's 212 multiplied by 122? Make sure to use Tools\")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 12,
-   "id": "ec423b90-59cd-40ef-b497-a3842b3e7b58",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "=== Calling Function ===\n",
-      "Calling function: add with args: {\n",
-      "  \"a\": 212,\n",
-      "  \"b\": 122\n",
-      "}\n",
-      "Got output: 334\n",
-      "========================\n"
-     ]
-    },
-    {
-     "data": {
-      "text/plain": [
-       "Response(response='212 added to 122 is 334.', source_nodes=[], metadata=None)"
-      ]
-     },
-     "execution_count": 12,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "agent.chat(\"What's 212 added to 122 ? Make sure to use Tools\")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "65bf017c-d381-414c-a4d4-d6a9d848e186",
-   "metadata": {},
-   "outputs": [],
-   "source": []
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "Python 3 (ipykernel)",
-   "language": "python",
-   "name": "python3"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 3
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython3",
-   "version": "3.9.16"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 5
-}
+    "nbformat": 4,
+    "nbformat_minor": 5
+}
\ No newline at end of file
diff --git a/docs/examples/callbacks/AimCallback.ipynb b/docs/examples/callbacks/AimCallback.ipynb
index eb9462d9d264611cded38a2ae5d76681db02586f..e32be528f060ac2059f66bff8e7b2ee230ba4efd 100644
--- a/docs/examples/callbacks/AimCallback.ipynb
+++ b/docs/examples/callbacks/AimCallback.ipynb
@@ -33,7 +33,7 @@
             "outputs": [],
             "source": [
                 "from llama_index.callbacks import CallbackManager, AimCallback\n",
-                "from llama_index import ListIndex, ServiceContext, SimpleDirectoryReader"
+                "from llama_index import SummaryIndex, ServiceContext, SimpleDirectoryReader"
             ]
         },
         {
@@ -82,7 +82,7 @@
             "metadata": {},
             "source": [
                 "In this snippet, we initialize a service context by providing the callback manager.\n",
-                "Next, we create an instance of `ListIndex` class, by passing in the document reader and the service context. After which we create a query engine which we will use to run queries on the index and retrieve relevant results."
+                "Next, we create an instance of `SummaryIndex` class, by passing in the document reader and the service context. After which we create a query engine which we will use to run queries on the index and retrieve relevant results."
             ]
         },
         {
@@ -93,7 +93,7 @@
             "outputs": [],
             "source": [
                 "service_context = ServiceContext.from_defaults(callback_manager=callback_manager)\n",
-                "index = ListIndex.from_documents(docs, service_context=service_context)\n",
+                "index = SummaryIndex.from_documents(docs, service_context=service_context)\n",
                 "query_engine = index.as_query_engine()"
             ]
         },
diff --git a/docs/examples/callbacks/WandbCallbackHandler.ipynb b/docs/examples/callbacks/WandbCallbackHandler.ipynb
index e993bf45ea305b1677bc7999b2bc9c416fd101b6..0dce8d8d1e839c1deb6d3227619114f682647f79 100644
--- a/docs/examples/callbacks/WandbCallbackHandler.ipynb
+++ b/docs/examples/callbacks/WandbCallbackHandler.ipynb
@@ -1,730 +1,730 @@
 {
- "cells": [
-  {
-   "cell_type": "markdown",
-   "id": "c0d8b66c",
-   "metadata": {},
-   "source": [
-    "# Wandb Callback Handler\n",
-    "\n",
-    "[Weights & Biases Prompts](https://docs.wandb.ai/guides/prompts) is a suite of LLMOps tools built for the development of LLM-powered applications.\n",
-    "\n",
-    "The `WandbCallbackHandler` is integrated with W&B Prompts to visualize and inspect the execution flow of your index construction, or querying over your index and more. You can use this handler to persist your created indices as W&B Artifacts allowing you to version control your indices.\n"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 1,
-   "id": "612f35ad",
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "OpenAI API key configured\n"
-     ]
-    }
-   ],
-   "source": [
-    "import os\n",
-    "from getpass import getpass\n",
-    "\n",
-    "if os.getenv(\"OPENAI_API_KEY\") is None:\n",
-    "    os.environ[\"OPENAI_API_KEY\"] = getpass(\n",
-    "        \"Paste your OpenAI key from: https://platform.openai.com/account/api-keys\\n\"\n",
-    "    )\n",
-    "assert os.getenv(\"OPENAI_API_KEY\", \"\").startswith(\n",
-    "    \"sk-\"\n",
-    "), \"This doesn't look like a valid OpenAI API key\"\n",
-    "print(\"OpenAI API key configured\")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 2,
-   "id": "78a29d9a",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "from llama_index.callbacks import CallbackManager, CBEventType\n",
-    "from llama_index.callbacks import LlamaDebugHandler, WandbCallbackHandler\n",
-    "from llama_index import (\n",
-    "    GPTListIndex,\n",
-    "    GPTTreeIndex,\n",
-    "    GPTVectorStoreIndex,\n",
-    "    ServiceContext,\n",
-    "    SimpleDirectoryReader,\n",
-    "    LLMPredictor,\n",
-    "    GPTSimpleKeywordTableIndex,\n",
-    "    StorageContext,\n",
-    ")\n",
-    "from llama_index.indices.composability import ComposableGraph\n",
-    "from llama_index import load_index_from_storage, load_graph_from_storage\n",
-    "from llama_index.llms import OpenAI"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "id": "e6feb252",
-   "metadata": {},
-   "source": [
-    "## Setup LLM"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 3,
-   "id": "d22fee33",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "llm = OpenAI(model=\"gpt-4\", temperature=0)"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "id": "8790f4c7",
-   "metadata": {},
-   "source": [
-    "## W&B Callback Manager Setup"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "id": "8a32b984-772e-4832-945e-cb6fc7be9e0b",
-   "metadata": {},
-   "source": [
-    "**Option 1**: Set Global Evaluation Handler"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "2a3b9d22-cd67-4fb5-9785-254e58179a02",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "from llama_index import set_global_handler\n",
-    "\n",
-    "set_global_handler(\"wandb\", run_args={\"project\": \"llamaindex\"})\n",
-    "wandb_callback = llama_index.global_handler"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "c0645550-0585-4d3f-b075-32905552b2c4",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "service_context = ServiceContext.from_defaults(llm=llm)"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "id": "d1755516-f8ad-458e-b52f-f7665c023e43",
-   "metadata": {},
-   "source": [
-    "**Option 2**: Manually Configure Callback Handler\n",
-    "\n",
-    "Also configure a debugger handler for extra notebook visibility."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 4,
-   "id": "defa9155-daca-4a8f-8ca6-87d1ee98f084",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "llama_debug = LlamaDebugHandler(print_trace_on_end=True)\n",
-    "\n",
-    "# wandb.init args\n",
-    "run_args = dict(\n",
-    "    project=\"llamaindex\",\n",
-    ")\n",
-    "\n",
-    "wandb_callback = WandbCallbackHandler(run_args=run_args)\n",
-    "\n",
-    "callback_manager = CallbackManager([llama_debug, wandb_callback])\n",
-    "\n",
-    "service_context = ServiceContext.from_defaults(\n",
-    "    callback_manager=callback_manager, llm=llm\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "id": "c4cf969a",
-   "metadata": {},
-   "source": [
-    "> After running the above cell, you will get the W&B run page URL. Here you will find a trace table with all the events tracked using [Weights and Biases' Prompts](https://docs.wandb.ai/guides/prompts) feature."
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "id": "a4a7c101",
-   "metadata": {},
-   "source": [
-    "## 1. Indexing"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 20,
-   "id": "d1011596",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "docs = SimpleDirectoryReader(\"../data/paul_graham/\").load_data()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 21,
-   "id": "d3d6975c",
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "**********\n",
-      "Trace: index_construction\n",
-      "    |_node_parsing ->  0.295179 seconds\n",
-      "      |_chunking ->  0.293976 seconds\n",
-      "    |_embedding ->  0.494492 seconds\n",
-      "    |_embedding ->  0.346162 seconds\n",
-      "**********\n"
-     ]
+    "cells": [
+        {
+            "cell_type": "markdown",
+            "id": "c0d8b66c",
+            "metadata": {},
+            "source": [
+                "# Wandb Callback Handler\n",
+                "\n",
+                "[Weights & Biases Prompts](https://docs.wandb.ai/guides/prompts) is a suite of LLMOps tools built for the development of LLM-powered applications.\n",
+                "\n",
+                "The `WandbCallbackHandler` is integrated with W&B Prompts to visualize and inspect the execution flow of your index construction, or querying over your index and more. You can use this handler to persist your created indices as W&B Artifacts allowing you to version control your indices.\n"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 1,
+            "id": "612f35ad",
+            "metadata": {},
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "OpenAI API key configured\n"
+                    ]
+                }
+            ],
+            "source": [
+                "import os\n",
+                "from getpass import getpass\n",
+                "\n",
+                "if os.getenv(\"OPENAI_API_KEY\") is None:\n",
+                "    os.environ[\"OPENAI_API_KEY\"] = getpass(\n",
+                "        \"Paste your OpenAI key from: https://platform.openai.com/account/api-keys\\n\"\n",
+                "    )\n",
+                "assert os.getenv(\"OPENAI_API_KEY\", \"\").startswith(\n",
+                "    \"sk-\"\n",
+                "), \"This doesn't look like a valid OpenAI API key\"\n",
+                "print(\"OpenAI API key configured\")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 2,
+            "id": "78a29d9a",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "from llama_index.callbacks import CallbackManager, CBEventType\n",
+                "from llama_index.callbacks import LlamaDebugHandler, WandbCallbackHandler\n",
+                "from llama_index import (\n",
+                "    SummaryIndex,\n",
+                "    GPTTreeIndex,\n",
+                "    GPTVectorStoreIndex,\n",
+                "    ServiceContext,\n",
+                "    SimpleDirectoryReader,\n",
+                "    LLMPredictor,\n",
+                "    GPTSimpleKeywordTableIndex,\n",
+                "    StorageContext,\n",
+                ")\n",
+                "from llama_index.indices.composability import ComposableGraph\n",
+                "from llama_index import load_index_from_storage, load_graph_from_storage\n",
+                "from llama_index.llms import OpenAI"
+            ]
+        },
+        {
+            "cell_type": "markdown",
+            "id": "e6feb252",
+            "metadata": {},
+            "source": [
+                "## Setup LLM"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 3,
+            "id": "d22fee33",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "llm = OpenAI(model=\"gpt-4\", temperature=0)"
+            ]
+        },
+        {
+            "cell_type": "markdown",
+            "id": "8790f4c7",
+            "metadata": {},
+            "source": [
+                "## W&B Callback Manager Setup"
+            ]
+        },
+        {
+            "cell_type": "markdown",
+            "id": "8a32b984-772e-4832-945e-cb6fc7be9e0b",
+            "metadata": {},
+            "source": [
+                "**Option 1**: Set Global Evaluation Handler"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "2a3b9d22-cd67-4fb5-9785-254e58179a02",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "import llama_index\n",
+                "from llama_index import set_global_handler\n",
+                "\n",
+                "set_global_handler(\"wandb\", run_args={\"project\": \"llamaindex\"})\n",
+                "wandb_callback = llama_index.global_handler"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "c0645550-0585-4d3f-b075-32905552b2c4",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "service_context = ServiceContext.from_defaults(llm=llm)"
+            ]
+        },
+        {
+            "cell_type": "markdown",
+            "id": "d1755516-f8ad-458e-b52f-f7665c023e43",
+            "metadata": {},
+            "source": [
+                "**Option 2**: Manually Configure Callback Handler\n",
+                "\n",
+                "Also configure a debugger handler for extra notebook visibility."
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 4,
+            "id": "defa9155-daca-4a8f-8ca6-87d1ee98f084",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "llama_debug = LlamaDebugHandler(print_trace_on_end=True)\n",
+                "\n",
+                "# wandb.init args\n",
+                "run_args = dict(\n",
+                "    project=\"llamaindex\",\n",
+                ")\n",
+                "\n",
+                "wandb_callback = WandbCallbackHandler(run_args=run_args)\n",
+                "\n",
+                "callback_manager = CallbackManager([llama_debug, wandb_callback])\n",
+                "\n",
+                "service_context = ServiceContext.from_defaults(\n",
+                "    callback_manager=callback_manager, llm=llm\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "markdown",
+            "id": "c4cf969a",
+            "metadata": {},
+            "source": [
+                "> After running the above cell, you will get the W&B run page URL. Here you will find a trace table with all the events tracked using [Weights and Biases' Prompts](https://docs.wandb.ai/guides/prompts) feature."
+            ]
+        },
+        {
+            "cell_type": "markdown",
+            "id": "a4a7c101",
+            "metadata": {},
+            "source": [
+                "## 1. Indexing"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 20,
+            "id": "d1011596",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "docs = SimpleDirectoryReader(\"../data/paul_graham/\").load_data()"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 21,
+            "id": "d3d6975c",
+            "metadata": {},
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "**********\n",
+                        "Trace: index_construction\n",
+                        "    |_node_parsing ->  0.295179 seconds\n",
+                        "      |_chunking ->  0.293976 seconds\n",
+                        "    |_embedding ->  0.494492 seconds\n",
+                        "    |_embedding ->  0.346162 seconds\n",
+                        "**********\n"
+                    ]
+                },
+                {
+                    "name": "stderr",
+                    "output_type": "stream",
+                    "text": [
+                        "\u001b[34m\u001b[1mwandb\u001b[0m: Logged trace tree to W&B.\n"
+                    ]
+                }
+            ],
+            "source": [
+                "index = GPTVectorStoreIndex.from_documents(docs, service_context=service_context)"
+            ]
+        },
+        {
+            "cell_type": "markdown",
+            "id": "0a948efc",
+            "metadata": {},
+            "source": [
+                "### 1.1 Persist Index as W&B Artifacts"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 22,
+            "id": "8ad58e67",
+            "metadata": {},
+            "outputs": [
+                {
+                    "name": "stderr",
+                    "output_type": "stream",
+                    "text": [
+                        "\u001b[34m\u001b[1mwandb\u001b[0m: Adding directory to artifact (/Users/loganmarkewich/llama_index/docs/examples/callbacks/wandb/run-20230801_152955-ds93prxa/files/storage)... Done. 0.0s\n"
+                    ]
+                }
+            ],
+            "source": [
+                "wandb_callback.persist_index(index, index_name=\"simple_vector_store\")"
+            ]
+        },
+        {
+            "cell_type": "markdown",
+            "id": "7ed156a6",
+            "metadata": {},
+            "source": [
+                "### 1.2 Download Index from W&B Artifacts"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 23,
+            "id": "dc35f448",
+            "metadata": {},
+            "outputs": [
+                {
+                    "name": "stderr",
+                    "output_type": "stream",
+                    "text": [
+                        "\u001b[34m\u001b[1mwandb\u001b[0m:   3 of 3 files downloaded.  \n"
+                    ]
+                },
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "**********\n",
+                        "Trace: index_construction\n",
+                        "**********\n"
+                    ]
+                }
+            ],
+            "source": [
+                "storage_context = wandb_callback.load_storage_context(\n",
+                "    artifact_url=\"ayut/llamaindex/simple_vector_store:v0\"\n",
+                ")\n",
+                "\n",
+                "# Load the index and initialize a query engine\n",
+                "index = load_index_from_storage(storage_context, service_context=service_context)"
+            ]
+        },
+        {
+            "cell_type": "markdown",
+            "id": "ae4de4a9",
+            "metadata": {},
+            "source": [
+                "## 2. Query Over Index"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 24,
+            "id": "42221465",
+            "metadata": {},
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "**********\n",
+                        "Trace: query\n",
+                        "    |_query ->  2.695958 seconds\n",
+                        "      |_retrieve ->  0.806379 seconds\n",
+                        "        |_embedding ->  0.802871 seconds\n",
+                        "      |_synthesize ->  1.8893 seconds\n",
+                        "        |_llm ->  1.842434 seconds\n",
+                        "**********\n"
+                    ]
+                },
+                {
+                    "name": "stderr",
+                    "output_type": "stream",
+                    "text": [
+                        "\u001b[34m\u001b[1mwandb\u001b[0m: Logged trace tree to W&B.\n"
+                    ]
+                },
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "The text does not provide information on what the author did growing up.\n"
+                    ]
+                }
+            ],
+            "source": [
+                "query_engine = index.as_query_engine()\n",
+                "response = query_engine.query(\"What did the author do growing up?\")\n",
+                "print(response, sep=\"\\n\")"
+            ]
+        },
+        {
+            "cell_type": "markdown",
+            "id": "d7250272",
+            "metadata": {},
+            "source": [
+                "## 3. Build Complex Indices"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 25,
+            "id": "3a5f2671",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# fetch \"New York City\" page from Wikipedia\n",
+                "from pathlib import Path\n",
+                "\n",
+                "import requests\n",
+                "\n",
+                "response = requests.get(\n",
+                "    \"https://en.wikipedia.org/w/api.php\",\n",
+                "    params={\n",
+                "        \"action\": \"query\",\n",
+                "        \"format\": \"json\",\n",
+                "        \"titles\": \"New York City\",\n",
+                "        \"prop\": \"extracts\",\n",
+                "        \"explaintext\": True,\n",
+                "    },\n",
+                ").json()\n",
+                "page = next(iter(response[\"query\"][\"pages\"].values()))\n",
+                "nyc_text = page[\"extract\"]\n",
+                "\n",
+                "data_path = Path(\"data\")\n",
+                "if not data_path.exists():\n",
+                "    Path.mkdir(data_path)\n",
+                "\n",
+                "with open(\"data/nyc_text.txt\", \"w\") as fp:\n",
+                "    fp.write(nyc_text)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 26,
+            "id": "cf0c0307",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# load NYC dataset\n",
+                "nyc_documents = SimpleDirectoryReader(\"data/\").load_data()\n",
+                "# load PG's essay\n",
+                "essay_documents = SimpleDirectoryReader(\"../data/paul_graham\").load_data()"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 27,
+            "id": "0c2dbdea",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# While building a composable index, to correctly save the index,\n",
+                "# the same `storage_context` needs to be passed to every index.\n",
+                "storage_context = StorageContext.from_defaults()"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 28,
+            "id": "1d795f6f",
+            "metadata": {},
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "**********\n",
+                        "Trace: index_construction\n",
+                        "    |_node_parsing ->  0.491078 seconds\n",
+                        "      |_chunking ->  0.48921 seconds\n",
+                        "    |_embedding ->  0.314621 seconds\n",
+                        "    |_embedding ->  0.65393 seconds\n",
+                        "    |_embedding ->  0.452587 seconds\n",
+                        "    |_embedding ->  0.510454 seconds\n",
+                        "**********\n"
+                    ]
+                },
+                {
+                    "name": "stderr",
+                    "output_type": "stream",
+                    "text": [
+                        "\u001b[34m\u001b[1mwandb\u001b[0m: Logged trace tree to W&B.\n"
+                    ]
+                }
+            ],
+            "source": [
+                "# build NYC index\n",
+                "nyc_index = GPTVectorStoreIndex.from_documents(\n",
+                "    nyc_documents, service_context=service_context, storage_context=storage_context\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 29,
+            "id": "e9a49c5a",
+            "metadata": {},
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "**********\n",
+                        "Trace: index_construction\n",
+                        "    |_node_parsing ->  0.340749 seconds\n",
+                        "      |_chunking ->  0.339598 seconds\n",
+                        "    |_embedding ->  0.280761 seconds\n",
+                        "    |_embedding ->  0.315542 seconds\n",
+                        "**********\n"
+                    ]
+                },
+                {
+                    "name": "stderr",
+                    "output_type": "stream",
+                    "text": [
+                        "\u001b[34m\u001b[1mwandb\u001b[0m: Logged trace tree to W&B.\n"
+                    ]
+                }
+            ],
+            "source": [
+                "# build essay index\n",
+                "essay_index = GPTVectorStoreIndex.from_documents(\n",
+                "    essay_documents, service_context=service_context, storage_context=storage_context\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "markdown",
+            "id": "60aa7e5f",
+            "metadata": {},
+            "source": [
+                "### 3.1. Query Over Graph Index"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 36,
+            "id": "d2704b34",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "nyc_index_summary = \"\"\"\n",
+                "    New York, often called New York City or NYC, \n",
+                "    is the most populous city in the United States. \n",
+                "    With a 2020 population of 8,804,190 distributed over 300.46 square miles (778.2 km2), \n",
+                "    New York City is also the most densely populated major city in the United States, \n",
+                "    and is more than twice as populous as second-place Los Angeles. \n",
+                "    New York City lies at the southern tip of New York State, and \n",
+                "    constitutes the geographical and demographic center of both the \n",
+                "    Northeast megalopolis and the New York metropolitan area, the \n",
+                "    largest metropolitan area in the world by urban landmass.[8] With over \n",
+                "    20.1 million people in its metropolitan statistical area and 23.5 million \n",
+                "    in its combined statistical area as of 2020, New York is one of the world's \n",
+                "    most populous megacities, and over 58 million people live within 250 mi (400 km) of \n",
+                "    the city. New York City is a global cultural, financial, and media center with \n",
+                "    a significant influence on commerce, health care and life sciences, entertainment, \n",
+                "    research, technology, education, politics, tourism, dining, art, fashion, and sports. \n",
+                "    Home to the headquarters of the United Nations, \n",
+                "    New York is an important center for international diplomacy,\n",
+                "    an established safe haven for global investors, and is sometimes described as the capital of the world.\n",
+                "\"\"\"\n",
+                "essay_index_summary = \"\"\"\n",
+                "    Author: Paul Graham. \n",
+                "    The author grew up painting and writing essays. \n",
+                "    He wrote a book on Lisp and did freelance Lisp hacking work to support himself. \n",
+                "    He also became the de facto studio assistant for Idelle Weber, an early photorealist painter. \n",
+                "    He eventually had the idea to start a company to put art galleries online, but the idea was unsuccessful. \n",
+                "    He then had the idea to write software to build online stores, which became the basis for his successful company, Viaweb. \n",
+                "    After Viaweb was acquired by Yahoo!, the author returned to painting and started writing essays online. \n",
+                "    He wrote a book of essays, Hackers & Painters, and worked on spam filters. \n",
+                "    He also bought a building in Cambridge to use as an office. \n",
+                "    He then had the idea to start Y Combinator, an investment firm that would \n",
+                "    make a larger number of smaller investments and help founders remain as CEO. \n",
+                "    He and his partner Jessica Livingston ran Y Combinator and funded a batch of startups twice a year. \n",
+                "    He also continued to write essays, cook for groups of friends, and explore the concept of invented vs discovered in software. \n",
+                "\"\"\""
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 37,
+            "id": "353a644b",
+            "metadata": {
+                "scrolled": true
+            },
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "**********\n",
+                        "Trace: graph_construction\n",
+                        "**********\n"
+                    ]
+                }
+            ],
+            "source": [
+                "from llama_index import StorageContext, load_graph_from_storage\n",
+                "\n",
+                "graph = ComposableGraph.from_indices(\n",
+                "    GPTSimpleKeywordTableIndex,\n",
+                "    [nyc_index, essay_index],\n",
+                "    index_summaries=[nyc_index_summary, essay_index_summary],\n",
+                "    max_keywords_per_chunk=50,\n",
+                "    service_context=service_context,\n",
+                "    storage_context=storage_context,\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "markdown",
+            "id": "cda70171",
+            "metadata": {},
+            "source": [
+                "### 3.1.1 Persist Composable Index as W&B Artifacts"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 17,
+            "id": "4c3ebaf7",
+            "metadata": {},
+            "outputs": [
+                {
+                    "name": "stderr",
+                    "output_type": "stream",
+                    "text": [
+                        "\u001b[34m\u001b[1mwandb\u001b[0m: Adding directory to artifact (/Users/ayushthakur/integrations/llamaindex/llama_index/docs/examples/callbacks/wandb/run-20230607_012558-js7j48l9/files/storage)... Done. 0.0s\n"
+                    ]
+                }
+            ],
+            "source": [
+                "wandb_callback.persist_index(graph, index_name=\"composable_graph\")"
+            ]
+        },
+        {
+            "cell_type": "markdown",
+            "id": "ff60da73",
+            "metadata": {},
+            "source": [
+                "### 3.1.2 Download Index from W&B Artifacts"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 18,
+            "id": "8ce7ecb3",
+            "metadata": {},
+            "outputs": [
+                {
+                    "name": "stderr",
+                    "output_type": "stream",
+                    "text": [
+                        "\u001b[34m\u001b[1mwandb\u001b[0m:   3 of 3 files downloaded.  \n"
+                    ]
+                },
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "**********\n",
+                        "Trace: index_construction\n",
+                        "**********\n",
+                        "**********\n",
+                        "Trace: index_construction\n",
+                        "**********\n",
+                        "**********\n",
+                        "Trace: index_construction\n",
+                        "**********\n"
+                    ]
+                }
+            ],
+            "source": [
+                "storage_context = wandb_callback.load_storage_context(\n",
+                "    artifact_url=\"ayut/llamaindex/composable_graph:v0\"\n",
+                ")\n",
+                "\n",
+                "# Load the graph and initialize a query engine\n",
+                "graph = load_graph_from_storage(\n",
+                "    storage_context, root_id=graph.root_id, service_context=service_context\n",
+                ")\n",
+                "query_engine = graph.as_query_engine()"
+            ]
+        },
+        {
+            "cell_type": "markdown",
+            "id": "b30ddfc9",
+            "metadata": {},
+            "source": [
+                "### 3.1.3 Query"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 19,
+            "id": "e852e00f",
+            "metadata": {},
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "**********\n",
+                        "Trace: query\n",
+                        "    |_query ->  58.207419 seconds\n",
+                        "      |_retrieve ->  2.672269 seconds\n",
+                        "        |_llm ->  2.671922 seconds\n",
+                        "      |_query ->  39.630366 seconds\n",
+                        "        |_retrieve ->  0.165883 seconds\n",
+                        "          |_embedding ->  0.158699 seconds\n",
+                        "        |_synthesize ->  39.46435 seconds\n",
+                        "          |_llm ->  39.410054 seconds\n",
+                        "      |_synthesize ->  15.904373 seconds\n",
+                        "        |_llm ->  15.900012 seconds\n",
+                        "**********\n"
+                    ]
+                },
+                {
+                    "name": "stderr",
+                    "output_type": "stream",
+                    "text": [
+                        "\u001b[34m\u001b[1mwandb\u001b[0m: Logged trace tree to W&B.\n"
+                    ]
+                },
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "New York City has a humid subtropical climate, making it the northernmost major city in North America with this type of climate. During the winter, the city is chilly and damp. The average daily temperature in January, the coldest month, is 33.3 °F (0.7 °C). Temperatures can drop to 10 °F (−12 °C) several times each winter, but can also reach 60 °F (16 °C) for several days even in the coldest winter month. The city also experiences the urban heat island effect, which can increase nighttime temperatures. The most extreme temperatures have ranged from −15 °F (−26 °C) to 106 °F (41 °C).\n"
+                    ]
+                }
+            ],
+            "source": [
+                "query_engine = graph.as_query_engine()\n",
+                "\n",
+                "response = query_engine.query(\n",
+                "    \"What is the climate of New York City like? How cold is it during the winter?\",\n",
+                ")\n",
+                "print(response, sep=\"\\n\")"
+            ]
+        },
+        {
+            "cell_type": "markdown",
+            "id": "c49ff101",
+            "metadata": {},
+            "source": [
+                "## Close W&B Callback Handler\n",
+                "\n",
+                "When we are done tracking our events we can close the wandb run."
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 39,
+            "id": "28ef6a7b",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "wandb_callback.finish()"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "510f771b",
+            "metadata": {},
+            "outputs": [],
+            "source": []
+        }
+    ],
+    "metadata": {
+        "kernelspec": {
+            "display_name": "Python 3 (ipykernel)",
+            "language": "python",
+            "name": "python3"
+        },
+        "language_info": {
+            "codemirror_mode": {
+                "name": "ipython",
+                "version": 3
+            },
+            "file_extension": ".py",
+            "mimetype": "text/x-python",
+            "name": "python",
+            "nbconvert_exporter": "python",
+            "pygments_lexer": "ipython3",
+            "version": "3.10.10"
+        }
     },
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "\u001b[34m\u001b[1mwandb\u001b[0m: Logged trace tree to W&B.\n"
-     ]
-    }
-   ],
-   "source": [
-    "index = GPTVectorStoreIndex.from_documents(docs, service_context=service_context)"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "id": "0a948efc",
-   "metadata": {},
-   "source": [
-    "### 1.1 Persist Index as W&B Artifacts"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 22,
-   "id": "8ad58e67",
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "\u001b[34m\u001b[1mwandb\u001b[0m: Adding directory to artifact (/Users/loganmarkewich/llama_index/docs/examples/callbacks/wandb/run-20230801_152955-ds93prxa/files/storage)... Done. 0.0s\n"
-     ]
-    }
-   ],
-   "source": [
-    "wandb_callback.persist_index(index, index_name=\"simple_vector_store\")"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "id": "7ed156a6",
-   "metadata": {},
-   "source": [
-    "### 1.2 Download Index from W&B Artifacts"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 23,
-   "id": "dc35f448",
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "\u001b[34m\u001b[1mwandb\u001b[0m:   3 of 3 files downloaded.  \n"
-     ]
-    },
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "**********\n",
-      "Trace: index_construction\n",
-      "**********\n"
-     ]
-    }
-   ],
-   "source": [
-    "storage_context = wandb_callback.load_storage_context(\n",
-    "    artifact_url=\"ayut/llamaindex/simple_vector_store:v0\"\n",
-    ")\n",
-    "\n",
-    "# Load the index and initialize a query engine\n",
-    "index = load_index_from_storage(storage_context, service_context=service_context)"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "id": "ae4de4a9",
-   "metadata": {},
-   "source": [
-    "## 2. Query Over Index"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 24,
-   "id": "42221465",
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "**********\n",
-      "Trace: query\n",
-      "    |_query ->  2.695958 seconds\n",
-      "      |_retrieve ->  0.806379 seconds\n",
-      "        |_embedding ->  0.802871 seconds\n",
-      "      |_synthesize ->  1.8893 seconds\n",
-      "        |_llm ->  1.842434 seconds\n",
-      "**********\n"
-     ]
-    },
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "\u001b[34m\u001b[1mwandb\u001b[0m: Logged trace tree to W&B.\n"
-     ]
-    },
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "The text does not provide information on what the author did growing up.\n"
-     ]
-    }
-   ],
-   "source": [
-    "query_engine = index.as_query_engine()\n",
-    "response = query_engine.query(\"What did the author do growing up?\")\n",
-    "print(response, sep=\"\\n\")"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "id": "d7250272",
-   "metadata": {},
-   "source": [
-    "## 3. Build Complex Indices"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 25,
-   "id": "3a5f2671",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# fetch \"New York City\" page from Wikipedia\n",
-    "from pathlib import Path\n",
-    "\n",
-    "import requests\n",
-    "\n",
-    "response = requests.get(\n",
-    "    \"https://en.wikipedia.org/w/api.php\",\n",
-    "    params={\n",
-    "        \"action\": \"query\",\n",
-    "        \"format\": \"json\",\n",
-    "        \"titles\": \"New York City\",\n",
-    "        \"prop\": \"extracts\",\n",
-    "        \"explaintext\": True,\n",
-    "    },\n",
-    ").json()\n",
-    "page = next(iter(response[\"query\"][\"pages\"].values()))\n",
-    "nyc_text = page[\"extract\"]\n",
-    "\n",
-    "data_path = Path(\"data\")\n",
-    "if not data_path.exists():\n",
-    "    Path.mkdir(data_path)\n",
-    "\n",
-    "with open(\"data/nyc_text.txt\", \"w\") as fp:\n",
-    "    fp.write(nyc_text)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 26,
-   "id": "cf0c0307",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# load NYC dataset\n",
-    "nyc_documents = SimpleDirectoryReader(\"data/\").load_data()\n",
-    "# load PG's essay\n",
-    "essay_documents = SimpleDirectoryReader(\"../data/paul_graham\").load_data()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 27,
-   "id": "0c2dbdea",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# While building a composable index, to correctly save the index,\n",
-    "# the same `storage_context` needs to be passed to every index.\n",
-    "storage_context = StorageContext.from_defaults()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 28,
-   "id": "1d795f6f",
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "**********\n",
-      "Trace: index_construction\n",
-      "    |_node_parsing ->  0.491078 seconds\n",
-      "      |_chunking ->  0.48921 seconds\n",
-      "    |_embedding ->  0.314621 seconds\n",
-      "    |_embedding ->  0.65393 seconds\n",
-      "    |_embedding ->  0.452587 seconds\n",
-      "    |_embedding ->  0.510454 seconds\n",
-      "**********\n"
-     ]
-    },
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "\u001b[34m\u001b[1mwandb\u001b[0m: Logged trace tree to W&B.\n"
-     ]
-    }
-   ],
-   "source": [
-    "# build NYC index\n",
-    "nyc_index = GPTVectorStoreIndex.from_documents(\n",
-    "    nyc_documents, service_context=service_context, storage_context=storage_context\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 29,
-   "id": "e9a49c5a",
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "**********\n",
-      "Trace: index_construction\n",
-      "    |_node_parsing ->  0.340749 seconds\n",
-      "      |_chunking ->  0.339598 seconds\n",
-      "    |_embedding ->  0.280761 seconds\n",
-      "    |_embedding ->  0.315542 seconds\n",
-      "**********\n"
-     ]
-    },
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "\u001b[34m\u001b[1mwandb\u001b[0m: Logged trace tree to W&B.\n"
-     ]
-    }
-   ],
-   "source": [
-    "# build essay index\n",
-    "essay_index = GPTVectorStoreIndex.from_documents(\n",
-    "    essay_documents, service_context=service_context, storage_context=storage_context\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "id": "60aa7e5f",
-   "metadata": {},
-   "source": [
-    "### 3.1. Query Over Graph Index"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 36,
-   "id": "d2704b34",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "nyc_index_summary = \"\"\"\n",
-    "    New York, often called New York City or NYC, \n",
-    "    is the most populous city in the United States. \n",
-    "    With a 2020 population of 8,804,190 distributed over 300.46 square miles (778.2 km2), \n",
-    "    New York City is also the most densely populated major city in the United States, \n",
-    "    and is more than twice as populous as second-place Los Angeles. \n",
-    "    New York City lies at the southern tip of New York State, and \n",
-    "    constitutes the geographical and demographic center of both the \n",
-    "    Northeast megalopolis and the New York metropolitan area, the \n",
-    "    largest metropolitan area in the world by urban landmass.[8] With over \n",
-    "    20.1 million people in its metropolitan statistical area and 23.5 million \n",
-    "    in its combined statistical area as of 2020, New York is one of the world's \n",
-    "    most populous megacities, and over 58 million people live within 250 mi (400 km) of \n",
-    "    the city. New York City is a global cultural, financial, and media center with \n",
-    "    a significant influence on commerce, health care and life sciences, entertainment, \n",
-    "    research, technology, education, politics, tourism, dining, art, fashion, and sports. \n",
-    "    Home to the headquarters of the United Nations, \n",
-    "    New York is an important center for international diplomacy,\n",
-    "    an established safe haven for global investors, and is sometimes described as the capital of the world.\n",
-    "\"\"\"\n",
-    "essay_index_summary = \"\"\"\n",
-    "    Author: Paul Graham. \n",
-    "    The author grew up painting and writing essays. \n",
-    "    He wrote a book on Lisp and did freelance Lisp hacking work to support himself. \n",
-    "    He also became the de facto studio assistant for Idelle Weber, an early photorealist painter. \n",
-    "    He eventually had the idea to start a company to put art galleries online, but the idea was unsuccessful. \n",
-    "    He then had the idea to write software to build online stores, which became the basis for his successful company, Viaweb. \n",
-    "    After Viaweb was acquired by Yahoo!, the author returned to painting and started writing essays online. \n",
-    "    He wrote a book of essays, Hackers & Painters, and worked on spam filters. \n",
-    "    He also bought a building in Cambridge to use as an office. \n",
-    "    He then had the idea to start Y Combinator, an investment firm that would \n",
-    "    make a larger number of smaller investments and help founders remain as CEO. \n",
-    "    He and his partner Jessica Livingston ran Y Combinator and funded a batch of startups twice a year. \n",
-    "    He also continued to write essays, cook for groups of friends, and explore the concept of invented vs discovered in software. \n",
-    "\"\"\""
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 37,
-   "id": "353a644b",
-   "metadata": {
-    "scrolled": true
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "**********\n",
-      "Trace: graph_construction\n",
-      "**********\n"
-     ]
-    }
-   ],
-   "source": [
-    "from llama_index import StorageContext, load_graph_from_storage\n",
-    "\n",
-    "graph = ComposableGraph.from_indices(\n",
-    "    GPTSimpleKeywordTableIndex,\n",
-    "    [nyc_index, essay_index],\n",
-    "    index_summaries=[nyc_index_summary, essay_index_summary],\n",
-    "    max_keywords_per_chunk=50,\n",
-    "    service_context=service_context,\n",
-    "    storage_context=storage_context,\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "id": "cda70171",
-   "metadata": {},
-   "source": [
-    "### 3.1.1 Persist Composable Index as W&B Artifacts "
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 17,
-   "id": "4c3ebaf7",
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "\u001b[34m\u001b[1mwandb\u001b[0m: Adding directory to artifact (/Users/ayushthakur/integrations/llamaindex/llama_index/docs/examples/callbacks/wandb/run-20230607_012558-js7j48l9/files/storage)... Done. 0.0s\n"
-     ]
-    }
-   ],
-   "source": [
-    "wandb_callback.persist_index(graph, index_name=\"composable_graph\")"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "id": "ff60da73",
-   "metadata": {},
-   "source": [
-    "### 3.1.2 Download Index from W&B Artifacts"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 18,
-   "id": "8ce7ecb3",
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "\u001b[34m\u001b[1mwandb\u001b[0m:   3 of 3 files downloaded.  \n"
-     ]
-    },
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "**********\n",
-      "Trace: index_construction\n",
-      "**********\n",
-      "**********\n",
-      "Trace: index_construction\n",
-      "**********\n",
-      "**********\n",
-      "Trace: index_construction\n",
-      "**********\n"
-     ]
-    }
-   ],
-   "source": [
-    "storage_context = wandb_callback.load_storage_context(\n",
-    "    artifact_url=\"ayut/llamaindex/composable_graph:v0\"\n",
-    ")\n",
-    "\n",
-    "# Load the graph and initialize a query engine\n",
-    "graph = load_graph_from_storage(\n",
-    "    storage_context, root_id=graph.root_id, service_context=service_context\n",
-    ")\n",
-    "query_engine = index.as_query_engine()"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "id": "b30ddfc9",
-   "metadata": {},
-   "source": [
-    "### 3.1.3 Query"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 19,
-   "id": "e852e00f",
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "**********\n",
-      "Trace: query\n",
-      "    |_query ->  58.207419 seconds\n",
-      "      |_retrieve ->  2.672269 seconds\n",
-      "        |_llm ->  2.671922 seconds\n",
-      "      |_query ->  39.630366 seconds\n",
-      "        |_retrieve ->  0.165883 seconds\n",
-      "          |_embedding ->  0.158699 seconds\n",
-      "        |_synthesize ->  39.46435 seconds\n",
-      "          |_llm ->  39.410054 seconds\n",
-      "      |_synthesize ->  15.904373 seconds\n",
-      "        |_llm ->  15.900012 seconds\n",
-      "**********\n"
-     ]
-    },
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "\u001b[34m\u001b[1mwandb\u001b[0m: Logged trace tree to W&B.\n"
-     ]
-    },
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "New York City has a humid subtropical climate, making it the northernmost major city in North America with this type of climate. During the winter, the city is chilly and damp. The average daily temperature in January, the coldest month, is 33.3 °F (0.7 °C). Temperatures can drop to 10 °F (−12 °C) several times each winter, but can also reach 60 °F (16 °C) for several days even in the coldest winter month. The city also experiences the urban heat island effect, which can increase nighttime temperatures. The most extreme temperatures have ranged from −15 °F (−26 °C) to 106 °F (41 °C).\n"
-     ]
-    }
-   ],
-   "source": [
-    "query_engine = graph.as_query_engine()\n",
-    "\n",
-    "response = query_engine.query(\n",
-    "    \"What is the climate of New York City like? How cold is it during the winter?\",\n",
-    ")\n",
-    "print(response, sep=\"\\n\")"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "id": "c49ff101",
-   "metadata": {},
-   "source": [
-    "## Close W&B Callback Handler\n",
-    "\n",
-    "When we are done tracking our events we can close the wandb run."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 39,
-   "id": "28ef6a7b",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "wandb_callback.finish()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "510f771b",
-   "metadata": {},
-   "outputs": [],
-   "source": []
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "Python 3 (ipykernel)",
-   "language": "python",
-   "name": "python3"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 3
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython3",
-   "version": "3.10.10"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 5
-}
+    "nbformat": 4,
+    "nbformat_minor": 5
+}
\ No newline at end of file
diff --git a/docs/examples/composable_indices/ComposableIndices-Prior.ipynb b/docs/examples/composable_indices/ComposableIndices-Prior.ipynb
index 7ff7ddf642115113adf4f307a615fb0c21e66d67..a6dbeb07322155ffc7ee2c105bc0b680ca995cb3 100644
--- a/docs/examples/composable_indices/ComposableIndices-Prior.ipynb
+++ b/docs/examples/composable_indices/ComposableIndices-Prior.ipynb
@@ -1,490 +1,490 @@
 {
- "cells": [
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "cfb64210-9c6b-47d7-81f4-67dbdab68e4c",
-   "metadata": {
-    "tags": []
-   },
-   "source": [
-    "# Composable Graph Basic"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "41927486",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# NOTE: This is ONLY necessary in jupyter notebook.\n",
-    "# Details: Jupyter runs an event-loop behind the scenes.\n",
-    "#          This results in nested event-loops when we start an event-loop to make async queries.\n",
-    "#          This is normally not allowed, we use nest_asyncio to allow it for convenience.\n",
-    "import nest_asyncio\n",
-    "\n",
-    "nest_asyncio.apply()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "fa0e62b6",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "import logging\n",
-    "import sys\n",
-    "\n",
-    "logging.basicConfig(stream=sys.stdout, level=logging.INFO)\n",
-    "logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "e27b0473-4bda-47f0-b6ed-fd482eac1a13",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "from llama_index import (\n",
-    "    VectorStoreIndex,\n",
-    "    EmptyIndex,\n",
-    "    TreeIndex,\n",
-    "    ListIndex,\n",
-    "    SimpleDirectoryReader,\n",
-    "    ServiceContext,\n",
-    "    StorageContext,\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "id": "49e0d841-680f-4a0c-b455-788b54978ebf",
-   "metadata": {},
-   "source": [
-    "### Load Datasets\n",
-    "\n",
-    "Load PG's essay"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "ddff8f98-e002-40c5-93ac-93aa40dca5ca",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# load PG's essay\n",
-    "essay_documents = SimpleDirectoryReader(\"../paul_graham_essay/data/\").load_data()"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "id": "f1782198-c0de-4679-8951-1297c21b8639",
-   "metadata": {
-    "tags": []
-   },
-   "source": [
-    "### Building the document indices\n",
-    "- Build a vector index for PG's essay\n",
-    "- Also build an empty index (to store prior knowledge)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "8b5aad4a-49ef-4b24-962a-0793f4f09316",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# configure\n",
-    "service_context = ServiceContext.from_defaults(chunk_size=512)\n",
-    "storage_context = StorageContext.from_defaults()\n",
-    "\n",
-    "# build essay index\n",
-    "essay_index = VectorStoreIndex.from_documents(\n",
-    "    essay_documents, service_context=service_context, storage_context=storage_context\n",
-    ")\n",
-    "empty_index = EmptyIndex(\n",
-    "    service_context=service_context, storage_context=storage_context\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "id": "4ee2ed80-fa2a-477b-835c-464c6fc1d973",
-   "metadata": {},
-   "source": [
-    "### Query Indices\n",
-    "See the response of querying each index"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "df22aada-bd3c-48e8-98dd-ec38691a6414",
-   "metadata": {
-    "scrolled": true,
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "query_engine = essay_index.as_query_engine(\n",
-    "    similarity_top_k=3,\n",
-    "    response_mode=\"tree_summarize\",\n",
-    ")\n",
-    "response = query_engine.query(\n",
-    "    \"Tell me about what Sam Altman did during his time in YC\",\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "718f0063-e41c-42da-a6f5-3cae90f7c6d3",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "print(str(response))"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "1b934abf-bb30-4d86-b0ba-3dc60666b798",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "query_engine = empty_index.as_query_engine(response_mode=\"generation\")\n",
-    "response = query_engine.query(\n",
-    "    \"Tell me about what Sam Altman did during his time in YC\",\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "f677f144-549c-404f-aafb-5ce8fa295146",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "print(str(response))"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "id": "ff521fbb",
-   "metadata": {},
-   "source": [
-    "Define summary for each index."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "4149cbbd-7d0b-48c4-8c47-7d67ae0c55f0",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "essay_index_summary = \"This document describes Paul Graham's life, from early adulthood to the present day.\"\n",
-    "empty_index_summary = \"This can be used for general knowledge purposes.\""
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "id": "eebbc448-1e0b-402c-b37e-f93bfcc0bf4f",
-   "metadata": {},
-   "source": [
-    "### Define Graph (List Index as Parent Index)\n",
-    "\n",
-    "This allows us to synthesize responses both using a knowledge corpus as well as prior knowledge."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "c0580ff9-ca0a-4ac1-93ef-b570903ea404",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "from llama_index.indices.composability import ComposableGraph"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "eb064bf2-77f5-4205-bd1e-ec7de40a6f7f",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "graph = ComposableGraph.from_indices(\n",
-    "    ListIndex,\n",
-    "    [essay_index, empty_index],\n",
-    "    index_summaries=[essay_index_summary, empty_index_summary],\n",
-    "    service_context=service_context,\n",
-    "    storage_context=storage_context,\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "ae127943-afac-48b4-b22d-84a37e553e4b",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# [optional] persist to disk\n",
-    "storage_context.persist()\n",
-    "root_id = graph.root_id"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "dca2b64b-9af1-456f-8dab-822bfdc5d0ac",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# [optional] load from disk\n",
-    "from llama_index.indices.loading import load_graph_from_storage\n",
-    "\n",
-    "graph = load_graph_from_storage(storage_context, root_id=root_id)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "7a811f1a",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# configure query engines\n",
-    "custom_query_engines = {\n",
-    "    essay_index.index_id: essay_index.as_query_engine(\n",
-    "        similarity_top_k=3,\n",
-    "        response_mode=\"tree_summarize\",\n",
-    "    )\n",
-    "}"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "f3c4e58b-b153-4e43-bc02-274a85babbe8",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# set Logging to DEBUG for more detailed outputs\n",
-    "# ask it a question about Sam Altman\n",
-    "query_engine = graph.as_query_engine(custom_query_engines=custom_query_engines)\n",
-    "response = query_engine.query(\n",
-    "    \"Tell me about what Sam Altman did during his time in YC\",\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "c0a43443-3e00-4e48-b3ab-f6369191d53a",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "print(str(response))"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "c78bc3da-6bad-4998-9a81-90a3fa9200a9",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# Get source of response\n",
-    "print(response.get_formatted_sources())"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "id": "f437c6df-31b1-40d9-9b57-70f7e0318eb7",
-   "metadata": {
-    "tags": []
-   },
-   "source": [
-    "### Define Graph (Tree Index as Parent Index)\n",
-    "\n",
-    "This allows us to \"route\" a query to either a knowledge-augmented index, or to the LLM itself."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "d0c05040-0f6c-4e9d-bf08-4e5207ea2774",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "from llama_index.indices.composability import ComposableGraph"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "a6c1b887-9cb5-49db-a9c7-5cb348beff58",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "# configure retriever\n",
-    "custom_query_engines = {\n",
-    "    essay_index.index_id: essay_index.as_query_engine(\n",
-    "        similarity_top_k=3,\n",
-    "        response_mode=\"tree_summarize\",\n",
-    "    )\n",
-    "}"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "d5579f16-cee5-4287-b89e-635d161bdfb5",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "graph2 = ComposableGraph.from_indices(\n",
-    "    TreeIndex,\n",
-    "    [essay_index, empty_index],\n",
-    "    index_summaries=[essay_index_summary, empty_index_summary],\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "c57d370f-59af-4a2d-8fc6-05cf93d958e5",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "# set Logging to DEBUG for more detailed outputs\n",
-    "# ask it a question about NYC\n",
-    "query_engine = graph2.as_query_engine(custom_query_engines=custom_query_engines)\n",
-    "response = query_engine.query(\n",
-    "    \"Tell me about what Paul Graham did growing up?\",\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "1d99502a-ab3c-48da-bfb1-c54a95dadbb5",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "str(response)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "997498a9-128d-4c0b-8826-c6d6871571f5",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "print(response.get_formatted_sources())"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "8dc10463-ca79-4b47-83d6-217bd186d822",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "response = query_engine.query(\n",
-    "    \"Tell me about Barack Obama\",\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "9b8411a0-f9a8-4f1b-a476-03e746ec8ab3",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "str(response)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "749625a3-722c-4bf4-b4ef-55b00f20ef20",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "response.get_formatted_sources()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "78a616a3",
-   "metadata": {},
-   "outputs": [],
-   "source": []
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "Python 3.11.0 ('llama')",
-   "language": "python",
-   "name": "python3"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 3
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython3",
-   "version": "3.9.16"
-  },
-  "vscode": {
-   "interpreter": {
-    "hash": "775fd5332502f2902173832d699e1edc37222ebadd0e97b5c8a1a7431bebae89"
-   }
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 5
+    "cells": [
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "cfb64210-9c6b-47d7-81f4-67dbdab68e4c",
+            "metadata": {
+                "tags": []
+            },
+            "source": [
+                "# Composable Graph Basic"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "41927486",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# NOTE: This is ONLY necessary in jupyter notebook.\n",
+                "# Details: Jupyter runs an event-loop behind the scenes.\n",
+                "#          This results in nested event-loops when we start an event-loop to make async queries.\n",
+                "#          This is normally not allowed, we use nest_asyncio to allow it for convenience.\n",
+                "import nest_asyncio\n",
+                "\n",
+                "nest_asyncio.apply()"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "fa0e62b6",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "import logging\n",
+                "import sys\n",
+                "\n",
+                "logging.basicConfig(stream=sys.stdout, level=logging.INFO)\n",
+                "logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "e27b0473-4bda-47f0-b6ed-fd482eac1a13",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "from llama_index import (\n",
+                "    VectorStoreIndex,\n",
+                "    EmptyIndex,\n",
+                "    TreeIndex,\n",
+                "    SummaryIndex,\n",
+                "    SimpleDirectoryReader,\n",
+                "    ServiceContext,\n",
+                "    StorageContext,\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "markdown",
+            "id": "49e0d841-680f-4a0c-b455-788b54978ebf",
+            "metadata": {},
+            "source": [
+                "### Load Datasets\n",
+                "\n",
+                "Load PG's essay"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "ddff8f98-e002-40c5-93ac-93aa40dca5ca",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# load PG's essay\n",
+                "essay_documents = SimpleDirectoryReader(\"../paul_graham_essay/data/\").load_data()"
+            ]
+        },
+        {
+            "cell_type": "markdown",
+            "id": "f1782198-c0de-4679-8951-1297c21b8639",
+            "metadata": {
+                "tags": []
+            },
+            "source": [
+                "### Building the document indices\n",
+                "- Build a vector index for PG's essay\n",
+                "- Also build an empty index (to store prior knowledge)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "8b5aad4a-49ef-4b24-962a-0793f4f09316",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# configure\n",
+                "service_context = ServiceContext.from_defaults(chunk_size=512)\n",
+                "storage_context = StorageContext.from_defaults()\n",
+                "\n",
+                "# build essay index\n",
+                "essay_index = VectorStoreIndex.from_documents(\n",
+                "    essay_documents, service_context=service_context, storage_context=storage_context\n",
+                ")\n",
+                "empty_index = EmptyIndex(\n",
+                "    service_context=service_context, storage_context=storage_context\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "markdown",
+            "id": "4ee2ed80-fa2a-477b-835c-464c6fc1d973",
+            "metadata": {},
+            "source": [
+                "### Query Indices\n",
+                "See the response of querying each index"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "df22aada-bd3c-48e8-98dd-ec38691a6414",
+            "metadata": {
+                "scrolled": true,
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "query_engine = essay_index.as_query_engine(\n",
+                "    similarity_top_k=3,\n",
+                "    response_mode=\"tree_summarize\",\n",
+                ")\n",
+                "response = query_engine.query(\n",
+                "    \"Tell me about what Sam Altman did during his time in YC\",\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "718f0063-e41c-42da-a6f5-3cae90f7c6d3",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "print(str(response))"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "1b934abf-bb30-4d86-b0ba-3dc60666b798",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "query_engine = empty_index.as_query_engine(response_mode=\"generation\")\n",
+                "response = query_engine.query(\n",
+                "    \"Tell me about what Sam Altman did during his time in YC\",\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "f677f144-549c-404f-aafb-5ce8fa295146",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "print(str(response))"
+            ]
+        },
+        {
+            "cell_type": "markdown",
+            "id": "ff521fbb",
+            "metadata": {},
+            "source": [
+                "Define summary for each index."
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "4149cbbd-7d0b-48c4-8c47-7d67ae0c55f0",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "essay_index_summary = \"This document describes Paul Graham's life, from early adulthood to the present day.\"\n",
+                "empty_index_summary = \"This can be used for general knowledge purposes.\""
+            ]
+        },
+        {
+            "cell_type": "markdown",
+            "id": "eebbc448-1e0b-402c-b37e-f93bfcc0bf4f",
+            "metadata": {},
+            "source": [
+                "### Define Graph (Summary Index as Parent Index)\n",
+                "\n",
+                "This allows us to synthesize responses both using a knowledge corpus as well as prior knowledge."
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "c0580ff9-ca0a-4ac1-93ef-b570903ea404",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "from llama_index.indices.composability import ComposableGraph"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "eb064bf2-77f5-4205-bd1e-ec7de40a6f7f",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "graph = ComposableGraph.from_indices(\n",
+                "    SummaryIndex,\n",
+                "    [essay_index, empty_index],\n",
+                "    index_summaries=[essay_index_summary, empty_index_summary],\n",
+                "    service_context=service_context,\n",
+                "    storage_context=storage_context,\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "ae127943-afac-48b4-b22d-84a37e553e4b",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# [optional] persist to disk\n",
+                "storage_context.persist()\n",
+                "root_id = graph.root_id"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "dca2b64b-9af1-456f-8dab-822bfdc5d0ac",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# [optional] load from disk\n",
+                "from llama_index.indices.loading import load_graph_from_storage\n",
+                "\n",
+                "graph = load_graph_from_storage(storage_context, root_id=root_id)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "7a811f1a",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# configure query engines\n",
+                "custom_query_engines = {\n",
+                "    essay_index.index_id: essay_index.as_query_engine(\n",
+                "        similarity_top_k=3,\n",
+                "        response_mode=\"tree_summarize\",\n",
+                "    )\n",
+                "}"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "f3c4e58b-b153-4e43-bc02-274a85babbe8",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# set Logging to DEBUG for more detailed outputs\n",
+                "# ask it a question about Sam Altman\n",
+                "query_engine = graph.as_query_engine(custom_query_engines=custom_query_engines)\n",
+                "response = query_engine.query(\n",
+                "    \"Tell me about what Sam Altman did during his time in YC\",\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "c0a43443-3e00-4e48-b3ab-f6369191d53a",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "print(str(response))"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "c78bc3da-6bad-4998-9a81-90a3fa9200a9",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# Get source of response\n",
+                "print(response.get_formatted_sources())"
+            ]
+        },
+        {
+            "cell_type": "markdown",
+            "id": "f437c6df-31b1-40d9-9b57-70f7e0318eb7",
+            "metadata": {
+                "tags": []
+            },
+            "source": [
+                "### Define Graph (Tree Index as Parent Index)\n",
+                "\n",
+                "This allows us to \"route\" a query to either a knowledge-augmented index, or to the LLM itself."
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "d0c05040-0f6c-4e9d-bf08-4e5207ea2774",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "from llama_index.indices.composability import ComposableGraph"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "a6c1b887-9cb5-49db-a9c7-5cb348beff58",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "# configure retriever\n",
+                "custom_query_engines = {\n",
+                "    essay_index.index_id: essay_index.as_query_engine(\n",
+                "        similarity_top_k=3,\n",
+                "        response_mode=\"tree_summarize\",\n",
+                "    )\n",
+                "}"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "d5579f16-cee5-4287-b89e-635d161bdfb5",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "graph2 = ComposableGraph.from_indices(\n",
+                "    TreeIndex,\n",
+                "    [essay_index, empty_index],\n",
+                "    index_summaries=[essay_index_summary, empty_index_summary],\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "c57d370f-59af-4a2d-8fc6-05cf93d958e5",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "# set Logging to DEBUG for more detailed outputs\n",
+                "# ask it a question about Paul Graham\n",
+                "query_engine = graph2.as_query_engine(custom_query_engines=custom_query_engines)\n",
+                "response = query_engine.query(\n",
+                "    \"Tell me about what Paul Graham did growing up?\",\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "1d99502a-ab3c-48da-bfb1-c54a95dadbb5",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "str(response)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "997498a9-128d-4c0b-8826-c6d6871571f5",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "print(response.get_formatted_sources())"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "8dc10463-ca79-4b47-83d6-217bd186d822",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "response = query_engine.query(\n",
+                "    \"Tell me about Barack Obama\",\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "9b8411a0-f9a8-4f1b-a476-03e746ec8ab3",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "str(response)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "749625a3-722c-4bf4-b4ef-55b00f20ef20",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "response.get_formatted_sources()"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "78a616a3",
+            "metadata": {},
+            "outputs": [],
+            "source": []
+        }
+    ],
+    "metadata": {
+        "kernelspec": {
+            "display_name": "Python 3.11.0 ('llama')",
+            "language": "python",
+            "name": "python3"
+        },
+        "language_info": {
+            "codemirror_mode": {
+                "name": "ipython",
+                "version": 3
+            },
+            "file_extension": ".py",
+            "mimetype": "text/x-python",
+            "name": "python",
+            "nbconvert_exporter": "python",
+            "pygments_lexer": "ipython3",
+            "version": "3.9.16"
+        },
+        "vscode": {
+            "interpreter": {
+                "hash": "775fd5332502f2902173832d699e1edc37222ebadd0e97b5c8a1a7431bebae89"
+            }
+        }
+    },
+    "nbformat": 4,
+    "nbformat_minor": 5
 }
diff --git a/docs/examples/composable_indices/ComposableIndices-Weaviate.ipynb b/docs/examples/composable_indices/ComposableIndices-Weaviate.ipynb
index 7b57e5d9ca3fc631245d56d905442cb5980c2db9..9837de909a0f376c4b55420c94ef5e3d60dc15f7 100644
--- a/docs/examples/composable_indices/ComposableIndices-Weaviate.ipynb
+++ b/docs/examples/composable_indices/ComposableIndices-Weaviate.ipynb
@@ -29,7 +29,7 @@
     "from llama_index import (\n",
     "    VectorStoreIndex,\n",
     "    SimpleKeywordTableIndex,\n",
-    "    ListIndex,\n",
+    "    SummaryIndex,\n",
     "    VectorStoreIndex,\n",
     "    SimpleDirectoryReader,\n",
     ")\n",
diff --git a/docs/examples/composable_indices/city_analysis/City_Analysis.ipynb b/docs/examples/composable_indices/city_analysis/City_Analysis.ipynb
index 88fbac9d0c4c8a323e296224384d183ec3891390..4cfe2b56a5e0734b996b5d040b2e580614dc1ae0 100644
--- a/docs/examples/composable_indices/city_analysis/City_Analysis.ipynb
+++ b/docs/examples/composable_indices/city_analysis/City_Analysis.ipynb
@@ -1,2322 +1,2322 @@
 {
- "cells": [
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "cfb64210-9c6b-47d7-81f4-67dbdab68e4c",
-   "metadata": {
-    "id": "cfb64210-9c6b-47d7-81f4-67dbdab68e4c",
-    "tags": []
-   },
-   "source": [
-    "# Test Complex Queries over Multiple Documents (text-davinci-003 vs. ChatGPT)\n",
-    "\n",
-    "Test complex queries over both text-davinci-003 and ChatGPT"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "D2ZI8iKch-V_",
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/"
+    "cells": [
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "cfb64210-9c6b-47d7-81f4-67dbdab68e4c",
+            "metadata": {
+                "id": "cfb64210-9c6b-47d7-81f4-67dbdab68e4c",
+                "tags": []
+            },
+            "source": [
+                "# Test Complex Queries over Multiple Documents (text-davinci-003 vs. ChatGPT)\n",
+                "\n",
+                "Test complex queries over both text-davinci-003 and ChatGPT"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "D2ZI8iKch-V_",
+            "metadata": {
+                "colab": {
+                    "base_uri": "https://localhost:8080/"
+                },
+                "id": "D2ZI8iKch-V_",
+                "outputId": "bc63c640-8508-4c74-8bd9-3fc1495b7839"
+            },
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "Looking in indexes: https://pypi.org/simple, https://us-python.pkg.dev/colab-wheels/public/simple/\n",
+                        "Collecting llama-index\n",
+                        "  Downloading llama_index-0.4.17.tar.gz (122 kB)\n",
+                        "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m122.8/122.8 KB\u001b[0m \u001b[31m9.7 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
+                        "\u001b[?25h  Preparing metadata (setup.py) ... \u001b[?25l\u001b[?25hdone\n",
+                        "Collecting langchain\n",
+                        "  Downloading langchain-0.0.98-py3-none-any.whl (337 kB)\n",
+                        "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m337.8/337.8 KB\u001b[0m \u001b[31m23.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
+                        "\u001b[?25hCollecting openai>=0.26.4\n",
+                        "  Downloading openai-0.27.0-py3-none-any.whl (70 kB)\n",
+                        "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m70.1/70.1 KB\u001b[0m \u001b[31m4.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
+                        "\u001b[?25hCollecting dataclasses_json\n",
+                        "  Downloading dataclasses_json-0.5.7-py3-none-any.whl (25 kB)\n",
+                        "Collecting transformers\n",
+                        "  Downloading transformers-4.26.1-py3-none-any.whl (6.3 MB)\n",
+                        "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m6.3/6.3 MB\u001b[0m \u001b[31m73.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
+                        "\u001b[?25hRequirement already satisfied: nltk in /usr/local/lib/python3.8/dist-packages (from llama-index) (3.7)\n",
+                        "Requirement already satisfied: numpy in /usr/local/lib/python3.8/dist-packages (from llama-index) (1.22.4)\n",
+                        "Collecting tenacity<8.2.0\n",
+                        "  Downloading tenacity-8.1.0-py3-none-any.whl (23 kB)\n",
+                        "Requirement already satisfied: pandas in /usr/local/lib/python3.8/dist-packages (from llama-index) (1.3.5)\n",
+                        "Requirement already satisfied: tqdm in /usr/local/lib/python3.8/dist-packages (from openai>=0.26.4->llama-index) (4.64.1)\n",
+                        "Requirement already satisfied: requests>=2.20 in /usr/local/lib/python3.8/dist-packages (from openai>=0.26.4->llama-index) (2.25.1)\n",
+                        "Requirement already satisfied: aiohttp in /usr/local/lib/python3.8/dist-packages (from openai>=0.26.4->llama-index) (3.8.4)\n",
+                        "Requirement already satisfied: marshmallow<4.0.0,>=3.3.0 in /usr/local/lib/python3.8/dist-packages (from dataclasses_json->llama-index) (3.19.0)\n",
+                        "Collecting marshmallow-enum<2.0.0,>=1.5.1\n",
+                        "  Downloading marshmallow_enum-1.5.1-py2.py3-none-any.whl (4.2 kB)\n",
+                        "Collecting typing-inspect>=0.4.0\n",
+                        "  Downloading typing_inspect-0.8.0-py3-none-any.whl (8.7 kB)\n",
+                        "Collecting deeplake<4.0.0,>=3.2.9\n",
+                        "  Downloading deeplake-3.2.12.tar.gz (439 kB)\n",
+                        "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m439.1/439.1 KB\u001b[0m \u001b[31m31.3 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
+                        "\u001b[?25h  Preparing metadata (setup.py) ... \u001b[?25l\u001b[?25hdone\n",
+                        "Requirement already satisfied: PyYAML<7,>=6 in /usr/local/lib/python3.8/dist-packages (from langchain->llama-index) (6.0)\n",
+                        "Requirement already satisfied: SQLAlchemy<2,>=1 in /usr/local/lib/python3.8/dist-packages (from langchain->llama-index) (1.4.46)\n",
+                        "Requirement already satisfied: pydantic<2,>=1 in /usr/local/lib/python3.8/dist-packages (from langchain->llama-index) (1.10.5)\n",
+                        "Collecting aleph-alpha-client<3.0.0,>=2.15.0\n",
+                        "  Downloading aleph_alpha_client-2.16.0-py3-none-any.whl (38 kB)\n",
+                        "Requirement already satisfied: joblib in /usr/local/lib/python3.8/dist-packages (from nltk->llama-index) (1.2.0)\n",
+                        "Requirement already satisfied: regex>=2021.8.3 in /usr/local/lib/python3.8/dist-packages (from nltk->llama-index) (2022.6.2)\n",
+                        "Requirement already satisfied: click in /usr/local/lib/python3.8/dist-packages (from nltk->llama-index) (8.1.3)\n",
+                        "Requirement already satisfied: python-dateutil>=2.7.3 in /usr/local/lib/python3.8/dist-packages (from pandas->llama-index) (2.8.2)\n",
+                        "Requirement already satisfied: pytz>=2017.3 in /usr/local/lib/python3.8/dist-packages (from pandas->llama-index) (2022.7.1)\n",
+                        "Collecting huggingface-hub<1.0,>=0.11.0\n",
+                        "  Downloading huggingface_hub-0.12.1-py3-none-any.whl (190 kB)\n",
+                        "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m190.3/190.3 KB\u001b[0m \u001b[31m3.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
+                        "\u001b[?25hCollecting tokenizers!=0.11.3,<0.14,>=0.11.1\n",
+                        "  Downloading tokenizers-0.13.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (7.6 MB)\n",
+                        "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m7.6/7.6 MB\u001b[0m \u001b[31m48.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
+                        "\u001b[?25hRequirement already satisfied: packaging>=20.0 in /usr/local/lib/python3.8/dist-packages (from transformers->llama-index) (23.0)\n",
+                        "Requirement already satisfied: filelock in /usr/local/lib/python3.8/dist-packages (from transformers->llama-index) (3.9.0)\n",
+                        "Requirement already satisfied: multidict<7.0,>=4.5 in /usr/local/lib/python3.8/dist-packages (from aiohttp->openai>=0.26.4->llama-index) (6.0.4)\n",
+                        "Requirement already satisfied: attrs>=17.3.0 in /usr/local/lib/python3.8/dist-packages (from aiohttp->openai>=0.26.4->llama-index) (22.2.0)\n",
+                        "Requirement already satisfied: aiosignal>=1.1.2 in /usr/local/lib/python3.8/dist-packages (from aiohttp->openai>=0.26.4->llama-index) (1.3.1)\n",
+                        "Requirement already satisfied: frozenlist>=1.1.1 in /usr/local/lib/python3.8/dist-packages (from aiohttp->openai>=0.26.4->llama-index) (1.3.3)\n",
+                        "Requirement already satisfied: async-timeout<5.0,>=4.0.0a3 in /usr/local/lib/python3.8/dist-packages (from aiohttp->openai>=0.26.4->llama-index) (4.0.2)\n",
+                        "Requirement already satisfied: charset-normalizer<4.0,>=2.0 in /usr/local/lib/python3.8/dist-packages (from aiohttp->openai>=0.26.4->llama-index) (3.0.1)\n",
+                        "Requirement already satisfied: yarl<2.0,>=1.0 in /usr/local/lib/python3.8/dist-packages (from aiohttp->openai>=0.26.4->llama-index) (1.8.2)\n",
+                        "Requirement already satisfied: urllib3>=1.26 in /usr/local/lib/python3.8/dist-packages (from aleph-alpha-client<3.0.0,>=2.15.0->langchain->llama-index) (1.26.14)\n",
+                        "Collecting aiohttp-retry>=2.8.3\n",
+                        "  Downloading aiohttp_retry-2.8.3-py3-none-any.whl (9.8 kB)\n",
+                        "Collecting aiodns>=3.0.0\n",
+                        "  Downloading aiodns-3.0.0-py3-none-any.whl (5.0 kB)\n",
+                        "Collecting requests>=2.20\n",
+                        "  Downloading requests-2.28.2-py3-none-any.whl (62 kB)\n",
+                        "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m62.8/62.8 KB\u001b[0m \u001b[31m4.7 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
+                        "\u001b[?25hRequirement already satisfied: pillow in /usr/local/lib/python3.8/dist-packages (from deeplake<4.0.0,>=3.2.9->langchain->llama-index) (8.4.0)\n",
+                        "Collecting boto3\n",
+                        "  Downloading boto3-1.26.82-py3-none-any.whl (134 kB)\n",
+                        "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m134.7/134.7 KB\u001b[0m \u001b[31m6.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
+                        "\u001b[?25hCollecting pathos\n",
+                        "  Downloading pathos-0.3.0-py3-none-any.whl (79 kB)\n",
+                        "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m79.8/79.8 KB\u001b[0m \u001b[31m8.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
+                        "\u001b[?25hCollecting humbug>=0.2.6\n",
+                        "  Downloading humbug-0.2.8-py3-none-any.whl (13 kB)\n",
+                        "Collecting numcodecs\n",
+                        "  Downloading numcodecs-0.11.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (6.7 MB)\n",
+                        "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m6.7/6.7 MB\u001b[0m \u001b[31m41.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
+                        "\u001b[?25hCollecting pyjwt\n",
+                        "  Downloading PyJWT-2.6.0-py3-none-any.whl (20 kB)\n",
+                        "Collecting hub>=2.8.7\n",
+                        "  Downloading hub-3.0.1-py3-none-any.whl (1.4 kB)\n",
+                        "Requirement already satisfied: typing-extensions>=3.7.4.3 in /usr/local/lib/python3.8/dist-packages (from huggingface-hub<1.0,>=0.11.0->transformers->llama-index) (4.5.0)\n",
+                        "Requirement already satisfied: six>=1.5 in /usr/local/lib/python3.8/dist-packages (from python-dateutil>=2.7.3->pandas->llama-index) (1.15.0)\n",
+                        "Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.8/dist-packages (from requests>=2.20->openai>=0.26.4->llama-index) (2.10)\n",
+                        "Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.8/dist-packages (from requests>=2.20->openai>=0.26.4->llama-index) (2022.12.7)\n",
+                        "Requirement already satisfied: greenlet!=0.4.17 in /usr/local/lib/python3.8/dist-packages (from SQLAlchemy<2,>=1->langchain->llama-index) (2.0.2)\n",
+                        "Collecting mypy-extensions>=0.3.0\n",
+                        "  Downloading mypy_extensions-1.0.0-py3-none-any.whl (4.7 kB)\n",
+                        "Collecting pycares>=4.0.0\n",
+                        "  Downloading pycares-4.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (288 kB)\n",
+                        "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m289.0/289.0 KB\u001b[0m \u001b[31m19.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
+                        "\u001b[?25hCollecting s3transfer<0.7.0,>=0.6.0\n",
+                        "  Downloading s3transfer-0.6.0-py3-none-any.whl (79 kB)\n",
+                        "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m79.6/79.6 KB\u001b[0m \u001b[31m6.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
+                        "\u001b[?25hCollecting botocore<1.30.0,>=1.29.82\n",
+                        "  Downloading botocore-1.29.82-py3-none-any.whl (10.5 MB)\n",
+                        "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m10.5/10.5 MB\u001b[0m \u001b[31m69.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
+                        "\u001b[?25hCollecting jmespath<2.0.0,>=0.7.1\n",
+                        "  Downloading jmespath-1.0.1-py3-none-any.whl (20 kB)\n",
+                        "Requirement already satisfied: entrypoints in /usr/local/lib/python3.8/dist-packages (from numcodecs->deeplake<4.0.0,>=3.2.9->langchain->llama-index) (0.4)\n",
+                        "Collecting ppft>=1.7.6.6\n",
+                        "  Downloading ppft-1.7.6.6-py3-none-any.whl (52 kB)\n",
+                        "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m52.8/52.8 KB\u001b[0m \u001b[31m4.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
+                        "\u001b[?25hCollecting pox>=0.3.2\n",
+                        "  Downloading pox-0.3.2-py3-none-any.whl (29 kB)\n",
+                        "Requirement already satisfied: dill>=0.3.6 in /usr/local/lib/python3.8/dist-packages (from pathos->deeplake<4.0.0,>=3.2.9->langchain->llama-index) (0.3.6)\n",
+                        "Collecting multiprocess>=0.70.14\n",
+                        "  Downloading multiprocess-0.70.14-py38-none-any.whl (132 kB)\n",
+                        "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m132.0/132.0 KB\u001b[0m \u001b[31m10.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
+                        "\u001b[?25hRequirement already satisfied: cffi>=1.5.0 in /usr/local/lib/python3.8/dist-packages (from pycares>=4.0.0->aiodns>=3.0.0->aleph-alpha-client<3.0.0,>=2.15.0->langchain->llama-index) (1.15.1)\n",
+                        "Requirement already satisfied: pycparser in /usr/local/lib/python3.8/dist-packages (from cffi>=1.5.0->pycares>=4.0.0->aiodns>=3.0.0->aleph-alpha-client<3.0.0,>=2.15.0->langchain->llama-index) (2.21)\n",
+                        "Building wheels for collected packages: llama-index, deeplake\n",
+                        "  Building wheel for llama-index (setup.py) ... \u001b[?25l\u001b[?25hdone\n",
+                        "  Created wheel for llama-index: filename=llama_index-0.4.17-py3-none-any.whl size=182750 sha256=67cb3c836e93d9d29a73307c2393d49392a4c8ceae94be552e0a91ca4b1d2cf1\n",
+                        "  Stored in directory: /root/.cache/pip/wheels/15/bb/a9/de82e6a211b5f22899972226d5164f91546e6ac016bbd6c248\n",
+                        "  Building wheel for deeplake (setup.py) ... \u001b[?25l\u001b[?25hdone\n",
+                        "  Created wheel for deeplake: filename=deeplake-3.2.12-py3-none-any.whl size=534308 sha256=b49c2dd3396d018a03f60c580ca9f15903b45507d648336b281f36605cb7950f\n",
+                        "  Stored in directory: /root/.cache/pip/wheels/4b/1a/74/4b341aa1a16e01324c9728738ff705c049c3fa2a09e40d3d9f\n",
+                        "Successfully built llama-index deeplake\n",
+                        "Installing collected packages: tokenizers, tenacity, requests, pyjwt, ppft, pox, numcodecs, mypy-extensions, multiprocess, jmespath, typing-inspect, pycares, pathos, marshmallow-enum, humbug, huggingface-hub, botocore, transformers, s3transfer, openai, dataclasses_json, aiohttp-retry, aiodns, boto3, aleph-alpha-client, hub, deeplake, langchain, llama-index\n",
+                        "  Attempting uninstall: tenacity\n",
+                        "    Found existing installation: tenacity 8.2.1\n",
+                        "    Uninstalling tenacity-8.2.1:\n",
+                        "      Successfully uninstalled tenacity-8.2.1\n",
+                        "  Attempting uninstall: requests\n",
+                        "    Found existing installation: requests 2.25.1\n",
+                        "    Uninstalling requests-2.25.1:\n",
+                        "      Successfully uninstalled requests-2.25.1\n",
+                        "Successfully installed aiodns-3.0.0 aiohttp-retry-2.8.3 aleph-alpha-client-2.16.0 boto3-1.26.82 botocore-1.29.82 dataclasses_json-0.5.7 deeplake-3.2.12 hub-3.0.1 huggingface-hub-0.12.1 humbug-0.2.8 jmespath-1.0.1 langchain-0.0.98 llama-index-0.4.17 marshmallow-enum-1.5.1 multiprocess-0.70.14 mypy-extensions-1.0.0 numcodecs-0.11.0 openai-0.27.0 pathos-0.3.0 pox-0.3.2 ppft-1.7.6.6 pycares-4.3.0 pyjwt-2.6.0 requests-2.28.2 s3transfer-0.6.0 tenacity-8.1.0 tokenizers-0.13.2 transformers-4.26.1 typing-inspect-0.8.0\n"
+                    ]
+                }
+            ],
+            "source": [
+                "!pip install llama-index"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "d35ov8dk_6WP",
+            "metadata": {
+                "id": "d35ov8dk_6WP"
+            },
+            "outputs": [],
+            "source": [
+                "# My OpenAI Key\n",
+                "import os\n",
+                "\n",
+                "os.environ[\"OPENAI_API_KEY\"] = \"\""
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "fa0e62b6",
+            "metadata": {
+                "id": "fa0e62b6"
+            },
+            "outputs": [],
+            "source": [
+                "import logging\n",
+                "import sys\n",
+                "\n",
+                "logging.basicConfig(stream=sys.stdout, level=logging.INFO)\n",
+                "logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 7,
+            "id": "e27b0473-4bda-47f0-b6ed-fd482eac1a13",
+            "metadata": {
+                "id": "e27b0473-4bda-47f0-b6ed-fd482eac1a13"
+            },
+            "outputs": [],
+            "source": [
+                "from llama_index import (\n",
+                "    VectorStoreIndex,\n",
+                "    SimpleKeywordTableIndex,\n",
+                "    SummaryIndex,\n",
+                "    SimpleDirectoryReader,\n",
+                "    LLMPredictor,\n",
+                "    ServiceContext,\n",
+                ")\n",
+                "from llama_index.llms import OpenAI\n",
+                "import requests"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "49e0d841-680f-4a0c-b455-788b54978ebf",
+            "metadata": {
+                "id": "49e0d841-680f-4a0c-b455-788b54978ebf"
+            },
+            "source": [
+                "#### Load Datasets\n",
+                "\n",
+                "Load Wikipedia pages as well as Paul Graham's \"What I Worked On\" essay"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 8,
+            "id": "fc4692a1",
+            "metadata": {
+                "id": "fc4692a1"
+            },
+            "outputs": [],
+            "source": [
+                "wiki_titles = [\n",
+                "    \"Toronto\",\n",
+                "    \"Seattle\",\n",
+                "    \"San Francisco\",\n",
+                "    \"Chicago\",\n",
+                "    \"Boston\",\n",
+                "    \"Washington, D.C.\",\n",
+                "    \"Cambridge, Massachusetts\",\n",
+                "    \"Houston\",\n",
+                "]"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 9,
+            "id": "9ec16a8b-6aae-4bf7-9b83-b82087b4ea52",
+            "metadata": {
+                "id": "9ec16a8b-6aae-4bf7-9b83-b82087b4ea52"
+            },
+            "outputs": [],
+            "source": [
+                "from pathlib import Path\n",
+                "\n",
+                "import requests\n",
+                "\n",
+                "for title in wiki_titles:\n",
+                "    response = requests.get(\n",
+                "        \"https://en.wikipedia.org/w/api.php\",\n",
+                "        params={\n",
+                "            \"action\": \"query\",\n",
+                "            \"format\": \"json\",\n",
+                "            \"titles\": title,\n",
+                "            \"prop\": \"extracts\",\n",
+                "            # 'exintro': True,\n",
+                "            \"explaintext\": True,\n",
+                "        },\n",
+                "    ).json()\n",
+                "    page = next(iter(response[\"query\"][\"pages\"].values()))\n",
+                "    wiki_text = page[\"extract\"]\n",
+                "\n",
+                "    data_path = Path(\"data\")\n",
+                "    if not data_path.exists():\n",
+                "        Path.mkdir(data_path)\n",
+                "\n",
+                "    with open(data_path / f\"{title}.txt\", \"w\") as fp:\n",
+                "        fp.write(wiki_text)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 10,
+            "id": "39c00aeb-adef-4ce3-8134-031de18e64ea",
+            "metadata": {
+                "id": "39c00aeb-adef-4ce3-8134-031de18e64ea"
+            },
+            "outputs": [],
+            "source": [
+                "# Load all wiki documents\n",
+                "city_docs = {}\n",
+                "for wiki_title in wiki_titles:\n",
+                "    city_docs[wiki_title] = SimpleDirectoryReader(\n",
+                "        input_files=[f\"data/{wiki_title}.txt\"]\n",
+                "    ).load_data()"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "f1782198-c0de-4679-8951-1297c21b8639",
+            "metadata": {
+                "id": "f1782198-c0de-4679-8951-1297c21b8639"
+            },
+            "source": [
+                "### Building the document indices\n",
+                "Build a vector index for the wiki pages about cities and persons, and PG essay"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 11,
+            "id": "M0GylZB-C2zL",
+            "metadata": {
+                "id": "M0GylZB-C2zL"
+            },
+            "outputs": [],
+            "source": [
+                "# LLM Predictor (text-davinci-003)\n",
+                "davinci = OpenAI(temperature=0, model=\"text-davinci-003\")\n",
+                "service_context_davinci = ServiceContext.from_defaults(llm=davinci)\n",
+                "\n",
+                "# # LLM Predictor (gpt-3.5-turbo)\n",
+                "chatgpt = OpenAI(temperature=0, model=\"gpt-3.5-turbo\")\n",
+                "service_context_chatgpt = ServiceContext.from_defaults(llm=chatgpt)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 12,
+            "id": "5431e83e-428b-4473-bad1-24b7a6c4db38",
+            "metadata": {
+                "colab": {
+                    "base_uri": "https://localhost:8080/",
+                    "height": 183,
+                    "referenced_widgets": [
+                        "b5566e3db2914ddebd80d7bde75b2559",
+                        "208d404f405a42a3b06d65ad67fb7322",
+                        "7da29a2b6508494282acbc459eccbb96",
+                        "47838fa763ca40598b2622a9d1e79444",
+                        "ff32a3f12e814740a1cd5dd12bd731d4",
+                        "3fef46c902524717b377dee6c1dfc929",
+                        "fd8b887c1f7149f2876cf8a31e534ad6",
+                        "7438aea716f44d85ad1c2b49a93acd83",
+                        "fe39f994fa9b4d7daa232e1dcd2b0e8b",
+                        "b102e756f9b848a98f58396fc825be84",
+                        "fbd7219af1924d2ead5310eb7b35aab0",
+                        "3b4c1066797b43a586611ec2d63e7ca1",
+                        "c06865c1e01a441698dacf48600dd03c",
+                        "9d229e5dd56e4d539ca2c1b9f0a37812",
+                        "868aa268dd28498d902782215e53c6fa",
+                        "46f644cf589e4a48a6fad1742f0c0575",
+                        "adb40ef11f094594b14776e238955224",
+                        "7b47c78391a4431aa2d3f84677f24046",
+                        "398f1c0f56fe4f218d999df138adfdac",
+                        "f1839e86863948f68314f81ba6bca4c9",
+                        "3c37e72850c746ce9c919add5340dede",
+                        "2053e6adef1b4dba89f861eaf3d916fd",
+                        "eab4127882d24acfa9518ebff6f4e22a",
+                        "64b754f563834be0a6963349b1f2dcf2",
+                        "c7636a6d7380465895b8c86d34caf500",
+                        "f7803dea63994cc2a31acf805bd19e67",
+                        "380a0c11434241b191b17421e395be8b",
+                        "a02534c347aa4865ab4ab3de3a3ee2f5",
+                        "b0ccb9d9d96e4ed8bec4d540c34d337c",
+                        "f22e9615de674e05978f332eb88750cf",
+                        "b53e8481f6d64018988dc03081bf2765",
+                        "b458d6fa793d4fa080b9f1e5013af3de",
+                        "119d6d7a8d524aa49170f5784ebc6b9e",
+                        "d55f842766484d299c75f74e31e7aa6a",
+                        "1bdaf4dab16f48dbaeed3fb9bf268e45",
+                        "026cc1a42e154f1f92b5236869311929",
+                        "a2edbc4195d843e0acfba83726a08e78",
+                        "40e148c291ad4f739998a7eac55a8af6",
+                        "028aa5d1f7a74d538b5c606d4a6d146f",
+                        "c078fe9a056a473dab7d474cd7907154",
+                        "4cc9ec6ba46647aba2d53e352f91c137",
+                        "f2a1c5087d0e44909139697ed90474e8",
+                        "7b24b46d6c3643e581ba003a9c473745",
+                        "3f748152b9274556afad2555572aa9f4"
+                    ]
+                },
+                "id": "5431e83e-428b-4473-bad1-24b7a6c4db38",
+                "outputId": "5721e863-d460-4f5c-9e36-5a586180b669"
+            },
+            "outputs": [
+                {
+                    "name": "stderr",
+                    "output_type": "stream",
+                    "text": [
+                        "INFO:llama_index.token_counter.token_counter:> [build_index_from_nodes] Total LLM token usage: 0 tokens\n",
+                        "INFO:llama_index.token_counter.token_counter:> [build_index_from_nodes] Total embedding token usage: 17592 tokens\n",
+                        "INFO:llama_index.token_counter.token_counter:> [build_index_from_nodes] Total LLM token usage: 0 tokens\n",
+                        "INFO:llama_index.token_counter.token_counter:> [build_index_from_nodes] Total embedding token usage: 14402 tokens\n",
+                        "INFO:llama_index.token_counter.token_counter:> [build_index_from_nodes] Total LLM token usage: 0 tokens\n",
+                        "INFO:llama_index.token_counter.token_counter:> [build_index_from_nodes] Total embedding token usage: 19954 tokens\n",
+                        "INFO:llama_index.token_counter.token_counter:> [build_index_from_nodes] Total LLM token usage: 0 tokens\n",
+                        "INFO:llama_index.token_counter.token_counter:> [build_index_from_nodes] Total embedding token usage: 22057 tokens\n",
+                        "INFO:llama_index.token_counter.token_counter:> [build_index_from_nodes] Total LLM token usage: 0 tokens\n",
+                        "INFO:llama_index.token_counter.token_counter:> [build_index_from_nodes] Total embedding token usage: 15733 tokens\n",
+                        "INFO:llama_index.token_counter.token_counter:> [build_index_from_nodes] Total LLM token usage: 0 tokens\n",
+                        "INFO:llama_index.token_counter.token_counter:> [build_index_from_nodes] Total embedding token usage: 18327 tokens\n",
+                        "INFO:llama_index.token_counter.token_counter:> [build_index_from_nodes] Total LLM token usage: 0 tokens\n",
+                        "INFO:llama_index.token_counter.token_counter:> [build_index_from_nodes] Total embedding token usage: 10999 tokens\n",
+                        "INFO:llama_index.token_counter.token_counter:> [build_index_from_nodes] Total LLM token usage: 0 tokens\n",
+                        "INFO:llama_index.token_counter.token_counter:> [build_index_from_nodes] Total embedding token usage: 18480 tokens\n"
+                    ]
+                }
+            ],
+            "source": [
+                "# Build city document index\n",
+                "city_indices = {}\n",
+                "for wiki_title in wiki_titles:\n",
+                "    city_indices[wiki_title] = VectorStoreIndex.from_documents(city_docs[wiki_title])"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "d4d3cd8b-4134-4cfa-8002-e0a34694d2e1",
+            "metadata": {
+                "id": "d4d3cd8b-4134-4cfa-8002-e0a34694d2e1",
+                "tags": []
+            },
+            "source": [
+                "### Build Graph: Keyword Table Index on top of vector indices! \n",
+                "\n",
+                "We compose a keyword table index on top of all the vector indices."
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 14,
+            "id": "be1e3d7d-c4a3-4268-9408-b3cb984ffa4a",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# set summaries for each city\n",
+                "index_summaries = {}\n",
+                "for wiki_title in wiki_titles:\n",
+                "    # set summary text for city\n",
+                "    index_summaries[wiki_title] = f\"Wikipedia articles about {wiki_title}\""
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 15,
+            "id": "ddc2e4de-0719-4607-86f8-18c953344199",
+            "metadata": {},
+            "outputs": [
+                {
+                    "name": "stderr",
+                    "output_type": "stream",
+                    "text": [
+                        "INFO:llama_index.token_counter.token_counter:> [build_index_from_nodes] Total LLM token usage: 0 tokens\n",
+                        "INFO:llama_index.token_counter.token_counter:> [build_index_from_nodes] Total embedding token usage: 0 tokens\n"
+                    ]
+                }
+            ],
+            "source": [
+                "from llama_index.indices.composability import ComposableGraph\n",
+                "\n",
+                "graph = ComposableGraph.from_indices(\n",
+                "    SimpleKeywordTableIndex,\n",
+                "    [index for _, index in city_indices.items()],\n",
+                "    [summary for _, summary in index_summaries.items()],\n",
+                "    max_keywords_per_chunk=50,\n",
+                ")"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "49c900ee-a31f-4fcd-bb44-ff2cd12a41eb",
+            "metadata": {
+                "id": "49c900ee-a31f-4fcd-bb44-ff2cd12a41eb"
+            },
+            "source": [
+                "### Compare Queries (text-davinci-003 vs. ChatGPT)"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "e0a8fa6a-e96e-4341-bb43-7547415f766e",
+            "metadata": {
+                "id": "e0a8fa6a-e96e-4341-bb43-7547415f766e"
+            },
+            "source": [
+                "**Simple Query**"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 25,
+            "id": "OVnzf3myEz88",
+            "metadata": {
+                "id": "OVnzf3myEz88",
+                "tags": []
+            },
+            "outputs": [
+                {
+                    "name": "stderr",
+                    "output_type": "stream",
+                    "text": [
+                        "INFO:llama_index.indices.keyword_table.retrievers:> Starting query: Tell me more about Boston\n",
+                        "INFO:llama_index.indices.keyword_table.retrievers:query keywords: ['tell', 'boston']\n",
+                        "INFO:llama_index.indices.keyword_table.retrievers:> Extracted keywords: ['boston']\n",
+                        "INFO:llama_index.token_counter.token_counter:> [retrieve] Total LLM token usage: 0 tokens\n",
+                        "INFO:llama_index.token_counter.token_counter:> [retrieve] Total embedding token usage: 5 tokens\n",
+                        "INFO:llama_index.indices.common_tree.base:> Building index from nodes: 1 chunks\n",
+                        "INFO:llama_index.token_counter.token_counter:> [get_response] Total LLM token usage: 802 tokens\n",
+                        "INFO:llama_index.token_counter.token_counter:> [get_response] Total embedding token usage: 0 tokens\n",
+                        "INFO:llama_index.token_counter.token_counter:> [get_response] Total LLM token usage: 4801 tokens\n",
+                        "INFO:llama_index.token_counter.token_counter:> [get_response] Total embedding token usage: 0 tokens\n",
+                        "INFO:llama_index.token_counter.token_counter:> [get_response] Total LLM token usage: 545 tokens\n",
+                        "INFO:llama_index.token_counter.token_counter:> [get_response] Total embedding token usage: 0 tokens\n",
+                        "INFO:llama_index.token_counter.token_counter:> [get_response] Total LLM token usage: 545 tokens\n",
+                        "INFO:llama_index.token_counter.token_counter:> [get_response] Total embedding token usage: 0 tokens\n",
+                        "INFO:llama_index.indices.keyword_table.retrievers:> Starting query: Tell me more about Boston\n",
+                        "INFO:llama_index.indices.keyword_table.retrievers:query keywords: ['tell', 'boston']\n",
+                        "INFO:llama_index.indices.keyword_table.retrievers:> Extracted keywords: ['boston']\n",
+                        "INFO:llama_index.token_counter.token_counter:> [retrieve] Total LLM token usage: 0 tokens\n",
+                        "INFO:llama_index.token_counter.token_counter:> [retrieve] Total embedding token usage: 5 tokens\n",
+                        "INFO:llama_index.indices.common_tree.base:> Building index from nodes: 1 chunks\n",
+                        "INFO:llama_index.token_counter.token_counter:> [get_response] Total LLM token usage: 641 tokens\n",
+                        "INFO:llama_index.token_counter.token_counter:> [get_response] Total embedding token usage: 0 tokens\n",
+                        "INFO:llama_index.token_counter.token_counter:> [get_response] Total LLM token usage: 4580 tokens\n",
+                        "INFO:llama_index.token_counter.token_counter:> [get_response] Total embedding token usage: 0 tokens\n",
+                        "INFO:llama_index.token_counter.token_counter:> [get_response] Total LLM token usage: 308 tokens\n",
+                        "INFO:llama_index.token_counter.token_counter:> [get_response] Total embedding token usage: 0 tokens\n",
+                        "INFO:llama_index.token_counter.token_counter:> [get_response] Total LLM token usage: 308 tokens\n",
+                        "INFO:llama_index.token_counter.token_counter:> [get_response] Total embedding token usage: 0 tokens\n"
+                    ]
+                }
+            ],
+            "source": [
+                "query_engine_davinci = graph.as_query_engine(\n",
+                "    custom_query_engines={\n",
+                "        graph.root_index.index_id: graph.root_index.as_query_engine(\n",
+                "            retriever_mode=\"simple\",\n",
+                "            service_context=service_context_davinci,\n",
+                "            response_mode=\"tree_summarize\",\n",
+                "        )\n",
+                "    }\n",
+                ")\n",
+                "query_engine_chatgpt = graph.as_query_engine(\n",
+                "    custom_query_engines={\n",
+                "        graph.root_index.index_id: graph.root_index.as_query_engine(\n",
+                "            retriever_mode=\"simple\",\n",
+                "            service_context=service_context_chatgpt,\n",
+                "            response_mode=\"tree_summarize\",\n",
+                "        )\n",
+                "    }\n",
+                ")\n",
+                "query_str = \"Tell me more about Boston\"\n",
+                "response_davinci = query_engine_davinci.query(query_str)\n",
+                "response_chatgpt = query_engine_chatgpt.query(query_str)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 21,
+            "id": "6f5f5467-fa79-4f48-8b78-32ae8f86d12d",
+            "metadata": {
+                "id": "6f5f5467-fa79-4f48-8b78-32ae8f86d12d",
+                "outputId": "53105550-370a-4281-974d-9b0ae8064e1c"
+            },
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "\n",
+                        "Boston is the capital and largest city of the Commonwealth of Massachusetts and the cultural and financial center of the New England region of the Northeastern United States. It is one of the oldest municipalities in America, founded on the Shawmut Peninsula in 1630 by Puritan settlers from the English town of the same name. It is a center of scientific research and innovation, with nearly 5,000 startups, and is home to a number of colleges and universities, notably Harvard and MIT. It has a long seafaring tradition, and was a major port for both domestic and international trade in the 19th century. It has seen waves of immigration, with Irish, Germans, Lebanese, Syrians, French Canadians, and Russian and Polish Jews settling in the city. It was an early port of the Atlantic triangular slave trade in the New England colonies, but was soon overtaken. Boston is also known for its philanthropy, with households in the city claiming the highest average rate of philanthropy in the United States.\n"
+                    ]
+                }
+            ],
+            "source": [
+                "print(response_davinci)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 22,
+            "id": "29f32345-6f28-4545-afa9-e3c5849dfb82",
+            "metadata": {
+                "id": "29f32345-6f28-4545-afa9-e3c5849dfb82",
+                "outputId": "904002ea-f062-4f7d-8fe6-3e6b7b13b420"
+            },
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "Boston is a city in the New England region of the United States with a population of 675,647 as of 2020. It is known for its rich history and is considered the economic and cultural center of the region. The city has many firsts, including the first public park, first public or state school, first subway system, and first large public library in the United States. Boston is also a global pioneer in innovation and entrepreneurship, with nearly 5,000 startups. The city's economy includes finance, professional and business services, biotechnology, information technology, and government activities. Boston is a popular tourist destination, with Faneuil Hall alone drawing more than 20 million visitors per year. The city is home to many prestigious hospitals and universities, including Massachusetts General Hospital, Harvard Medical School, and Boston University.\n"
+                    ]
+                }
+            ],
+            "source": [
+                "print(response_chatgpt)"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "018d0a51-3a3f-4dc5-9e1d-f2e79eb0cc43",
+            "metadata": {
+                "id": "018d0a51-3a3f-4dc5-9e1d-f2e79eb0cc43"
+            },
+            "source": [
+                "**Complex Query 1**"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "730b7a1f-5197-4cdf-add2-9b46c07465f3",
+            "metadata": {
+                "id": "730b7a1f-5197-4cdf-add2-9b46c07465f3"
+            },
+            "outputs": [],
+            "source": [
+                "query_str = (\n",
+                "    \"Tell me the airports in Seattle, Houston, and Toronto. \"\n",
+                "    \"If only one city is provided, return the airport information for that city. \"\n",
+                "    \"If airports for multiple cities are provided, compare and contrast the airports. \"\n",
+                ")\n",
+                "response_davinci = query_engine_davinci.query(query_str)\n",
+                "response_chatgpt = query_engine_chatgpt.query(query_str)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "ee57efaa-dd8e-45af-968c-45d9bf92b948",
+            "metadata": {
+                "id": "ee57efaa-dd8e-45af-968c-45d9bf92b948",
+                "outputId": "8b70b13d-c07a-4685-bd1d-b0e776607ad5"
+            },
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "\n",
+                        "The airports in Seattle, Houston, and Toronto are Seattle–Tacoma International Airport (IATA: SEA), George Bush Intercontinental Airport (IATA: IAH), Toronto Pearson International Airport (IATA: YYZ), and Billy Bishop Toronto City Airport (IATA: YTZ). Seattle–Tacoma International Airport is the largest airport in the Pacific Northwest region of the United States, serving over 44 million passengers annually. George Bush Intercontinental Airport is the largest airport in Houston, serving over 40 million passengers annually. Toronto Pearson International Airport is the busiest airport in Canada, serving over 50 million passengers annually. Billy Bishop Toronto City Airport is a smaller airport located on the Toronto Islands, serving over 2 million passengers annually.\n"
+                    ]
+                }
+            ],
+            "source": [
+                "print(response_davinci)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "e499b388-5a1c-4047-8fee-122dfe73c800",
+            "metadata": {
+                "id": "e499b388-5a1c-4047-8fee-122dfe73c800",
+                "outputId": "ca0c8d9d-2f7c-4d80-a793-a79cb3b243ed"
+            },
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "Airports in Seattle: Seattle-Tacoma International Airport.\n",
+                        "Airports in Houston: George Bush Intercontinental Airport, William P. Hobby Airport, and Ellington Airport.\n",
+                        "Airports in Toronto: Toronto Pearson International Airport, Billy Bishop Toronto City Airport, Buttonville Municipal Airport, and Downsview Airport.\n",
+                        "\n",
+                        "Seattle has one major airport, Seattle-Tacoma International Airport. Houston has three airports: George Bush Intercontinental Airport, William P. Hobby Airport, and Ellington Airport. Toronto has four airports: Toronto Pearson International Airport, Billy Bishop Toronto City Airport, Buttonville Municipal Airport, and Downsview Airport. Toronto has a mix of commercial and smaller airports, while Houston has a mix of commercial, military, government, and general aviation airports.\n"
+                    ]
+                }
+            ],
+            "source": [
+                "print(response_chatgpt)"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "d3cb4d7b-7bcc-46bf-b7d6-d0230c3d7fdd",
+            "metadata": {
+                "id": "d3cb4d7b-7bcc-46bf-b7d6-d0230c3d7fdd"
+            },
+            "source": [
+                "**Complex Query 2**"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "a0fa2840-77b2-42c3-b6af-39fbe02c78ce",
+            "metadata": {
+                "id": "a0fa2840-77b2-42c3-b6af-39fbe02c78ce"
+            },
+            "outputs": [],
+            "source": [
+                "query_str = (\n",
+                "    \"Look at Houston and Boston. \"\n",
+                "    \"If only one city is provided, provide information about the sports teams for that city. \"\n",
+                "    \"If context for multiple cities are provided, compare and contrast the sports environment of the cities. \"\n",
+                ")\n",
+                "response_davinci = query_engine_davinci.query(query_str)\n",
+                "response_chatgpt = query_engine_chatgpt.query(query_str)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "3af1e27f-7697-4cbc-ba38-a7dc11330dc0",
+            "metadata": {
+                "id": "3af1e27f-7697-4cbc-ba38-a7dc11330dc0",
+                "outputId": "3d394401-ad19-4fa6-97fe-6bae70f0beff"
+            },
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "\n",
+                        "Houston has teams for every major professional league. The Houston Astros are a Major League Baseball team that have won the World Series in 2017, 2022, and appeared in it in 2005, 2019, and 2021. The Houston Rockets are a National Basketball Association franchise based in the city since 1971, and have won two NBA Championships. The Houston Texans are a National Football League expansion team formed in 2002, and the Houston Dynamo is a Major League Soccer franchise that has been based in Houston since 2006, winning two MLS Cup titles. The Houston Dash team plays in the National Women's Soccer League, and the Houston SaberCats are a rugby team that plays in Major League Rugby. \n",
+                        "\n",
+                        "Boston also has teams for every major professional league. The Boston Red Sox are a Major League Baseball team that have won the World Series in 2004, 2007, 2013, and 2018. The Boston Celtics are a National Basketball Association team that have won 17 championships, most recently in 2008. The Boston Bruins are a National Hockey League team that have won six Stanley Cup championships, most recently in 2011. The New England Revolution is a Major League Soccer team that has been based in Boston since 1996. During a particularly impressive 17-year stretch from 2001 to 2018, the city's professional sports teams won twelve championships\n"
+                    ]
+                }
+            ],
+            "source": [
+                "print(response_davinci)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "226ee4f5-c941-4497-a04c-630757622282",
+            "metadata": {
+                "id": "226ee4f5-c941-4497-a04c-630757622282",
+                "outputId": "c8b0c521-d2e7-4ba6-dc9f-52189fbf0b9b"
+            },
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "If only one city is provided, Houston has sports teams for every major professional league except the National Hockey League, including the Houston Astros (MLB), Houston Rockets (NBA), Houston Texans (NFL), Houston Dynamo (MLS), Houston Dash (National Women's Soccer League), and Houston SaberCats (rugby).\n",
+                        "\n",
+                        "If context for multiple cities are provided, Boston has teams in the four major North American men's professional sports leagues plus Major League Soccer, and has won 39 championships in these leagues. Boston is one of eight cities to have won championships in all four major American sports leagues. During a particularly impressive 17-year stretch from 2001 to 2018, the city's professional sports teams won twelve championships. The Celtics and Bruins remain competitive for titles in the century’s third decade, though the Patriots and Red Sox have fallen off from these recent glory days. In contrast, Houston has not won as many championships as Boston, but has hosted several major sports events, including the Super Bowl and World Series. Houston is also home to the first major esports team, the Houston Outlaws.\n"
+                    ]
+                }
+            ],
+            "source": [
+                "print(response_chatgpt)"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "53f527c8-0d53-4b29-8f1f-7b5bf22ca55e",
+            "metadata": {
+                "id": "53f527c8-0d53-4b29-8f1f-7b5bf22ca55e"
+            },
+            "source": [
+                "**Complex Query 3**"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "5b11d9c6-1905-4bd1-bb9a-4d60b0bc3c2d",
+            "metadata": {
+                "id": "5b11d9c6-1905-4bd1-bb9a-4d60b0bc3c2d"
+            },
+            "outputs": [],
+            "source": [
+                "query_str = (\n",
+                "    \"Look at Houston and Boston. \"\n",
+                "    \"If only one city is provided, provide information about the arts and culture for that city. \"\n",
+                "    \"If context for multiple cities are provided, compare and contrast the arts and culture of the two cities. \"\n",
+                ")\n",
+                "response_davinci = query_engine_davinci.query(query_str)\n",
+                "response_chatgpt = query_engine_chatgpt.query(query_str)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "4ccbbcf6-3074-4d8e-9ad4-92daa13a67dc",
+            "metadata": {
+                "id": "4ccbbcf6-3074-4d8e-9ad4-92daa13a67dc",
+                "outputId": "28429b4e-1854-44e8-8dcd-850f7ca7d0c2"
+            },
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "\n",
+                        "Houston and Boston both have a wide range of cultural attractions. In Houston, the Theater District is a 17-block area in the center of Downtown Houston that is home to the Bayou Place entertainment complex, restaurants, movies, plazas, and parks. The Museum District's cultural institutions and exhibits attract more than 7 million visitors a year. Notable facilities include The Museum of Fine Arts, the Houston Museum of Natural Science, the Contemporary Arts Museum Houston, the Station Museum of Contemporary Art, the Holocaust Museum Houston, the Children's Museum of Houston, and the Houston Zoo. Houston also has many annual events celebrating the diverse cultures of the city, such as the Houston Livestock Show and Rodeo, the Houston Gay Pride Parade, the Houston Greek Festival, Art Car Parade, the Houston Auto Show, the Houston International Festival, and the Bayou City Art Festival.\n",
+                        "\n",
+                        "In Boston, the Freedom Trail is a 2.5-mile walking tour of 16 historically significant sites in downtown Boston. The Museum of Fine Arts is one of the largest and most comprehensive art museums in the world, with more than 450,000 works of art. Boston also has many annual events celebrating the diverse cultures of the city, such as the Boston Marathon, the Boston Arts Festival\n"
+                    ]
+                }
+            ],
+            "source": [
+                "print(response_davinci)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "10c6ca94-c053-4009-b52d-a5255e74853c",
+            "metadata": {
+                "id": "10c6ca94-c053-4009-b52d-a5255e74853c",
+                "outputId": "b4575737-59e2-43b5-85e2-c51ffe0f8cdd"
+            },
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "There is no information about the arts and culture of Houston provided, but for Boston, there is a rich cultural history with a strong literary culture and a center for classical music. The city is also home to several art museums and galleries, including the Museum of Fine Arts and the Isabella Stewart Gardner Museum. The Institute of Contemporary Art is housed in a contemporary building designed by Diller Scofidio + Renfro in the Seaport District. Boston's South End Art and Design District (SoWa) and Newbury St. are both art gallery destinations.\n"
+                    ]
+                }
+            ],
+            "source": [
+                "print(response_chatgpt)"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "7b299ebe-cdbd-4abf-9015-4894f6aa94ba",
+            "metadata": {
+                "id": "7b299ebe-cdbd-4abf-9015-4894f6aa94ba"
+            },
+            "source": [
+                "**Complex Query 4**"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "261e3881-6414-4ace-9816-aa71a39051b5",
+            "metadata": {
+                "id": "261e3881-6414-4ace-9816-aa71a39051b5"
+            },
+            "outputs": [],
+            "source": [
+                "query_str = (\n",
+                "    \"Look at Toronto and San Francisco. \"\n",
+                "    \"If only one city is provided, provide information about the demographics for that city. \"\n",
+                "    \"If context for multiple cities are provided, compare and contrast the demographics of the two cities. \"\n",
+                ")\n",
+                "response_davinci = query_engine_davinci.query(query_str)\n",
+                "response_chatgpt = query_engine_chatgpt.query(query_str)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "f58ca597-8a40-4fa0-995b-f54cff133ec8",
+            "metadata": {
+                "id": "f58ca597-8a40-4fa0-995b-f54cff133ec8",
+                "outputId": "7fefef2f-78b8-47c3-ade3-5a8673a264e1"
+            },
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "\n",
+                        "In Toronto, the population is 2,731,571 people, with a median age of 39.2 years. The racial makeup of the city is 51.5% White, 20.3% Asian, 8.6% African American, 0.8% Native American, 0.2% Pacific Islander, and 18.6% from other races. The city is also home to a large Hispanic population, making up 6.2% of the population. The three most commonly reported ethnic origins are White (46.9%), Asian (20.3%), and Black (8.6%). Christianity is the most commonly reported religion (48.4%), followed by no religion and secular perspectives (31.2%). English is the predominant language spoken by Torontonians with approximately 79% of residents having proficiency in the language, although only 43.2% of Torontonians reported English as their mother tongue.\n",
+                        "\n",
+                        "When comparing Toronto and San Francisco, we can see that Toronto has a larger population than San Francisco, with a median age that is slightly higher. The racial makeup of Toronto is slightly more White than San Francisco, while San Francisco has a larger Asian population. The Hispanic population is larger in San Francisco than in Toronto. Christianity is the\n"
+                    ]
+                }
+            ],
+            "source": [
+                "print(response_davinci)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "c4fca866-b5fd-493b-8e2f-33dbe485c463",
+            "metadata": {
+                "id": "c4fca866-b5fd-493b-8e2f-33dbe485c463",
+                "outputId": "528a27a4-bef3-4e4a-d788-57958739dee6"
+            },
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "Only information about Toronto is provided in the context, so demographics for Toronto can be provided. However, there is no context information about San Francisco to compare and contrast with Toronto.\n"
+                    ]
+                }
+            ],
+            "source": [
+                "print(response_chatgpt)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "ab6d123c-afdf-4aea-8e5a-9513891ba799",
+            "metadata": {
+                "id": "ab6d123c-afdf-4aea-8e5a-9513891ba799"
+            },
+            "outputs": [],
+            "source": []
+        }
+    ],
+    "metadata": {
+        "colab": {
+            "provenance": []
+        },
+        "kernelspec": {
+            "display_name": "llama",
+            "language": "python",
+            "name": "python3"
+        },
+        "language_info": {
+            "codemirror_mode": {
+                "name": "ipython",
+                "version": 3
+            },
+            "file_extension": ".py",
+            "mimetype": "text/x-python",
+            "name": "python",
+            "nbconvert_exporter": "python",
+            "pygments_lexer": "ipython3",
+            "version": "3.9.16"
+        },
+        "widgets": {
+            "application/vnd.jupyter.widget-state+json": {
+                "026cc1a42e154f1f92b5236869311929": {
+                    "model_module": "@jupyter-widgets/controls",
+                    "model_module_version": "1.5.0",
+                    "model_name": "FloatProgressModel",
+                    "state": {
+                        "_dom_classes": [],
+                        "_model_module": "@jupyter-widgets/controls",
+                        "_model_module_version": "1.5.0",
+                        "_model_name": "FloatProgressModel",
+                        "_view_count": null,
+                        "_view_module": "@jupyter-widgets/controls",
+                        "_view_module_version": "1.5.0",
+                        "_view_name": "ProgressView",
+                        "bar_style": "success",
+                        "description": "",
+                        "description_tooltip": null,
+                        "layout": "IPY_MODEL_4cc9ec6ba46647aba2d53e352f91c137",
+                        "max": 665,
+                        "min": 0,
+                        "orientation": "horizontal",
+                        "style": "IPY_MODEL_f2a1c5087d0e44909139697ed90474e8",
+                        "value": 665
+                    }
+                },
+                "028aa5d1f7a74d538b5c606d4a6d146f": {
+                    "model_module": "@jupyter-widgets/base",
+                    "model_module_version": "1.2.0",
+                    "model_name": "LayoutModel",
+                    "state": {
+                        "_model_module": "@jupyter-widgets/base",
+                        "_model_module_version": "1.2.0",
+                        "_model_name": "LayoutModel",
+                        "_view_count": null,
+                        "_view_module": "@jupyter-widgets/base",
+                        "_view_module_version": "1.2.0",
+                        "_view_name": "LayoutView",
+                        "align_content": null,
+                        "align_items": null,
+                        "align_self": null,
+                        "border": null,
+                        "bottom": null,
+                        "display": null,
+                        "flex": null,
+                        "flex_flow": null,
+                        "grid_area": null,
+                        "grid_auto_columns": null,
+                        "grid_auto_flow": null,
+                        "grid_auto_rows": null,
+                        "grid_column": null,
+                        "grid_gap": null,
+                        "grid_row": null,
+                        "grid_template_areas": null,
+                        "grid_template_columns": null,
+                        "grid_template_rows": null,
+                        "height": null,
+                        "justify_content": null,
+                        "justify_items": null,
+                        "left": null,
+                        "margin": null,
+                        "max_height": null,
+                        "max_width": null,
+                        "min_height": null,
+                        "min_width": null,
+                        "object_fit": null,
+                        "object_position": null,
+                        "order": null,
+                        "overflow": null,
+                        "overflow_x": null,
+                        "overflow_y": null,
+                        "padding": null,
+                        "right": null,
+                        "top": null,
+                        "visibility": null,
+                        "width": null
+                    }
+                },
+                "119d6d7a8d524aa49170f5784ebc6b9e": {
+                    "model_module": "@jupyter-widgets/controls",
+                    "model_module_version": "1.5.0",
+                    "model_name": "DescriptionStyleModel",
+                    "state": {
+                        "_model_module": "@jupyter-widgets/controls",
+                        "_model_module_version": "1.5.0",
+                        "_model_name": "DescriptionStyleModel",
+                        "_view_count": null,
+                        "_view_module": "@jupyter-widgets/base",
+                        "_view_module_version": "1.2.0",
+                        "_view_name": "StyleView",
+                        "description_width": ""
+                    }
+                },
+                "1bdaf4dab16f48dbaeed3fb9bf268e45": {
+                    "model_module": "@jupyter-widgets/controls",
+                    "model_module_version": "1.5.0",
+                    "model_name": "HTMLModel",
+                    "state": {
+                        "_dom_classes": [],
+                        "_model_module": "@jupyter-widgets/controls",
+                        "_model_module_version": "1.5.0",
+                        "_model_name": "HTMLModel",
+                        "_view_count": null,
+                        "_view_module": "@jupyter-widgets/controls",
+                        "_view_module_version": "1.5.0",
+                        "_view_name": "HTMLView",
+                        "description": "",
+                        "description_tooltip": null,
+                        "layout": "IPY_MODEL_028aa5d1f7a74d538b5c606d4a6d146f",
+                        "placeholder": "​",
+                        "style": "IPY_MODEL_c078fe9a056a473dab7d474cd7907154",
+                        "value": "Downloading (…)lve/main/config.json: 100%"
+                    }
+                },
+                "2053e6adef1b4dba89f861eaf3d916fd": {
+                    "model_module": "@jupyter-widgets/controls",
+                    "model_module_version": "1.5.0",
+                    "model_name": "DescriptionStyleModel",
+                    "state": {
+                        "_model_module": "@jupyter-widgets/controls",
+                        "_model_module_version": "1.5.0",
+                        "_model_name": "DescriptionStyleModel",
+                        "_view_count": null,
+                        "_view_module": "@jupyter-widgets/base",
+                        "_view_module_version": "1.2.0",
+                        "_view_name": "StyleView",
+                        "description_width": ""
+                    }
+                },
+                "208d404f405a42a3b06d65ad67fb7322": {
+                    "model_module": "@jupyter-widgets/controls",
+                    "model_module_version": "1.5.0",
+                    "model_name": "HTMLModel",
+                    "state": {
+                        "_dom_classes": [],
+                        "_model_module": "@jupyter-widgets/controls",
+                        "_model_module_version": "1.5.0",
+                        "_model_name": "HTMLModel",
+                        "_view_count": null,
+                        "_view_module": "@jupyter-widgets/controls",
+                        "_view_module_version": "1.5.0",
+                        "_view_name": "HTMLView",
+                        "description": "",
+                        "description_tooltip": null,
+                        "layout": "IPY_MODEL_3fef46c902524717b377dee6c1dfc929",
+                        "placeholder": "​",
+                        "style": "IPY_MODEL_fd8b887c1f7149f2876cf8a31e534ad6",
+                        "value": "Downloading (…)olve/main/vocab.json: 100%"
+                    }
+                },
+                "380a0c11434241b191b17421e395be8b": {
+                    "model_module": "@jupyter-widgets/base",
+                    "model_module_version": "1.2.0",
+                    "model_name": "LayoutModel",
+                    "state": {
+                        "_model_module": "@jupyter-widgets/base",
+                        "_model_module_version": "1.2.0",
+                        "_model_name": "LayoutModel",
+                        "_view_count": null,
+                        "_view_module": "@jupyter-widgets/base",
+                        "_view_module_version": "1.2.0",
+                        "_view_name": "LayoutView",
+                        "align_content": null,
+                        "align_items": null,
+                        "align_self": null,
+                        "border": null,
+                        "bottom": null,
+                        "display": null,
+                        "flex": null,
+                        "flex_flow": null,
+                        "grid_area": null,
+                        "grid_auto_columns": null,
+                        "grid_auto_flow": null,
+                        "grid_auto_rows": null,
+                        "grid_column": null,
+                        "grid_gap": null,
+                        "grid_row": null,
+                        "grid_template_areas": null,
+                        "grid_template_columns": null,
+                        "grid_template_rows": null,
+                        "height": null,
+                        "justify_content": null,
+                        "justify_items": null,
+                        "left": null,
+                        "margin": null,
+                        "max_height": null,
+                        "max_width": null,
+                        "min_height": null,
+                        "min_width": null,
+                        "object_fit": null,
+                        "object_position": null,
+                        "order": null,
+                        "overflow": null,
+                        "overflow_x": null,
+                        "overflow_y": null,
+                        "padding": null,
+                        "right": null,
+                        "top": null,
+                        "visibility": null,
+                        "width": null
+                    }
+                },
+                "398f1c0f56fe4f218d999df138adfdac": {
+                    "model_module": "@jupyter-widgets/base",
+                    "model_module_version": "1.2.0",
+                    "model_name": "LayoutModel",
+                    "state": {
+                        "_model_module": "@jupyter-widgets/base",
+                        "_model_module_version": "1.2.0",
+                        "_model_name": "LayoutModel",
+                        "_view_count": null,
+                        "_view_module": "@jupyter-widgets/base",
+                        "_view_module_version": "1.2.0",
+                        "_view_name": "LayoutView",
+                        "align_content": null,
+                        "align_items": null,
+                        "align_self": null,
+                        "border": null,
+                        "bottom": null,
+                        "display": null,
+                        "flex": null,
+                        "flex_flow": null,
+                        "grid_area": null,
+                        "grid_auto_columns": null,
+                        "grid_auto_flow": null,
+                        "grid_auto_rows": null,
+                        "grid_column": null,
+                        "grid_gap": null,
+                        "grid_row": null,
+                        "grid_template_areas": null,
+                        "grid_template_columns": null,
+                        "grid_template_rows": null,
+                        "height": null,
+                        "justify_content": null,
+                        "justify_items": null,
+                        "left": null,
+                        "margin": null,
+                        "max_height": null,
+                        "max_width": null,
+                        "min_height": null,
+                        "min_width": null,
+                        "object_fit": null,
+                        "object_position": null,
+                        "order": null,
+                        "overflow": null,
+                        "overflow_x": null,
+                        "overflow_y": null,
+                        "padding": null,
+                        "right": null,
+                        "top": null,
+                        "visibility": null,
+                        "width": null
+                    }
+                },
+                "3b4c1066797b43a586611ec2d63e7ca1": {
+                    "model_module": "@jupyter-widgets/controls",
+                    "model_module_version": "1.5.0",
+                    "model_name": "HBoxModel",
+                    "state": {
+                        "_dom_classes": [],
+                        "_model_module": "@jupyter-widgets/controls",
+                        "_model_module_version": "1.5.0",
+                        "_model_name": "HBoxModel",
+                        "_view_count": null,
+                        "_view_module": "@jupyter-widgets/controls",
+                        "_view_module_version": "1.5.0",
+                        "_view_name": "HBoxView",
+                        "box_style": "",
+                        "children": [
+                            "IPY_MODEL_c06865c1e01a441698dacf48600dd03c",
+                            "IPY_MODEL_9d229e5dd56e4d539ca2c1b9f0a37812",
+                            "IPY_MODEL_868aa268dd28498d902782215e53c6fa"
+                        ],
+                        "layout": "IPY_MODEL_46f644cf589e4a48a6fad1742f0c0575"
+                    }
+                },
+                "3c37e72850c746ce9c919add5340dede": {
+                    "model_module": "@jupyter-widgets/base",
+                    "model_module_version": "1.2.0",
+                    "model_name": "LayoutModel",
+                    "state": {
+                        "_model_module": "@jupyter-widgets/base",
+                        "_model_module_version": "1.2.0",
+                        "_model_name": "LayoutModel",
+                        "_view_count": null,
+                        "_view_module": "@jupyter-widgets/base",
+                        "_view_module_version": "1.2.0",
+                        "_view_name": "LayoutView",
+                        "align_content": null,
+                        "align_items": null,
+                        "align_self": null,
+                        "border": null,
+                        "bottom": null,
+                        "display": null,
+                        "flex": null,
+                        "flex_flow": null,
+                        "grid_area": null,
+                        "grid_auto_columns": null,
+                        "grid_auto_flow": null,
+                        "grid_auto_rows": null,
+                        "grid_column": null,
+                        "grid_gap": null,
+                        "grid_row": null,
+                        "grid_template_areas": null,
+                        "grid_template_columns": null,
+                        "grid_template_rows": null,
+                        "height": null,
+                        "justify_content": null,
+                        "justify_items": null,
+                        "left": null,
+                        "margin": null,
+                        "max_height": null,
+                        "max_width": null,
+                        "min_height": null,
+                        "min_width": null,
+                        "object_fit": null,
+                        "object_position": null,
+                        "order": null,
+                        "overflow": null,
+                        "overflow_x": null,
+                        "overflow_y": null,
+                        "padding": null,
+                        "right": null,
+                        "top": null,
+                        "visibility": null,
+                        "width": null
+                    }
+                },
+                "3f748152b9274556afad2555572aa9f4": {
+                    "model_module": "@jupyter-widgets/controls",
+                    "model_module_version": "1.5.0",
+                    "model_name": "DescriptionStyleModel",
+                    "state": {
+                        "_model_module": "@jupyter-widgets/controls",
+                        "_model_module_version": "1.5.0",
+                        "_model_name": "DescriptionStyleModel",
+                        "_view_count": null,
+                        "_view_module": "@jupyter-widgets/base",
+                        "_view_module_version": "1.2.0",
+                        "_view_name": "StyleView",
+                        "description_width": ""
+                    }
+                },
+                "3fef46c902524717b377dee6c1dfc929": {
+                    "model_module": "@jupyter-widgets/base",
+                    "model_module_version": "1.2.0",
+                    "model_name": "LayoutModel",
+                    "state": {
+                        "_model_module": "@jupyter-widgets/base",
+                        "_model_module_version": "1.2.0",
+                        "_model_name": "LayoutModel",
+                        "_view_count": null,
+                        "_view_module": "@jupyter-widgets/base",
+                        "_view_module_version": "1.2.0",
+                        "_view_name": "LayoutView",
+                        "align_content": null,
+                        "align_items": null,
+                        "align_self": null,
+                        "border": null,
+                        "bottom": null,
+                        "display": null,
+                        "flex": null,
+                        "flex_flow": null,
+                        "grid_area": null,
+                        "grid_auto_columns": null,
+                        "grid_auto_flow": null,
+                        "grid_auto_rows": null,
+                        "grid_column": null,
+                        "grid_gap": null,
+                        "grid_row": null,
+                        "grid_template_areas": null,
+                        "grid_template_columns": null,
+                        "grid_template_rows": null,
+                        "height": null,
+                        "justify_content": null,
+                        "justify_items": null,
+                        "left": null,
+                        "margin": null,
+                        "max_height": null,
+                        "max_width": null,
+                        "min_height": null,
+                        "min_width": null,
+                        "object_fit": null,
+                        "object_position": null,
+                        "order": null,
+                        "overflow": null,
+                        "overflow_x": null,
+                        "overflow_y": null,
+                        "padding": null,
+                        "right": null,
+                        "top": null,
+                        "visibility": null,
+                        "width": null
+                    }
+                },
+                "40e148c291ad4f739998a7eac55a8af6": {
+                    "model_module": "@jupyter-widgets/base",
+                    "model_module_version": "1.2.0",
+                    "model_name": "LayoutModel",
+                    "state": {
+                        "_model_module": "@jupyter-widgets/base",
+                        "_model_module_version": "1.2.0",
+                        "_model_name": "LayoutModel",
+                        "_view_count": null,
+                        "_view_module": "@jupyter-widgets/base",
+                        "_view_module_version": "1.2.0",
+                        "_view_name": "LayoutView",
+                        "align_content": null,
+                        "align_items": null,
+                        "align_self": null,
+                        "border": null,
+                        "bottom": null,
+                        "display": null,
+                        "flex": null,
+                        "flex_flow": null,
+                        "grid_area": null,
+                        "grid_auto_columns": null,
+                        "grid_auto_flow": null,
+                        "grid_auto_rows": null,
+                        "grid_column": null,
+                        "grid_gap": null,
+                        "grid_row": null,
+                        "grid_template_areas": null,
+                        "grid_template_columns": null,
+                        "grid_template_rows": null,
+                        "height": null,
+                        "justify_content": null,
+                        "justify_items": null,
+                        "left": null,
+                        "margin": null,
+                        "max_height": null,
+                        "max_width": null,
+                        "min_height": null,
+                        "min_width": null,
+                        "object_fit": null,
+                        "object_position": null,
+                        "order": null,
+                        "overflow": null,
+                        "overflow_x": null,
+                        "overflow_y": null,
+                        "padding": null,
+                        "right": null,
+                        "top": null,
+                        "visibility": null,
+                        "width": null
+                    }
+                },
+                "46f644cf589e4a48a6fad1742f0c0575": {
+                    "model_module": "@jupyter-widgets/base",
+                    "model_module_version": "1.2.0",
+                    "model_name": "LayoutModel",
+                    "state": {
+                        "_model_module": "@jupyter-widgets/base",
+                        "_model_module_version": "1.2.0",
+                        "_model_name": "LayoutModel",
+                        "_view_count": null,
+                        "_view_module": "@jupyter-widgets/base",
+                        "_view_module_version": "1.2.0",
+                        "_view_name": "LayoutView",
+                        "align_content": null,
+                        "align_items": null,
+                        "align_self": null,
+                        "border": null,
+                        "bottom": null,
+                        "display": null,
+                        "flex": null,
+                        "flex_flow": null,
+                        "grid_area": null,
+                        "grid_auto_columns": null,
+                        "grid_auto_flow": null,
+                        "grid_auto_rows": null,
+                        "grid_column": null,
+                        "grid_gap": null,
+                        "grid_row": null,
+                        "grid_template_areas": null,
+                        "grid_template_columns": null,
+                        "grid_template_rows": null,
+                        "height": null,
+                        "justify_content": null,
+                        "justify_items": null,
+                        "left": null,
+                        "margin": null,
+                        "max_height": null,
+                        "max_width": null,
+                        "min_height": null,
+                        "min_width": null,
+                        "object_fit": null,
+                        "object_position": null,
+                        "order": null,
+                        "overflow": null,
+                        "overflow_x": null,
+                        "overflow_y": null,
+                        "padding": null,
+                        "right": null,
+                        "top": null,
+                        "visibility": null,
+                        "width": null
+                    }
+                },
+                "47838fa763ca40598b2622a9d1e79444": {
+                    "model_module": "@jupyter-widgets/controls",
+                    "model_module_version": "1.5.0",
+                    "model_name": "HTMLModel",
+                    "state": {
+                        "_dom_classes": [],
+                        "_model_module": "@jupyter-widgets/controls",
+                        "_model_module_version": "1.5.0",
+                        "_model_name": "HTMLModel",
+                        "_view_count": null,
+                        "_view_module": "@jupyter-widgets/controls",
+                        "_view_module_version": "1.5.0",
+                        "_view_name": "HTMLView",
+                        "description": "",
+                        "description_tooltip": null,
+                        "layout": "IPY_MODEL_b102e756f9b848a98f58396fc825be84",
+                        "placeholder": "​",
+                        "style": "IPY_MODEL_fbd7219af1924d2ead5310eb7b35aab0",
+                        "value": " 1.04M/1.04M [00:00&lt;00:00, 23.7MB/s]"
+                    }
+                },
+                "4cc9ec6ba46647aba2d53e352f91c137": {
+                    "model_module": "@jupyter-widgets/base",
+                    "model_module_version": "1.2.0",
+                    "model_name": "LayoutModel",
+                    "state": {
+                        "_model_module": "@jupyter-widgets/base",
+                        "_model_module_version": "1.2.0",
+                        "_model_name": "LayoutModel",
+                        "_view_count": null,
+                        "_view_module": "@jupyter-widgets/base",
+                        "_view_module_version": "1.2.0",
+                        "_view_name": "LayoutView",
+                        "align_content": null,
+                        "align_items": null,
+                        "align_self": null,
+                        "border": null,
+                        "bottom": null,
+                        "display": null,
+                        "flex": null,
+                        "flex_flow": null,
+                        "grid_area": null,
+                        "grid_auto_columns": null,
+                        "grid_auto_flow": null,
+                        "grid_auto_rows": null,
+                        "grid_column": null,
+                        "grid_gap": null,
+                        "grid_row": null,
+                        "grid_template_areas": null,
+                        "grid_template_columns": null,
+                        "grid_template_rows": null,
+                        "height": null,
+                        "justify_content": null,
+                        "justify_items": null,
+                        "left": null,
+                        "margin": null,
+                        "max_height": null,
+                        "max_width": null,
+                        "min_height": null,
+                        "min_width": null,
+                        "object_fit": null,
+                        "object_position": null,
+                        "order": null,
+                        "overflow": null,
+                        "overflow_x": null,
+                        "overflow_y": null,
+                        "padding": null,
+                        "right": null,
+                        "top": null,
+                        "visibility": null,
+                        "width": null
+                    }
+                },
+                "64b754f563834be0a6963349b1f2dcf2": {
+                    "model_module": "@jupyter-widgets/controls",
+                    "model_module_version": "1.5.0",
+                    "model_name": "HTMLModel",
+                    "state": {
+                        "_dom_classes": [],
+                        "_model_module": "@jupyter-widgets/controls",
+                        "_model_module_version": "1.5.0",
+                        "_model_name": "HTMLModel",
+                        "_view_count": null,
+                        "_view_module": "@jupyter-widgets/controls",
+                        "_view_module_version": "1.5.0",
+                        "_view_name": "HTMLView",
+                        "description": "",
+                        "description_tooltip": null,
+                        "layout": "IPY_MODEL_a02534c347aa4865ab4ab3de3a3ee2f5",
+                        "placeholder": "​",
+                        "style": "IPY_MODEL_b0ccb9d9d96e4ed8bec4d540c34d337c",
+                        "value": "Downloading (…)/main/tokenizer.json: 100%"
+                    }
+                },
+                "7438aea716f44d85ad1c2b49a93acd83": {
+                    "model_module": "@jupyter-widgets/base",
+                    "model_module_version": "1.2.0",
+                    "model_name": "LayoutModel",
+                    "state": {
+                        "_model_module": "@jupyter-widgets/base",
+                        "_model_module_version": "1.2.0",
+                        "_model_name": "LayoutModel",
+                        "_view_count": null,
+                        "_view_module": "@jupyter-widgets/base",
+                        "_view_module_version": "1.2.0",
+                        "_view_name": "LayoutView",
+                        "align_content": null,
+                        "align_items": null,
+                        "align_self": null,
+                        "border": null,
+                        "bottom": null,
+                        "display": null,
+                        "flex": null,
+                        "flex_flow": null,
+                        "grid_area": null,
+                        "grid_auto_columns": null,
+                        "grid_auto_flow": null,
+                        "grid_auto_rows": null,
+                        "grid_column": null,
+                        "grid_gap": null,
+                        "grid_row": null,
+                        "grid_template_areas": null,
+                        "grid_template_columns": null,
+                        "grid_template_rows": null,
+                        "height": null,
+                        "justify_content": null,
+                        "justify_items": null,
+                        "left": null,
+                        "margin": null,
+                        "max_height": null,
+                        "max_width": null,
+                        "min_height": null,
+                        "min_width": null,
+                        "object_fit": null,
+                        "object_position": null,
+                        "order": null,
+                        "overflow": null,
+                        "overflow_x": null,
+                        "overflow_y": null,
+                        "padding": null,
+                        "right": null,
+                        "top": null,
+                        "visibility": null,
+                        "width": null
+                    }
+                },
+                "7b24b46d6c3643e581ba003a9c473745": {
+                    "model_module": "@jupyter-widgets/base",
+                    "model_module_version": "1.2.0",
+                    "model_name": "LayoutModel",
+                    "state": {
+                        "_model_module": "@jupyter-widgets/base",
+                        "_model_module_version": "1.2.0",
+                        "_model_name": "LayoutModel",
+                        "_view_count": null,
+                        "_view_module": "@jupyter-widgets/base",
+                        "_view_module_version": "1.2.0",
+                        "_view_name": "LayoutView",
+                        "align_content": null,
+                        "align_items": null,
+                        "align_self": null,
+                        "border": null,
+                        "bottom": null,
+                        "display": null,
+                        "flex": null,
+                        "flex_flow": null,
+                        "grid_area": null,
+                        "grid_auto_columns": null,
+                        "grid_auto_flow": null,
+                        "grid_auto_rows": null,
+                        "grid_column": null,
+                        "grid_gap": null,
+                        "grid_row": null,
+                        "grid_template_areas": null,
+                        "grid_template_columns": null,
+                        "grid_template_rows": null,
+                        "height": null,
+                        "justify_content": null,
+                        "justify_items": null,
+                        "left": null,
+                        "margin": null,
+                        "max_height": null,
+                        "max_width": null,
+                        "min_height": null,
+                        "min_width": null,
+                        "object_fit": null,
+                        "object_position": null,
+                        "order": null,
+                        "overflow": null,
+                        "overflow_x": null,
+                        "overflow_y": null,
+                        "padding": null,
+                        "right": null,
+                        "top": null,
+                        "visibility": null,
+                        "width": null
+                    }
+                },
+                "7b47c78391a4431aa2d3f84677f24046": {
+                    "model_module": "@jupyter-widgets/controls",
+                    "model_module_version": "1.5.0",
+                    "model_name": "DescriptionStyleModel",
+                    "state": {
+                        "_model_module": "@jupyter-widgets/controls",
+                        "_model_module_version": "1.5.0",
+                        "_model_name": "DescriptionStyleModel",
+                        "_view_count": null,
+                        "_view_module": "@jupyter-widgets/base",
+                        "_view_module_version": "1.2.0",
+                        "_view_name": "StyleView",
+                        "description_width": ""
+                    }
+                },
+                "7da29a2b6508494282acbc459eccbb96": {
+                    "model_module": "@jupyter-widgets/controls",
+                    "model_module_version": "1.5.0",
+                    "model_name": "FloatProgressModel",
+                    "state": {
+                        "_dom_classes": [],
+                        "_model_module": "@jupyter-widgets/controls",
+                        "_model_module_version": "1.5.0",
+                        "_model_name": "FloatProgressModel",
+                        "_view_count": null,
+                        "_view_module": "@jupyter-widgets/controls",
+                        "_view_module_version": "1.5.0",
+                        "_view_name": "ProgressView",
+                        "bar_style": "success",
+                        "description": "",
+                        "description_tooltip": null,
+                        "layout": "IPY_MODEL_7438aea716f44d85ad1c2b49a93acd83",
+                        "max": 1042301,
+                        "min": 0,
+                        "orientation": "horizontal",
+                        "style": "IPY_MODEL_fe39f994fa9b4d7daa232e1dcd2b0e8b",
+                        "value": 1042301
+                    }
+                },
+                "868aa268dd28498d902782215e53c6fa": {
+                    "model_module": "@jupyter-widgets/controls",
+                    "model_module_version": "1.5.0",
+                    "model_name": "HTMLModel",
+                    "state": {
+                        "_dom_classes": [],
+                        "_model_module": "@jupyter-widgets/controls",
+                        "_model_module_version": "1.5.0",
+                        "_model_name": "HTMLModel",
+                        "_view_count": null,
+                        "_view_module": "@jupyter-widgets/controls",
+                        "_view_module_version": "1.5.0",
+                        "_view_name": "HTMLView",
+                        "description": "",
+                        "description_tooltip": null,
+                        "layout": "IPY_MODEL_3c37e72850c746ce9c919add5340dede",
+                        "placeholder": "​",
+                        "style": "IPY_MODEL_2053e6adef1b4dba89f861eaf3d916fd",
+                        "value": " 456k/456k [00:00&lt;00:00, 11.9MB/s]"
+                    }
+                },
+                "9d229e5dd56e4d539ca2c1b9f0a37812": {
+                    "model_module": "@jupyter-widgets/controls",
+                    "model_module_version": "1.5.0",
+                    "model_name": "FloatProgressModel",
+                    "state": {
+                        "_dom_classes": [],
+                        "_model_module": "@jupyter-widgets/controls",
+                        "_model_module_version": "1.5.0",
+                        "_model_name": "FloatProgressModel",
+                        "_view_count": null,
+                        "_view_module": "@jupyter-widgets/controls",
+                        "_view_module_version": "1.5.0",
+                        "_view_name": "ProgressView",
+                        "bar_style": "success",
+                        "description": "",
+                        "description_tooltip": null,
+                        "layout": "IPY_MODEL_398f1c0f56fe4f218d999df138adfdac",
+                        "max": 456318,
+                        "min": 0,
+                        "orientation": "horizontal",
+                        "style": "IPY_MODEL_f1839e86863948f68314f81ba6bca4c9",
+                        "value": 456318
+                    }
+                },
+                "a02534c347aa4865ab4ab3de3a3ee2f5": {
+                    "model_module": "@jupyter-widgets/base",
+                    "model_module_version": "1.2.0",
+                    "model_name": "LayoutModel",
+                    "state": {
+                        "_model_module": "@jupyter-widgets/base",
+                        "_model_module_version": "1.2.0",
+                        "_model_name": "LayoutModel",
+                        "_view_count": null,
+                        "_view_module": "@jupyter-widgets/base",
+                        "_view_module_version": "1.2.0",
+                        "_view_name": "LayoutView",
+                        "align_content": null,
+                        "align_items": null,
+                        "align_self": null,
+                        "border": null,
+                        "bottom": null,
+                        "display": null,
+                        "flex": null,
+                        "flex_flow": null,
+                        "grid_area": null,
+                        "grid_auto_columns": null,
+                        "grid_auto_flow": null,
+                        "grid_auto_rows": null,
+                        "grid_column": null,
+                        "grid_gap": null,
+                        "grid_row": null,
+                        "grid_template_areas": null,
+                        "grid_template_columns": null,
+                        "grid_template_rows": null,
+                        "height": null,
+                        "justify_content": null,
+                        "justify_items": null,
+                        "left": null,
+                        "margin": null,
+                        "max_height": null,
+                        "max_width": null,
+                        "min_height": null,
+                        "min_width": null,
+                        "object_fit": null,
+                        "object_position": null,
+                        "order": null,
+                        "overflow": null,
+                        "overflow_x": null,
+                        "overflow_y": null,
+                        "padding": null,
+                        "right": null,
+                        "top": null,
+                        "visibility": null,
+                        "width": null
+                    }
+                },
+                "a2edbc4195d843e0acfba83726a08e78": {
+                    "model_module": "@jupyter-widgets/controls",
+                    "model_module_version": "1.5.0",
+                    "model_name": "HTMLModel",
+                    "state": {
+                        "_dom_classes": [],
+                        "_model_module": "@jupyter-widgets/controls",
+                        "_model_module_version": "1.5.0",
+                        "_model_name": "HTMLModel",
+                        "_view_count": null,
+                        "_view_module": "@jupyter-widgets/controls",
+                        "_view_module_version": "1.5.0",
+                        "_view_name": "HTMLView",
+                        "description": "",
+                        "description_tooltip": null,
+                        "layout": "IPY_MODEL_7b24b46d6c3643e581ba003a9c473745",
+                        "placeholder": "​",
+                        "style": "IPY_MODEL_3f748152b9274556afad2555572aa9f4",
+                        "value": " 665/665 [00:00&lt;00:00, 22.7kB/s]"
+                    }
+                },
+                "adb40ef11f094594b14776e238955224": {
+                    "model_module": "@jupyter-widgets/base",
+                    "model_module_version": "1.2.0",
+                    "model_name": "LayoutModel",
+                    "state": {
+                        "_model_module": "@jupyter-widgets/base",
+                        "_model_module_version": "1.2.0",
+                        "_model_name": "LayoutModel",
+                        "_view_count": null,
+                        "_view_module": "@jupyter-widgets/base",
+                        "_view_module_version": "1.2.0",
+                        "_view_name": "LayoutView",
+                        "align_content": null,
+                        "align_items": null,
+                        "align_self": null,
+                        "border": null,
+                        "bottom": null,
+                        "display": null,
+                        "flex": null,
+                        "flex_flow": null,
+                        "grid_area": null,
+                        "grid_auto_columns": null,
+                        "grid_auto_flow": null,
+                        "grid_auto_rows": null,
+                        "grid_column": null,
+                        "grid_gap": null,
+                        "grid_row": null,
+                        "grid_template_areas": null,
+                        "grid_template_columns": null,
+                        "grid_template_rows": null,
+                        "height": null,
+                        "justify_content": null,
+                        "justify_items": null,
+                        "left": null,
+                        "margin": null,
+                        "max_height": null,
+                        "max_width": null,
+                        "min_height": null,
+                        "min_width": null,
+                        "object_fit": null,
+                        "object_position": null,
+                        "order": null,
+                        "overflow": null,
+                        "overflow_x": null,
+                        "overflow_y": null,
+                        "padding": null,
+                        "right": null,
+                        "top": null,
+                        "visibility": null,
+                        "width": null
+                    }
+                },
+                "b0ccb9d9d96e4ed8bec4d540c34d337c": {
+                    "model_module": "@jupyter-widgets/controls",
+                    "model_module_version": "1.5.0",
+                    "model_name": "DescriptionStyleModel",
+                    "state": {
+                        "_model_module": "@jupyter-widgets/controls",
+                        "_model_module_version": "1.5.0",
+                        "_model_name": "DescriptionStyleModel",
+                        "_view_count": null,
+                        "_view_module": "@jupyter-widgets/base",
+                        "_view_module_version": "1.2.0",
+                        "_view_name": "StyleView",
+                        "description_width": ""
+                    }
+                },
+                "b102e756f9b848a98f58396fc825be84": {
+                    "model_module": "@jupyter-widgets/base",
+                    "model_module_version": "1.2.0",
+                    "model_name": "LayoutModel",
+                    "state": {
+                        "_model_module": "@jupyter-widgets/base",
+                        "_model_module_version": "1.2.0",
+                        "_model_name": "LayoutModel",
+                        "_view_count": null,
+                        "_view_module": "@jupyter-widgets/base",
+                        "_view_module_version": "1.2.0",
+                        "_view_name": "LayoutView",
+                        "align_content": null,
+                        "align_items": null,
+                        "align_self": null,
+                        "border": null,
+                        "bottom": null,
+                        "display": null,
+                        "flex": null,
+                        "flex_flow": null,
+                        "grid_area": null,
+                        "grid_auto_columns": null,
+                        "grid_auto_flow": null,
+                        "grid_auto_rows": null,
+                        "grid_column": null,
+                        "grid_gap": null,
+                        "grid_row": null,
+                        "grid_template_areas": null,
+                        "grid_template_columns": null,
+                        "grid_template_rows": null,
+                        "height": null,
+                        "justify_content": null,
+                        "justify_items": null,
+                        "left": null,
+                        "margin": null,
+                        "max_height": null,
+                        "max_width": null,
+                        "min_height": null,
+                        "min_width": null,
+                        "object_fit": null,
+                        "object_position": null,
+                        "order": null,
+                        "overflow": null,
+                        "overflow_x": null,
+                        "overflow_y": null,
+                        "padding": null,
+                        "right": null,
+                        "top": null,
+                        "visibility": null,
+                        "width": null
+                    }
+                },
+                "b458d6fa793d4fa080b9f1e5013af3de": {
+                    "model_module": "@jupyter-widgets/base",
+                    "model_module_version": "1.2.0",
+                    "model_name": "LayoutModel",
+                    "state": {
+                        "_model_module": "@jupyter-widgets/base",
+                        "_model_module_version": "1.2.0",
+                        "_model_name": "LayoutModel",
+                        "_view_count": null,
+                        "_view_module": "@jupyter-widgets/base",
+                        "_view_module_version": "1.2.0",
+                        "_view_name": "LayoutView",
+                        "align_content": null,
+                        "align_items": null,
+                        "align_self": null,
+                        "border": null,
+                        "bottom": null,
+                        "display": null,
+                        "flex": null,
+                        "flex_flow": null,
+                        "grid_area": null,
+                        "grid_auto_columns": null,
+                        "grid_auto_flow": null,
+                        "grid_auto_rows": null,
+                        "grid_column": null,
+                        "grid_gap": null,
+                        "grid_row": null,
+                        "grid_template_areas": null,
+                        "grid_template_columns": null,
+                        "grid_template_rows": null,
+                        "height": null,
+                        "justify_content": null,
+                        "justify_items": null,
+                        "left": null,
+                        "margin": null,
+                        "max_height": null,
+                        "max_width": null,
+                        "min_height": null,
+                        "min_width": null,
+                        "object_fit": null,
+                        "object_position": null,
+                        "order": null,
+                        "overflow": null,
+                        "overflow_x": null,
+                        "overflow_y": null,
+                        "padding": null,
+                        "right": null,
+                        "top": null,
+                        "visibility": null,
+                        "width": null
+                    }
+                },
+                "b53e8481f6d64018988dc03081bf2765": {
+                    "model_module": "@jupyter-widgets/controls",
+                    "model_module_version": "1.5.0",
+                    "model_name": "ProgressStyleModel",
+                    "state": {
+                        "_model_module": "@jupyter-widgets/controls",
+                        "_model_module_version": "1.5.0",
+                        "_model_name": "ProgressStyleModel",
+                        "_view_count": null,
+                        "_view_module": "@jupyter-widgets/base",
+                        "_view_module_version": "1.2.0",
+                        "_view_name": "StyleView",
+                        "bar_color": null,
+                        "description_width": ""
+                    }
+                },
+                "b5566e3db2914ddebd80d7bde75b2559": {
+                    "model_module": "@jupyter-widgets/controls",
+                    "model_module_version": "1.5.0",
+                    "model_name": "HBoxModel",
+                    "state": {
+                        "_dom_classes": [],
+                        "_model_module": "@jupyter-widgets/controls",
+                        "_model_module_version": "1.5.0",
+                        "_model_name": "HBoxModel",
+                        "_view_count": null,
+                        "_view_module": "@jupyter-widgets/controls",
+                        "_view_module_version": "1.5.0",
+                        "_view_name": "HBoxView",
+                        "box_style": "",
+                        "children": [
+                            "IPY_MODEL_208d404f405a42a3b06d65ad67fb7322",
+                            "IPY_MODEL_7da29a2b6508494282acbc459eccbb96",
+                            "IPY_MODEL_47838fa763ca40598b2622a9d1e79444"
+                        ],
+                        "layout": "IPY_MODEL_ff32a3f12e814740a1cd5dd12bd731d4"
+                    }
+                },
+                "c06865c1e01a441698dacf48600dd03c": {
+                    "model_module": "@jupyter-widgets/controls",
+                    "model_module_version": "1.5.0",
+                    "model_name": "HTMLModel",
+                    "state": {
+                        "_dom_classes": [],
+                        "_model_module": "@jupyter-widgets/controls",
+                        "_model_module_version": "1.5.0",
+                        "_model_name": "HTMLModel",
+                        "_view_count": null,
+                        "_view_module": "@jupyter-widgets/controls",
+                        "_view_module_version": "1.5.0",
+                        "_view_name": "HTMLView",
+                        "description": "",
+                        "description_tooltip": null,
+                        "layout": "IPY_MODEL_adb40ef11f094594b14776e238955224",
+                        "placeholder": "​",
+                        "style": "IPY_MODEL_7b47c78391a4431aa2d3f84677f24046",
+                        "value": "Downloading (…)olve/main/merges.txt: 100%"
+                    }
+                },
+                "c078fe9a056a473dab7d474cd7907154": {
+                    "model_module": "@jupyter-widgets/controls",
+                    "model_module_version": "1.5.0",
+                    "model_name": "DescriptionStyleModel",
+                    "state": {
+                        "_model_module": "@jupyter-widgets/controls",
+                        "_model_module_version": "1.5.0",
+                        "_model_name": "DescriptionStyleModel",
+                        "_view_count": null,
+                        "_view_module": "@jupyter-widgets/base",
+                        "_view_module_version": "1.2.0",
+                        "_view_name": "StyleView",
+                        "description_width": ""
+                    }
+                },
+                "c7636a6d7380465895b8c86d34caf500": {
+                    "model_module": "@jupyter-widgets/controls",
+                    "model_module_version": "1.5.0",
+                    "model_name": "FloatProgressModel",
+                    "state": {
+                        "_dom_classes": [],
+                        "_model_module": "@jupyter-widgets/controls",
+                        "_model_module_version": "1.5.0",
+                        "_model_name": "FloatProgressModel",
+                        "_view_count": null,
+                        "_view_module": "@jupyter-widgets/controls",
+                        "_view_module_version": "1.5.0",
+                        "_view_name": "ProgressView",
+                        "bar_style": "success",
+                        "description": "",
+                        "description_tooltip": null,
+                        "layout": "IPY_MODEL_f22e9615de674e05978f332eb88750cf",
+                        "max": 1355256,
+                        "min": 0,
+                        "orientation": "horizontal",
+                        "style": "IPY_MODEL_b53e8481f6d64018988dc03081bf2765",
+                        "value": 1355256
+                    }
+                },
+                "d55f842766484d299c75f74e31e7aa6a": {
+                    "model_module": "@jupyter-widgets/controls",
+                    "model_module_version": "1.5.0",
+                    "model_name": "HBoxModel",
+                    "state": {
+                        "_dom_classes": [],
+                        "_model_module": "@jupyter-widgets/controls",
+                        "_model_module_version": "1.5.0",
+                        "_model_name": "HBoxModel",
+                        "_view_count": null,
+                        "_view_module": "@jupyter-widgets/controls",
+                        "_view_module_version": "1.5.0",
+                        "_view_name": "HBoxView",
+                        "box_style": "",
+                        "children": [
+                            "IPY_MODEL_1bdaf4dab16f48dbaeed3fb9bf268e45",
+                            "IPY_MODEL_026cc1a42e154f1f92b5236869311929",
+                            "IPY_MODEL_a2edbc4195d843e0acfba83726a08e78"
+                        ],
+                        "layout": "IPY_MODEL_40e148c291ad4f739998a7eac55a8af6"
+                    }
+                },
+                "eab4127882d24acfa9518ebff6f4e22a": {
+                    "model_module": "@jupyter-widgets/controls",
+                    "model_module_version": "1.5.0",
+                    "model_name": "HBoxModel",
+                    "state": {
+                        "_dom_classes": [],
+                        "_model_module": "@jupyter-widgets/controls",
+                        "_model_module_version": "1.5.0",
+                        "_model_name": "HBoxModel",
+                        "_view_count": null,
+                        "_view_module": "@jupyter-widgets/controls",
+                        "_view_module_version": "1.5.0",
+                        "_view_name": "HBoxView",
+                        "box_style": "",
+                        "children": [
+                            "IPY_MODEL_64b754f563834be0a6963349b1f2dcf2",
+                            "IPY_MODEL_c7636a6d7380465895b8c86d34caf500",
+                            "IPY_MODEL_f7803dea63994cc2a31acf805bd19e67"
+                        ],
+                        "layout": "IPY_MODEL_380a0c11434241b191b17421e395be8b"
+                    }
+                },
+                "f1839e86863948f68314f81ba6bca4c9": {
+                    "model_module": "@jupyter-widgets/controls",
+                    "model_module_version": "1.5.0",
+                    "model_name": "ProgressStyleModel",
+                    "state": {
+                        "_model_module": "@jupyter-widgets/controls",
+                        "_model_module_version": "1.5.0",
+                        "_model_name": "ProgressStyleModel",
+                        "_view_count": null,
+                        "_view_module": "@jupyter-widgets/base",
+                        "_view_module_version": "1.2.0",
+                        "_view_name": "StyleView",
+                        "bar_color": null,
+                        "description_width": ""
+                    }
+                },
+                "f22e9615de674e05978f332eb88750cf": {
+                    "model_module": "@jupyter-widgets/base",
+                    "model_module_version": "1.2.0",
+                    "model_name": "LayoutModel",
+                    "state": {
+                        "_model_module": "@jupyter-widgets/base",
+                        "_model_module_version": "1.2.0",
+                        "_model_name": "LayoutModel",
+                        "_view_count": null,
+                        "_view_module": "@jupyter-widgets/base",
+                        "_view_module_version": "1.2.0",
+                        "_view_name": "LayoutView",
+                        "align_content": null,
+                        "align_items": null,
+                        "align_self": null,
+                        "border": null,
+                        "bottom": null,
+                        "display": null,
+                        "flex": null,
+                        "flex_flow": null,
+                        "grid_area": null,
+                        "grid_auto_columns": null,
+                        "grid_auto_flow": null,
+                        "grid_auto_rows": null,
+                        "grid_column": null,
+                        "grid_gap": null,
+                        "grid_row": null,
+                        "grid_template_areas": null,
+                        "grid_template_columns": null,
+                        "grid_template_rows": null,
+                        "height": null,
+                        "justify_content": null,
+                        "justify_items": null,
+                        "left": null,
+                        "margin": null,
+                        "max_height": null,
+                        "max_width": null,
+                        "min_height": null,
+                        "min_width": null,
+                        "object_fit": null,
+                        "object_position": null,
+                        "order": null,
+                        "overflow": null,
+                        "overflow_x": null,
+                        "overflow_y": null,
+                        "padding": null,
+                        "right": null,
+                        "top": null,
+                        "visibility": null,
+                        "width": null
+                    }
+                },
+                "f2a1c5087d0e44909139697ed90474e8": {
+                    "model_module": "@jupyter-widgets/controls",
+                    "model_module_version": "1.5.0",
+                    "model_name": "ProgressStyleModel",
+                    "state": {
+                        "_model_module": "@jupyter-widgets/controls",
+                        "_model_module_version": "1.5.0",
+                        "_model_name": "ProgressStyleModel",
+                        "_view_count": null,
+                        "_view_module": "@jupyter-widgets/base",
+                        "_view_module_version": "1.2.0",
+                        "_view_name": "StyleView",
+                        "bar_color": null,
+                        "description_width": ""
+                    }
+                },
+                "f7803dea63994cc2a31acf805bd19e67": {
+                    "model_module": "@jupyter-widgets/controls",
+                    "model_module_version": "1.5.0",
+                    "model_name": "HTMLModel",
+                    "state": {
+                        "_dom_classes": [],
+                        "_model_module": "@jupyter-widgets/controls",
+                        "_model_module_version": "1.5.0",
+                        "_model_name": "HTMLModel",
+                        "_view_count": null,
+                        "_view_module": "@jupyter-widgets/controls",
+                        "_view_module_version": "1.5.0",
+                        "_view_name": "HTMLView",
+                        "description": "",
+                        "description_tooltip": null,
+                        "layout": "IPY_MODEL_b458d6fa793d4fa080b9f1e5013af3de",
+                        "placeholder": "​",
+                        "style": "IPY_MODEL_119d6d7a8d524aa49170f5784ebc6b9e",
+                        "value": " 1.36M/1.36M [00:00&lt;00:00, 30.3MB/s]"
+                    }
+                },
+                "fbd7219af1924d2ead5310eb7b35aab0": {
+                    "model_module": "@jupyter-widgets/controls",
+                    "model_module_version": "1.5.0",
+                    "model_name": "DescriptionStyleModel",
+                    "state": {
+                        "_model_module": "@jupyter-widgets/controls",
+                        "_model_module_version": "1.5.0",
+                        "_model_name": "DescriptionStyleModel",
+                        "_view_count": null,
+                        "_view_module": "@jupyter-widgets/base",
+                        "_view_module_version": "1.2.0",
+                        "_view_name": "StyleView",
+                        "description_width": ""
+                    }
+                },
+                "fd8b887c1f7149f2876cf8a31e534ad6": {
+                    "model_module": "@jupyter-widgets/controls",
+                    "model_module_version": "1.5.0",
+                    "model_name": "DescriptionStyleModel",
+                    "state": {
+                        "_model_module": "@jupyter-widgets/controls",
+                        "_model_module_version": "1.5.0",
+                        "_model_name": "DescriptionStyleModel",
+                        "_view_count": null,
+                        "_view_module": "@jupyter-widgets/base",
+                        "_view_module_version": "1.2.0",
+                        "_view_name": "StyleView",
+                        "description_width": ""
+                    }
+                },
+                "fe39f994fa9b4d7daa232e1dcd2b0e8b": {
+                    "model_module": "@jupyter-widgets/controls",
+                    "model_module_version": "1.5.0",
+                    "model_name": "ProgressStyleModel",
+                    "state": {
+                        "_model_module": "@jupyter-widgets/controls",
+                        "_model_module_version": "1.5.0",
+                        "_model_name": "ProgressStyleModel",
+                        "_view_count": null,
+                        "_view_module": "@jupyter-widgets/base",
+                        "_view_module_version": "1.2.0",
+                        "_view_name": "StyleView",
+                        "bar_color": null,
+                        "description_width": ""
+                    }
+                },
+                "ff32a3f12e814740a1cd5dd12bd731d4": {
+                    "model_module": "@jupyter-widgets/base",
+                    "model_module_version": "1.2.0",
+                    "model_name": "LayoutModel",
+                    "state": {
+                        "_model_module": "@jupyter-widgets/base",
+                        "_model_module_version": "1.2.0",
+                        "_model_name": "LayoutModel",
+                        "_view_count": null,
+                        "_view_module": "@jupyter-widgets/base",
+                        "_view_module_version": "1.2.0",
+                        "_view_name": "LayoutView",
+                        "align_content": null,
+                        "align_items": null,
+                        "align_self": null,
+                        "border": null,
+                        "bottom": null,
+                        "display": null,
+                        "flex": null,
+                        "flex_flow": null,
+                        "grid_area": null,
+                        "grid_auto_columns": null,
+                        "grid_auto_flow": null,
+                        "grid_auto_rows": null,
+                        "grid_column": null,
+                        "grid_gap": null,
+                        "grid_row": null,
+                        "grid_template_areas": null,
+                        "grid_template_columns": null,
+                        "grid_template_rows": null,
+                        "height": null,
+                        "justify_content": null,
+                        "justify_items": null,
+                        "left": null,
+                        "margin": null,
+                        "max_height": null,
+                        "max_width": null,
+                        "min_height": null,
+                        "min_width": null,
+                        "object_fit": null,
+                        "object_position": null,
+                        "order": null,
+                        "overflow": null,
+                        "overflow_x": null,
+                        "overflow_y": null,
+                        "padding": null,
+                        "right": null,
+                        "top": null,
+                        "visibility": null,
+                        "width": null
+                    }
+                }
+            }
+        }
     },
-    "id": "D2ZI8iKch-V_",
-    "outputId": "bc63c640-8508-4c74-8bd9-3fc1495b7839"
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Looking in indexes: https://pypi.org/simple, https://us-python.pkg.dev/colab-wheels/public/simple/\n",
-      "Collecting llama-index\n",
-      "  Downloading llama_index-0.4.17.tar.gz (122 kB)\n",
-      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m122.8/122.8 KB\u001b[0m \u001b[31m9.7 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
-      "\u001b[?25h  Preparing metadata (setup.py) ... \u001b[?25l\u001b[?25hdone\n",
-      "Collecting langchain\n",
-      "  Downloading langchain-0.0.98-py3-none-any.whl (337 kB)\n",
-      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m337.8/337.8 KB\u001b[0m \u001b[31m23.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
-      "\u001b[?25hCollecting openai>=0.26.4\n",
-      "  Downloading openai-0.27.0-py3-none-any.whl (70 kB)\n",
-      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m70.1/70.1 KB\u001b[0m \u001b[31m4.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
-      "\u001b[?25hCollecting dataclasses_json\n",
-      "  Downloading dataclasses_json-0.5.7-py3-none-any.whl (25 kB)\n",
-      "Collecting transformers\n",
-      "  Downloading transformers-4.26.1-py3-none-any.whl (6.3 MB)\n",
-      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m6.3/6.3 MB\u001b[0m \u001b[31m73.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
-      "\u001b[?25hRequirement already satisfied: nltk in /usr/local/lib/python3.8/dist-packages (from llama-index) (3.7)\n",
-      "Requirement already satisfied: numpy in /usr/local/lib/python3.8/dist-packages (from llama-index) (1.22.4)\n",
-      "Collecting tenacity<8.2.0\n",
-      "  Downloading tenacity-8.1.0-py3-none-any.whl (23 kB)\n",
-      "Requirement already satisfied: pandas in /usr/local/lib/python3.8/dist-packages (from llama-index) (1.3.5)\n",
-      "Requirement already satisfied: tqdm in /usr/local/lib/python3.8/dist-packages (from openai>=0.26.4->llama-index) (4.64.1)\n",
-      "Requirement already satisfied: requests>=2.20 in /usr/local/lib/python3.8/dist-packages (from openai>=0.26.4->llama-index) (2.25.1)\n",
-      "Requirement already satisfied: aiohttp in /usr/local/lib/python3.8/dist-packages (from openai>=0.26.4->llama-index) (3.8.4)\n",
-      "Requirement already satisfied: marshmallow<4.0.0,>=3.3.0 in /usr/local/lib/python3.8/dist-packages (from dataclasses_json->llama-index) (3.19.0)\n",
-      "Collecting marshmallow-enum<2.0.0,>=1.5.1\n",
-      "  Downloading marshmallow_enum-1.5.1-py2.py3-none-any.whl (4.2 kB)\n",
-      "Collecting typing-inspect>=0.4.0\n",
-      "  Downloading typing_inspect-0.8.0-py3-none-any.whl (8.7 kB)\n",
-      "Collecting deeplake<4.0.0,>=3.2.9\n",
-      "  Downloading deeplake-3.2.12.tar.gz (439 kB)\n",
-      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m439.1/439.1 KB\u001b[0m \u001b[31m31.3 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
-      "\u001b[?25h  Preparing metadata (setup.py) ... \u001b[?25l\u001b[?25hdone\n",
-      "Requirement already satisfied: PyYAML<7,>=6 in /usr/local/lib/python3.8/dist-packages (from langchain->llama-index) (6.0)\n",
-      "Requirement already satisfied: SQLAlchemy<2,>=1 in /usr/local/lib/python3.8/dist-packages (from langchain->llama-index) (1.4.46)\n",
-      "Requirement already satisfied: pydantic<2,>=1 in /usr/local/lib/python3.8/dist-packages (from langchain->llama-index) (1.10.5)\n",
-      "Collecting aleph-alpha-client<3.0.0,>=2.15.0\n",
-      "  Downloading aleph_alpha_client-2.16.0-py3-none-any.whl (38 kB)\n",
-      "Requirement already satisfied: joblib in /usr/local/lib/python3.8/dist-packages (from nltk->llama-index) (1.2.0)\n",
-      "Requirement already satisfied: regex>=2021.8.3 in /usr/local/lib/python3.8/dist-packages (from nltk->llama-index) (2022.6.2)\n",
-      "Requirement already satisfied: click in /usr/local/lib/python3.8/dist-packages (from nltk->llama-index) (8.1.3)\n",
-      "Requirement already satisfied: python-dateutil>=2.7.3 in /usr/local/lib/python3.8/dist-packages (from pandas->llama-index) (2.8.2)\n",
-      "Requirement already satisfied: pytz>=2017.3 in /usr/local/lib/python3.8/dist-packages (from pandas->llama-index) (2022.7.1)\n",
-      "Collecting huggingface-hub<1.0,>=0.11.0\n",
-      "  Downloading huggingface_hub-0.12.1-py3-none-any.whl (190 kB)\n",
-      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m190.3/190.3 KB\u001b[0m \u001b[31m3.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
-      "\u001b[?25hCollecting tokenizers!=0.11.3,<0.14,>=0.11.1\n",
-      "  Downloading tokenizers-0.13.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (7.6 MB)\n",
-      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m7.6/7.6 MB\u001b[0m \u001b[31m48.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
-      "\u001b[?25hRequirement already satisfied: packaging>=20.0 in /usr/local/lib/python3.8/dist-packages (from transformers->llama-index) (23.0)\n",
-      "Requirement already satisfied: filelock in /usr/local/lib/python3.8/dist-packages (from transformers->llama-index) (3.9.0)\n",
-      "Requirement already satisfied: multidict<7.0,>=4.5 in /usr/local/lib/python3.8/dist-packages (from aiohttp->openai>=0.26.4->llama-index) (6.0.4)\n",
-      "Requirement already satisfied: attrs>=17.3.0 in /usr/local/lib/python3.8/dist-packages (from aiohttp->openai>=0.26.4->llama-index) (22.2.0)\n",
-      "Requirement already satisfied: aiosignal>=1.1.2 in /usr/local/lib/python3.8/dist-packages (from aiohttp->openai>=0.26.4->llama-index) (1.3.1)\n",
-      "Requirement already satisfied: frozenlist>=1.1.1 in /usr/local/lib/python3.8/dist-packages (from aiohttp->openai>=0.26.4->llama-index) (1.3.3)\n",
-      "Requirement already satisfied: async-timeout<5.0,>=4.0.0a3 in /usr/local/lib/python3.8/dist-packages (from aiohttp->openai>=0.26.4->llama-index) (4.0.2)\n",
-      "Requirement already satisfied: charset-normalizer<4.0,>=2.0 in /usr/local/lib/python3.8/dist-packages (from aiohttp->openai>=0.26.4->llama-index) (3.0.1)\n",
-      "Requirement already satisfied: yarl<2.0,>=1.0 in /usr/local/lib/python3.8/dist-packages (from aiohttp->openai>=0.26.4->llama-index) (1.8.2)\n",
-      "Requirement already satisfied: urllib3>=1.26 in /usr/local/lib/python3.8/dist-packages (from aleph-alpha-client<3.0.0,>=2.15.0->langchain->llama-index) (1.26.14)\n",
-      "Collecting aiohttp-retry>=2.8.3\n",
-      "  Downloading aiohttp_retry-2.8.3-py3-none-any.whl (9.8 kB)\n",
-      "Collecting aiodns>=3.0.0\n",
-      "  Downloading aiodns-3.0.0-py3-none-any.whl (5.0 kB)\n",
-      "Collecting requests>=2.20\n",
-      "  Downloading requests-2.28.2-py3-none-any.whl (62 kB)\n",
-      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m62.8/62.8 KB\u001b[0m \u001b[31m4.7 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
-      "\u001b[?25hRequirement already satisfied: pillow in /usr/local/lib/python3.8/dist-packages (from deeplake<4.0.0,>=3.2.9->langchain->llama-index) (8.4.0)\n",
-      "Collecting boto3\n",
-      "  Downloading boto3-1.26.82-py3-none-any.whl (134 kB)\n",
-      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m134.7/134.7 KB\u001b[0m \u001b[31m6.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
-      "\u001b[?25hCollecting pathos\n",
-      "  Downloading pathos-0.3.0-py3-none-any.whl (79 kB)\n",
-      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m79.8/79.8 KB\u001b[0m \u001b[31m8.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
-      "\u001b[?25hCollecting humbug>=0.2.6\n",
-      "  Downloading humbug-0.2.8-py3-none-any.whl (13 kB)\n",
-      "Collecting numcodecs\n",
-      "  Downloading numcodecs-0.11.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (6.7 MB)\n",
-      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m6.7/6.7 MB\u001b[0m \u001b[31m41.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
-      "\u001b[?25hCollecting pyjwt\n",
-      "  Downloading PyJWT-2.6.0-py3-none-any.whl (20 kB)\n",
-      "Collecting hub>=2.8.7\n",
-      "  Downloading hub-3.0.1-py3-none-any.whl (1.4 kB)\n",
-      "Requirement already satisfied: typing-extensions>=3.7.4.3 in /usr/local/lib/python3.8/dist-packages (from huggingface-hub<1.0,>=0.11.0->transformers->llama-index) (4.5.0)\n",
-      "Requirement already satisfied: six>=1.5 in /usr/local/lib/python3.8/dist-packages (from python-dateutil>=2.7.3->pandas->llama-index) (1.15.0)\n",
-      "Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.8/dist-packages (from requests>=2.20->openai>=0.26.4->llama-index) (2.10)\n",
-      "Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.8/dist-packages (from requests>=2.20->openai>=0.26.4->llama-index) (2022.12.7)\n",
-      "Requirement already satisfied: greenlet!=0.4.17 in /usr/local/lib/python3.8/dist-packages (from SQLAlchemy<2,>=1->langchain->llama-index) (2.0.2)\n",
-      "Collecting mypy-extensions>=0.3.0\n",
-      "  Downloading mypy_extensions-1.0.0-py3-none-any.whl (4.7 kB)\n",
-      "Collecting pycares>=4.0.0\n",
-      "  Downloading pycares-4.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (288 kB)\n",
-      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m289.0/289.0 KB\u001b[0m \u001b[31m19.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
-      "\u001b[?25hCollecting s3transfer<0.7.0,>=0.6.0\n",
-      "  Downloading s3transfer-0.6.0-py3-none-any.whl (79 kB)\n",
-      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m79.6/79.6 KB\u001b[0m \u001b[31m6.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
-      "\u001b[?25hCollecting botocore<1.30.0,>=1.29.82\n",
-      "  Downloading botocore-1.29.82-py3-none-any.whl (10.5 MB)\n",
-      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m10.5/10.5 MB\u001b[0m \u001b[31m69.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
-      "\u001b[?25hCollecting jmespath<2.0.0,>=0.7.1\n",
-      "  Downloading jmespath-1.0.1-py3-none-any.whl (20 kB)\n",
-      "Requirement already satisfied: entrypoints in /usr/local/lib/python3.8/dist-packages (from numcodecs->deeplake<4.0.0,>=3.2.9->langchain->llama-index) (0.4)\n",
-      "Collecting ppft>=1.7.6.6\n",
-      "  Downloading ppft-1.7.6.6-py3-none-any.whl (52 kB)\n",
-      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m52.8/52.8 KB\u001b[0m \u001b[31m4.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
-      "\u001b[?25hCollecting pox>=0.3.2\n",
-      "  Downloading pox-0.3.2-py3-none-any.whl (29 kB)\n",
-      "Requirement already satisfied: dill>=0.3.6 in /usr/local/lib/python3.8/dist-packages (from pathos->deeplake<4.0.0,>=3.2.9->langchain->llama-index) (0.3.6)\n",
-      "Collecting multiprocess>=0.70.14\n",
-      "  Downloading multiprocess-0.70.14-py38-none-any.whl (132 kB)\n",
-      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m132.0/132.0 KB\u001b[0m \u001b[31m10.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
-      "\u001b[?25hRequirement already satisfied: cffi>=1.5.0 in /usr/local/lib/python3.8/dist-packages (from pycares>=4.0.0->aiodns>=3.0.0->aleph-alpha-client<3.0.0,>=2.15.0->langchain->llama-index) (1.15.1)\n",
-      "Requirement already satisfied: pycparser in /usr/local/lib/python3.8/dist-packages (from cffi>=1.5.0->pycares>=4.0.0->aiodns>=3.0.0->aleph-alpha-client<3.0.0,>=2.15.0->langchain->llama-index) (2.21)\n",
-      "Building wheels for collected packages: llama-index, deeplake\n",
-      "  Building wheel for llama-index (setup.py) ... \u001b[?25l\u001b[?25hdone\n",
-      "  Created wheel for llama-index: filename=llama_index-0.4.17-py3-none-any.whl size=182750 sha256=67cb3c836e93d9d29a73307c2393d49392a4c8ceae94be552e0a91ca4b1d2cf1\n",
-      "  Stored in directory: /root/.cache/pip/wheels/15/bb/a9/de82e6a211b5f22899972226d5164f91546e6ac016bbd6c248\n",
-      "  Building wheel for deeplake (setup.py) ... \u001b[?25l\u001b[?25hdone\n",
-      "  Created wheel for deeplake: filename=deeplake-3.2.12-py3-none-any.whl size=534308 sha256=b49c2dd3396d018a03f60c580ca9f15903b45507d648336b281f36605cb7950f\n",
-      "  Stored in directory: /root/.cache/pip/wheels/4b/1a/74/4b341aa1a16e01324c9728738ff705c049c3fa2a09e40d3d9f\n",
-      "Successfully built llama-index deeplake\n",
-      "Installing collected packages: tokenizers, tenacity, requests, pyjwt, ppft, pox, numcodecs, mypy-extensions, multiprocess, jmespath, typing-inspect, pycares, pathos, marshmallow-enum, humbug, huggingface-hub, botocore, transformers, s3transfer, openai, dataclasses_json, aiohttp-retry, aiodns, boto3, aleph-alpha-client, hub, deeplake, langchain, llama-index\n",
-      "  Attempting uninstall: tenacity\n",
-      "    Found existing installation: tenacity 8.2.1\n",
-      "    Uninstalling tenacity-8.2.1:\n",
-      "      Successfully uninstalled tenacity-8.2.1\n",
-      "  Attempting uninstall: requests\n",
-      "    Found existing installation: requests 2.25.1\n",
-      "    Uninstalling requests-2.25.1:\n",
-      "      Successfully uninstalled requests-2.25.1\n",
-      "Successfully installed aiodns-3.0.0 aiohttp-retry-2.8.3 aleph-alpha-client-2.16.0 boto3-1.26.82 botocore-1.29.82 dataclasses_json-0.5.7 deeplake-3.2.12 hub-3.0.1 huggingface-hub-0.12.1 humbug-0.2.8 jmespath-1.0.1 langchain-0.0.98 llama-index-0.4.17 marshmallow-enum-1.5.1 multiprocess-0.70.14 mypy-extensions-1.0.0 numcodecs-0.11.0 openai-0.27.0 pathos-0.3.0 pox-0.3.2 ppft-1.7.6.6 pycares-4.3.0 pyjwt-2.6.0 requests-2.28.2 s3transfer-0.6.0 tenacity-8.1.0 tokenizers-0.13.2 transformers-4.26.1 typing-inspect-0.8.0\n"
-     ]
-    }
-   ],
-   "source": [
-    "!pip install llama-index"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "d35ov8dk_6WP",
-   "metadata": {
-    "id": "d35ov8dk_6WP"
-   },
-   "outputs": [],
-   "source": [
-    "# My OpenAI Key\n",
-    "import os\n",
-    "\n",
-    "os.environ[\"OPENAI_API_KEY\"] = \"\""
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "fa0e62b6",
-   "metadata": {
-    "id": "fa0e62b6"
-   },
-   "outputs": [],
-   "source": [
-    "import logging\n",
-    "import sys\n",
-    "\n",
-    "logging.basicConfig(stream=sys.stdout, level=logging.INFO)\n",
-    "logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 7,
-   "id": "e27b0473-4bda-47f0-b6ed-fd482eac1a13",
-   "metadata": {
-    "id": "e27b0473-4bda-47f0-b6ed-fd482eac1a13"
-   },
-   "outputs": [],
-   "source": [
-    "from llama_index import (\n",
-    "    VectorStoreIndex,\n",
-    "    SimpleKeywordTableIndex,\n",
-    "    ListIndex,\n",
-    "    SimpleDirectoryReader,\n",
-    "    LLMPredictor,\n",
-    "    ServiceContext,\n",
-    ")\n",
-    "from llama_index.llms import OpenAI\n",
-    "import requests"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "49e0d841-680f-4a0c-b455-788b54978ebf",
-   "metadata": {
-    "id": "49e0d841-680f-4a0c-b455-788b54978ebf"
-   },
-   "source": [
-    "#### Load Datasets\n",
-    "\n",
-    "Load Wikipedia pages as well as Paul Graham's \"What I Worked On\" essay"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 8,
-   "id": "fc4692a1",
-   "metadata": {
-    "id": "fc4692a1"
-   },
-   "outputs": [],
-   "source": [
-    "wiki_titles = [\n",
-    "    \"Toronto\",\n",
-    "    \"Seattle\",\n",
-    "    \"San Francisco\",\n",
-    "    \"Chicago\",\n",
-    "    \"Boston\",\n",
-    "    \"Washington, D.C.\",\n",
-    "    \"Cambridge, Massachusetts\",\n",
-    "    \"Houston\",\n",
-    "]"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 9,
-   "id": "9ec16a8b-6aae-4bf7-9b83-b82087b4ea52",
-   "metadata": {
-    "id": "9ec16a8b-6aae-4bf7-9b83-b82087b4ea52"
-   },
-   "outputs": [],
-   "source": [
-    "from pathlib import Path\n",
-    "\n",
-    "import requests\n",
-    "\n",
-    "for title in wiki_titles:\n",
-    "    response = requests.get(\n",
-    "        \"https://en.wikipedia.org/w/api.php\",\n",
-    "        params={\n",
-    "            \"action\": \"query\",\n",
-    "            \"format\": \"json\",\n",
-    "            \"titles\": title,\n",
-    "            \"prop\": \"extracts\",\n",
-    "            # 'exintro': True,\n",
-    "            \"explaintext\": True,\n",
-    "        },\n",
-    "    ).json()\n",
-    "    page = next(iter(response[\"query\"][\"pages\"].values()))\n",
-    "    wiki_text = page[\"extract\"]\n",
-    "\n",
-    "    data_path = Path(\"data\")\n",
-    "    if not data_path.exists():\n",
-    "        Path.mkdir(data_path)\n",
-    "\n",
-    "    with open(data_path / f\"{title}.txt\", \"w\") as fp:\n",
-    "        fp.write(wiki_text)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 10,
-   "id": "39c00aeb-adef-4ce3-8134-031de18e64ea",
-   "metadata": {
-    "id": "39c00aeb-adef-4ce3-8134-031de18e64ea"
-   },
-   "outputs": [],
-   "source": [
-    "# Load all wiki documents\n",
-    "city_docs = {}\n",
-    "for wiki_title in wiki_titles:\n",
-    "    city_docs[wiki_title] = SimpleDirectoryReader(\n",
-    "        input_files=[f\"data/{wiki_title}.txt\"]\n",
-    "    ).load_data()"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "f1782198-c0de-4679-8951-1297c21b8639",
-   "metadata": {
-    "id": "f1782198-c0de-4679-8951-1297c21b8639"
-   },
-   "source": [
-    "### Building the document indices\n",
-    "Build a vector index for the wiki pages about cities and persons, and PG essay"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 11,
-   "id": "M0GylZB-C2zL",
-   "metadata": {
-    "id": "M0GylZB-C2zL"
-   },
-   "outputs": [],
-   "source": [
-    "# LLM Predictor (text-davinci-003)\n",
-    "davinci = OpenAI(temperature=0, model=\"text-davinci-003\")\n",
-    "service_context_davinci = ServiceContext.from_defaults(llm=davinci)\n",
-    "\n",
-    "# # LLM Predictor (gpt-3.5-turbo)\n",
-    "chatgpt = OpenAI(temperature=0, model=\"gpt-3.5-turbo\")\n",
-    "service_context_chatgpt = ServiceContext.from_defaults(llm=chatgpt)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 12,
-   "id": "5431e83e-428b-4473-bad1-24b7a6c4db38",
-   "metadata": {
-    "colab": {
-     "base_uri": "https://localhost:8080/",
-     "height": 183,
-     "referenced_widgets": [
-      "b5566e3db2914ddebd80d7bde75b2559",
-      "208d404f405a42a3b06d65ad67fb7322",
-      "7da29a2b6508494282acbc459eccbb96",
-      "47838fa763ca40598b2622a9d1e79444",
-      "ff32a3f12e814740a1cd5dd12bd731d4",
-      "3fef46c902524717b377dee6c1dfc929",
-      "fd8b887c1f7149f2876cf8a31e534ad6",
-      "7438aea716f44d85ad1c2b49a93acd83",
-      "fe39f994fa9b4d7daa232e1dcd2b0e8b",
-      "b102e756f9b848a98f58396fc825be84",
-      "fbd7219af1924d2ead5310eb7b35aab0",
-      "3b4c1066797b43a586611ec2d63e7ca1",
-      "c06865c1e01a441698dacf48600dd03c",
-      "9d229e5dd56e4d539ca2c1b9f0a37812",
-      "868aa268dd28498d902782215e53c6fa",
-      "46f644cf589e4a48a6fad1742f0c0575",
-      "adb40ef11f094594b14776e238955224",
-      "7b47c78391a4431aa2d3f84677f24046",
-      "398f1c0f56fe4f218d999df138adfdac",
-      "f1839e86863948f68314f81ba6bca4c9",
-      "3c37e72850c746ce9c919add5340dede",
-      "2053e6adef1b4dba89f861eaf3d916fd",
-      "eab4127882d24acfa9518ebff6f4e22a",
-      "64b754f563834be0a6963349b1f2dcf2",
-      "c7636a6d7380465895b8c86d34caf500",
-      "f7803dea63994cc2a31acf805bd19e67",
-      "380a0c11434241b191b17421e395be8b",
-      "a02534c347aa4865ab4ab3de3a3ee2f5",
-      "b0ccb9d9d96e4ed8bec4d540c34d337c",
-      "f22e9615de674e05978f332eb88750cf",
-      "b53e8481f6d64018988dc03081bf2765",
-      "b458d6fa793d4fa080b9f1e5013af3de",
-      "119d6d7a8d524aa49170f5784ebc6b9e",
-      "d55f842766484d299c75f74e31e7aa6a",
-      "1bdaf4dab16f48dbaeed3fb9bf268e45",
-      "026cc1a42e154f1f92b5236869311929",
-      "a2edbc4195d843e0acfba83726a08e78",
-      "40e148c291ad4f739998a7eac55a8af6",
-      "028aa5d1f7a74d538b5c606d4a6d146f",
-      "c078fe9a056a473dab7d474cd7907154",
-      "4cc9ec6ba46647aba2d53e352f91c137",
-      "f2a1c5087d0e44909139697ed90474e8",
-      "7b24b46d6c3643e581ba003a9c473745",
-      "3f748152b9274556afad2555572aa9f4"
-     ]
-    },
-    "id": "5431e83e-428b-4473-bad1-24b7a6c4db38",
-    "outputId": "5721e863-d460-4f5c-9e36-5a586180b669"
-   },
-   "outputs": [
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "INFO:llama_index.token_counter.token_counter:> [build_index_from_nodes] Total LLM token usage: 0 tokens\n",
-      "INFO:llama_index.token_counter.token_counter:> [build_index_from_nodes] Total embedding token usage: 17592 tokens\n",
-      "INFO:llama_index.token_counter.token_counter:> [build_index_from_nodes] Total LLM token usage: 0 tokens\n",
-      "INFO:llama_index.token_counter.token_counter:> [build_index_from_nodes] Total embedding token usage: 14402 tokens\n",
-      "INFO:llama_index.token_counter.token_counter:> [build_index_from_nodes] Total LLM token usage: 0 tokens\n",
-      "INFO:llama_index.token_counter.token_counter:> [build_index_from_nodes] Total embedding token usage: 19954 tokens\n",
-      "INFO:llama_index.token_counter.token_counter:> [build_index_from_nodes] Total LLM token usage: 0 tokens\n",
-      "INFO:llama_index.token_counter.token_counter:> [build_index_from_nodes] Total embedding token usage: 22057 tokens\n",
-      "INFO:llama_index.token_counter.token_counter:> [build_index_from_nodes] Total LLM token usage: 0 tokens\n",
-      "INFO:llama_index.token_counter.token_counter:> [build_index_from_nodes] Total embedding token usage: 15733 tokens\n",
-      "INFO:llama_index.token_counter.token_counter:> [build_index_from_nodes] Total LLM token usage: 0 tokens\n",
-      "INFO:llama_index.token_counter.token_counter:> [build_index_from_nodes] Total embedding token usage: 18327 tokens\n",
-      "INFO:llama_index.token_counter.token_counter:> [build_index_from_nodes] Total LLM token usage: 0 tokens\n",
-      "INFO:llama_index.token_counter.token_counter:> [build_index_from_nodes] Total embedding token usage: 10999 tokens\n",
-      "INFO:llama_index.token_counter.token_counter:> [build_index_from_nodes] Total LLM token usage: 0 tokens\n",
-      "INFO:llama_index.token_counter.token_counter:> [build_index_from_nodes] Total embedding token usage: 18480 tokens\n"
-     ]
-    }
-   ],
-   "source": [
-    "# Build city document index\n",
-    "city_indices = {}\n",
-    "for wiki_title in wiki_titles:\n",
-    "    city_indices[wiki_title] = VectorStoreIndex.from_documents(city_docs[wiki_title])"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "d4d3cd8b-4134-4cfa-8002-e0a34694d2e1",
-   "metadata": {
-    "id": "d4d3cd8b-4134-4cfa-8002-e0a34694d2e1",
-    "tags": []
-   },
-   "source": [
-    "### Build Graph: Keyword Table Index on top of vector indices! \n",
-    "\n",
-    "We compose a keyword table index on top of all the vector indices."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 14,
-   "id": "be1e3d7d-c4a3-4268-9408-b3cb984ffa4a",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# set summaries for each city\n",
-    "index_summaries = {}\n",
-    "for wiki_title in wiki_titles:\n",
-    "    # set summary text for city\n",
-    "    index_summaries[wiki_title] = f\"Wikipedia articles about {wiki_title}\""
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 15,
-   "id": "ddc2e4de-0719-4607-86f8-18c953344199",
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "INFO:llama_index.token_counter.token_counter:> [build_index_from_nodes] Total LLM token usage: 0 tokens\n",
-      "INFO:llama_index.token_counter.token_counter:> [build_index_from_nodes] Total embedding token usage: 0 tokens\n"
-     ]
-    }
-   ],
-   "source": [
-    "from llama_index.indices.composability import ComposableGraph\n",
-    "\n",
-    "graph = ComposableGraph.from_indices(\n",
-    "    SimpleKeywordTableIndex,\n",
-    "    [index for _, index in city_indices.items()],\n",
-    "    [summary for _, summary in index_summaries.items()],\n",
-    "    max_keywords_per_chunk=50,\n",
-    ")"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "49c900ee-a31f-4fcd-bb44-ff2cd12a41eb",
-   "metadata": {
-    "id": "49c900ee-a31f-4fcd-bb44-ff2cd12a41eb"
-   },
-   "source": [
-    "### Compare Queries (text-davinci-003 vs. ChatGPT)"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "e0a8fa6a-e96e-4341-bb43-7547415f766e",
-   "metadata": {
-    "id": "e0a8fa6a-e96e-4341-bb43-7547415f766e"
-   },
-   "source": [
-    "**Simple Query**"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 25,
-   "id": "OVnzf3myEz88",
-   "metadata": {
-    "id": "OVnzf3myEz88",
-    "tags": []
-   },
-   "outputs": [
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "INFO:llama_index.indices.keyword_table.retrievers:> Starting query: Tell me more about Boston\n",
-      "INFO:llama_index.indices.keyword_table.retrievers:query keywords: ['tell', 'boston']\n",
-      "INFO:llama_index.indices.keyword_table.retrievers:> Extracted keywords: ['boston']\n",
-      "INFO:llama_index.token_counter.token_counter:> [retrieve] Total LLM token usage: 0 tokens\n",
-      "INFO:llama_index.token_counter.token_counter:> [retrieve] Total embedding token usage: 5 tokens\n",
-      "INFO:llama_index.indices.common_tree.base:> Building index from nodes: 1 chunks\n",
-      "INFO:llama_index.token_counter.token_counter:> [get_response] Total LLM token usage: 802 tokens\n",
-      "INFO:llama_index.token_counter.token_counter:> [get_response] Total embedding token usage: 0 tokens\n",
-      "INFO:llama_index.token_counter.token_counter:> [get_response] Total LLM token usage: 4801 tokens\n",
-      "INFO:llama_index.token_counter.token_counter:> [get_response] Total embedding token usage: 0 tokens\n",
-      "INFO:llama_index.token_counter.token_counter:> [get_response] Total LLM token usage: 545 tokens\n",
-      "INFO:llama_index.token_counter.token_counter:> [get_response] Total embedding token usage: 0 tokens\n",
-      "INFO:llama_index.token_counter.token_counter:> [get_response] Total LLM token usage: 545 tokens\n",
-      "INFO:llama_index.token_counter.token_counter:> [get_response] Total embedding token usage: 0 tokens\n",
-      "INFO:llama_index.indices.keyword_table.retrievers:> Starting query: Tell me more about Boston\n",
-      "INFO:llama_index.indices.keyword_table.retrievers:query keywords: ['tell', 'boston']\n",
-      "INFO:llama_index.indices.keyword_table.retrievers:> Extracted keywords: ['boston']\n",
-      "INFO:llama_index.token_counter.token_counter:> [retrieve] Total LLM token usage: 0 tokens\n",
-      "INFO:llama_index.token_counter.token_counter:> [retrieve] Total embedding token usage: 5 tokens\n",
-      "INFO:llama_index.indices.common_tree.base:> Building index from nodes: 1 chunks\n",
-      "INFO:llama_index.token_counter.token_counter:> [get_response] Total LLM token usage: 641 tokens\n",
-      "INFO:llama_index.token_counter.token_counter:> [get_response] Total embedding token usage: 0 tokens\n",
-      "INFO:llama_index.token_counter.token_counter:> [get_response] Total LLM token usage: 4580 tokens\n",
-      "INFO:llama_index.token_counter.token_counter:> [get_response] Total embedding token usage: 0 tokens\n",
-      "INFO:llama_index.token_counter.token_counter:> [get_response] Total LLM token usage: 308 tokens\n",
-      "INFO:llama_index.token_counter.token_counter:> [get_response] Total embedding token usage: 0 tokens\n",
-      "INFO:llama_index.token_counter.token_counter:> [get_response] Total LLM token usage: 308 tokens\n",
-      "INFO:llama_index.token_counter.token_counter:> [get_response] Total embedding token usage: 0 tokens\n"
-     ]
-    }
-   ],
-   "source": [
-    "query_engine_davinci = graph.as_query_engine(\n",
-    "    custom_query_engines={\n",
-    "        graph.root_index.index_id: graph.root_index.as_query_engine(\n",
-    "            retriever_mode=\"simple\",\n",
-    "            service_context=service_context_davinci,\n",
-    "            response_mode=\"tree_summarize\",\n",
-    "        )\n",
-    "    }\n",
-    ")\n",
-    "query_engine_chatgpt = graph.as_query_engine(\n",
-    "    custom_query_engines={\n",
-    "        graph.root_index.index_id: graph.root_index.as_query_engine(\n",
-    "            retriever_mode=\"simple\",\n",
-    "            service_context=service_context_chatgpt,\n",
-    "            response_mode=\"tree_summarize\",\n",
-    "        )\n",
-    "    }\n",
-    ")\n",
-    "query_str = \"Tell me more about Boston\"\n",
-    "response_davinci = query_engine_davinci.query(query_str)\n",
-    "response_chatgpt = query_engine_chatgpt.query(query_str)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 21,
-   "id": "6f5f5467-fa79-4f48-8b78-32ae8f86d12d",
-   "metadata": {
-    "id": "6f5f5467-fa79-4f48-8b78-32ae8f86d12d",
-    "outputId": "53105550-370a-4281-974d-9b0ae8064e1c"
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "\n",
-      "Boston is the capital and largest city of the Commonwealth of Massachusetts and the cultural and financial center of the New England region of the Northeastern United States. It is one of the oldest municipalities in America, founded on the Shawmut Peninsula in 1630 by Puritan settlers from the English town of the same name. It is a center of scientific research and innovation, with nearly 5,000 startups, and is home to a number of colleges and universities, notably Harvard and MIT. It has a long seafaring tradition, and was a major port for both domestic and international trade in the 19th century. It has seen waves of immigration, with Irish, Germans, Lebanese, Syrians, French Canadians, and Russian and Polish Jews settling in the city. It was an early port of the Atlantic triangular slave trade in the New England colonies, but was soon overtaken. Boston is also known for its philanthropy, with households in the city claiming the highest average rate of philanthropy in the United States.\n"
-     ]
-    }
-   ],
-   "source": [
-    "print(response_davinci)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 22,
-   "id": "29f32345-6f28-4545-afa9-e3c5849dfb82",
-   "metadata": {
-    "id": "29f32345-6f28-4545-afa9-e3c5849dfb82",
-    "outputId": "904002ea-f062-4f7d-8fe6-3e6b7b13b420"
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Boston is a city in the New England region of the United States with a population of 675,647 as of 2020. It is known for its rich history and is considered the economic and cultural center of the region. The city has many firsts, including the first public park, first public or state school, first subway system, and first large public library in the United States. Boston is also a global pioneer in innovation and entrepreneurship, with nearly 5,000 startups. The city's economy includes finance, professional and business services, biotechnology, information technology, and government activities. Boston is a popular tourist destination, with Faneuil Hall alone drawing more than 20 million visitors per year. The city is home to many prestigious hospitals and universities, including Massachusetts General Hospital, Harvard Medical School, and Boston University.\n"
-     ]
-    }
-   ],
-   "source": [
-    "print(response_chatgpt)"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "018d0a51-3a3f-4dc5-9e1d-f2e79eb0cc43",
-   "metadata": {
-    "id": "018d0a51-3a3f-4dc5-9e1d-f2e79eb0cc43"
-   },
-   "source": [
-    "**Complex Query 1**"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "730b7a1f-5197-4cdf-add2-9b46c07465f3",
-   "metadata": {
-    "id": "730b7a1f-5197-4cdf-add2-9b46c07465f3"
-   },
-   "outputs": [],
-   "source": [
-    "query_str = (\n",
-    "    \"Tell me the airports in Seattle, Houston, and Toronto. \"\n",
-    "    \"If only one city is provided, return the airport information for that city. \"\n",
-    "    \"If airports for multiple cities are provided, compare and contrast the airports. \"\n",
-    ")\n",
-    "response_davinci = query_engine_davinci.query(query_str)\n",
-    "response_chatgpt = query_engine_chatgpt.query(query_str)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "ee57efaa-dd8e-45af-968c-45d9bf92b948",
-   "metadata": {
-    "id": "ee57efaa-dd8e-45af-968c-45d9bf92b948",
-    "outputId": "8b70b13d-c07a-4685-bd1d-b0e776607ad5"
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "\n",
-      "The airports in Seattle, Houston, and Toronto are Seattle–Tacoma International Airport (IATA: SEA), George Bush Intercontinental Airport (IATA: IAH), Toronto Pearson International Airport (IATA: YYZ), and Billy Bishop Toronto City Airport (IATA: YTZ). Seattle–Tacoma International Airport is the largest airport in the Pacific Northwest region of the United States, serving over 44 million passengers annually. George Bush Intercontinental Airport is the largest airport in Houston, serving over 40 million passengers annually. Toronto Pearson International Airport is the busiest airport in Canada, serving over 50 million passengers annually. Billy Bishop Toronto City Airport is a smaller airport located on the Toronto Islands, serving over 2 million passengers annually.\n"
-     ]
-    }
-   ],
-   "source": [
-    "print(response_davinci)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "e499b388-5a1c-4047-8fee-122dfe73c800",
-   "metadata": {
-    "id": "e499b388-5a1c-4047-8fee-122dfe73c800",
-    "outputId": "ca0c8d9d-2f7c-4d80-a793-a79cb3b243ed"
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Airports in Seattle: Seattle-Tacoma International Airport.\n",
-      "Airports in Houston: George Bush Intercontinental Airport, William P. Hobby Airport, and Ellington Airport.\n",
-      "Airports in Toronto: Toronto Pearson International Airport, Billy Bishop Toronto City Airport, Buttonville Municipal Airport, and Downsview Airport.\n",
-      "\n",
-      "Seattle has one major airport, Seattle-Tacoma International Airport. Houston has three airports: George Bush Intercontinental Airport, William P. Hobby Airport, and Ellington Airport. Toronto has four airports: Toronto Pearson International Airport, Billy Bishop Toronto City Airport, Buttonville Municipal Airport, and Downsview Airport. Toronto has a mix of commercial and smaller airports, while Houston has a mix of commercial, military, government, and general aviation airports.\n"
-     ]
-    }
-   ],
-   "source": [
-    "print(response_chatgpt)"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "d3cb4d7b-7bcc-46bf-b7d6-d0230c3d7fdd",
-   "metadata": {
-    "id": "d3cb4d7b-7bcc-46bf-b7d6-d0230c3d7fdd"
-   },
-   "source": [
-    "**Complex Query 2**"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "a0fa2840-77b2-42c3-b6af-39fbe02c78ce",
-   "metadata": {
-    "id": "a0fa2840-77b2-42c3-b6af-39fbe02c78ce"
-   },
-   "outputs": [],
-   "source": [
-    "query_str = (\n",
-    "    \"Look at Houston and Boston. \"\n",
-    "    \"If only one city is provided, provide information about the sports teams for that city. \"\n",
-    "    \"If context for multiple cities are provided, compare and contrast the sports environment of the cities. \"\n",
-    ")\n",
-    "response_davinci = query_engine_davinci.query(query_str)\n",
-    "response_chatgpt = query_engine_chatgpt.query(query_str)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "3af1e27f-7697-4cbc-ba38-a7dc11330dc0",
-   "metadata": {
-    "id": "3af1e27f-7697-4cbc-ba38-a7dc11330dc0",
-    "outputId": "3d394401-ad19-4fa6-97fe-6bae70f0beff"
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "\n",
-      "Houston has teams for every major professional league. The Houston Astros are a Major League Baseball team that have won the World Series in 2017, 2022, and appeared in it in 2005, 2019, and 2021. The Houston Rockets are a National Basketball Association franchise based in the city since 1971, and have won two NBA Championships. The Houston Texans are a National Football League expansion team formed in 2002, and the Houston Dynamo is a Major League Soccer franchise that has been based in Houston since 2006, winning two MLS Cup titles. The Houston Dash team plays in the National Women's Soccer League, and the Houston SaberCats are a rugby team that plays in Major League Rugby. \n",
-      "\n",
-      "Boston also has teams for every major professional league. The Boston Red Sox are a Major League Baseball team that have won the World Series in 2004, 2007, 2013, and 2018. The Boston Celtics are a National Basketball Association team that have won 17 championships, most recently in 2008. The Boston Bruins are a National Hockey League team that have won six Stanley Cup championships, most recently in 2011. The New England Revolution is a Major League Soccer team that has been based in Boston since 1996. During a particularly impressive 17-year stretch from 2001 to 2018, the city's professional sports teams won twelve championships\n"
-     ]
-    }
-   ],
-   "source": [
-    "print(response_davinci)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "226ee4f5-c941-4497-a04c-630757622282",
-   "metadata": {
-    "id": "226ee4f5-c941-4497-a04c-630757622282",
-    "outputId": "c8b0c521-d2e7-4ba6-dc9f-52189fbf0b9b"
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "If only one city is provided, Houston has sports teams for every major professional league except the National Hockey League, including the Houston Astros (MLB), Houston Rockets (NBA), Houston Texans (NFL), Houston Dynamo (MLS), Houston Dash (National Women's Soccer League), and Houston SaberCats (rugby).\n",
-      "\n",
-      "If context for multiple cities are provided, Boston has teams in the four major North American men's professional sports leagues plus Major League Soccer, and has won 39 championships in these leagues. Boston is one of eight cities to have won championships in all four major American sports leagues. During a particularly impressive 17-year stretch from 2001 to 2018, the city's professional sports teams won twelve championships. The Celtics and Bruins remain competitive for titles in the century’s third decade, though the Patriots and Red Sox have fallen off from these recent glory days. In contrast, Houston has not won as many championships as Boston, but has hosted several major sports events, including the Super Bowl and World Series. Houston is also home to the first major esports team, the Houston Outlaws.\n"
-     ]
-    }
-   ],
-   "source": [
-    "print(response_chatgpt)"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "53f527c8-0d53-4b29-8f1f-7b5bf22ca55e",
-   "metadata": {
-    "id": "53f527c8-0d53-4b29-8f1f-7b5bf22ca55e"
-   },
-   "source": [
-    "**Complex Query 3**"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "5b11d9c6-1905-4bd1-bb9a-4d60b0bc3c2d",
-   "metadata": {
-    "id": "5b11d9c6-1905-4bd1-bb9a-4d60b0bc3c2d"
-   },
-   "outputs": [],
-   "source": [
-    "query_str = (\n",
-    "    \"Look at Houston and Boston. \"\n",
-    "    \"If only one city is provided, provide information about the arts and culture for that city. \"\n",
-    "    \"If context for multiple cities are provided, compare and contrast the arts and culture of the two cities. \"\n",
-    ")\n",
-    "response_davinci = query_engine_davinci.query(query_str)\n",
-    "response_chatgpt = query_engine_chatgpt.query(query_str)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "4ccbbcf6-3074-4d8e-9ad4-92daa13a67dc",
-   "metadata": {
-    "id": "4ccbbcf6-3074-4d8e-9ad4-92daa13a67dc",
-    "outputId": "28429b4e-1854-44e8-8dcd-850f7ca7d0c2"
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "\n",
-      "Houston and Boston both have a wide range of cultural attractions. In Houston, the Theater District is a 17-block area in the center of Downtown Houston that is home to the Bayou Place entertainment complex, restaurants, movies, plazas, and parks. The Museum District's cultural institutions and exhibits attract more than 7 million visitors a year. Notable facilities include The Museum of Fine Arts, the Houston Museum of Natural Science, the Contemporary Arts Museum Houston, the Station Museum of Contemporary Art, the Holocaust Museum Houston, the Children's Museum of Houston, and the Houston Zoo. Houston also has many annual events celebrating the diverse cultures of the city, such as the Houston Livestock Show and Rodeo, the Houston Gay Pride Parade, the Houston Greek Festival, Art Car Parade, the Houston Auto Show, the Houston International Festival, and the Bayou City Art Festival.\n",
-      "\n",
-      "In Boston, the Freedom Trail is a 2.5-mile walking tour of 16 historically significant sites in downtown Boston. The Museum of Fine Arts is one of the largest and most comprehensive art museums in the world, with more than 450,000 works of art. Boston also has many annual events celebrating the diverse cultures of the city, such as the Boston Marathon, the Boston Arts Festival\n"
-     ]
-    }
-   ],
-   "source": [
-    "print(response_davinci)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "10c6ca94-c053-4009-b52d-a5255e74853c",
-   "metadata": {
-    "id": "10c6ca94-c053-4009-b52d-a5255e74853c",
-    "outputId": "b4575737-59e2-43b5-85e2-c51ffe0f8cdd"
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "There is no information about the arts and culture of Houston provided, but for Boston, there is a rich cultural history with a strong literary culture and a center for classical music. The city is also home to several art museums and galleries, including the Museum of Fine Arts and the Isabella Stewart Gardner Museum. The Institute of Contemporary Art is housed in a contemporary building designed by Diller Scofidio + Renfro in the Seaport District. Boston's South End Art and Design District (SoWa) and Newbury St. are both art gallery destinations.\n"
-     ]
-    }
-   ],
-   "source": [
-    "print(response_chatgpt)"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "7b299ebe-cdbd-4abf-9015-4894f6aa94ba",
-   "metadata": {
-    "id": "7b299ebe-cdbd-4abf-9015-4894f6aa94ba"
-   },
-   "source": [
-    "**Complex Query 4**"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "261e3881-6414-4ace-9816-aa71a39051b5",
-   "metadata": {
-    "id": "261e3881-6414-4ace-9816-aa71a39051b5"
-   },
-   "outputs": [],
-   "source": [
-    "query_str = (\n",
-    "    \"Look at Toronto and San Francisco. \"\n",
-    "    \"If only one city is provided, provide information about the demographics for that city. \"\n",
-    "    \"If context for multiple cities are provided, compare and contrast the demographics of the two cities. \"\n",
-    ")\n",
-    "response_davinci = query_engine_davinci.query(query_str)\n",
-    "response_chatgpt = query_engine_chatgpt.query(query_str)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "f58ca597-8a40-4fa0-995b-f54cff133ec8",
-   "metadata": {
-    "id": "f58ca597-8a40-4fa0-995b-f54cff133ec8",
-    "outputId": "7fefef2f-78b8-47c3-ade3-5a8673a264e1"
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "\n",
-      "In Toronto, the population is 2,731,571 people, with a median age of 39.2 years. The racial makeup of the city is 51.5% White, 20.3% Asian, 8.6% African American, 0.8% Native American, 0.2% Pacific Islander, and 18.6% from other races. The city is also home to a large Hispanic population, making up 6.2% of the population. The three most commonly reported ethnic origins are White (46.9%), Asian (20.3%), and Black (8.6%). Christianity is the most commonly reported religion (48.4%), followed by no religion and secular perspectives (31.2%). English is the predominant language spoken by Torontonians with approximately 79% of residents having proficiency in the language, although only 43.2% of Torontonians reported English as their mother tongue.\n",
-      "\n",
-      "When comparing Toronto and San Francisco, we can see that Toronto has a larger population than San Francisco, with a median age that is slightly higher. The racial makeup of Toronto is slightly more White than San Francisco, while San Francisco has a larger Asian population. The Hispanic population is larger in San Francisco than in Toronto. Christianity is the\n"
-     ]
-    }
-   ],
-   "source": [
-    "print(response_davinci)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "c4fca866-b5fd-493b-8e2f-33dbe485c463",
-   "metadata": {
-    "id": "c4fca866-b5fd-493b-8e2f-33dbe485c463",
-    "outputId": "528a27a4-bef3-4e4a-d788-57958739dee6"
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Only information about Toronto is provided in the context, so demographics for Toronto can be provided. However, there is no context information about San Francisco to compare and contrast with Toronto.\n"
-     ]
-    }
-   ],
-   "source": [
-    "print(response_chatgpt)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "ab6d123c-afdf-4aea-8e5a-9513891ba799",
-   "metadata": {
-    "id": "ab6d123c-afdf-4aea-8e5a-9513891ba799"
-   },
-   "outputs": [],
-   "source": []
-  }
- ],
- "metadata": {
-  "colab": {
-   "provenance": []
-  },
-  "kernelspec": {
-   "display_name": "llama",
-   "language": "python",
-   "name": "python3"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 3
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython3",
-   "version": "3.9.16"
-  },
-  "widgets": {
-   "application/vnd.jupyter.widget-state+json": {
-    "026cc1a42e154f1f92b5236869311929": {
-     "model_module": "@jupyter-widgets/controls",
-     "model_module_version": "1.5.0",
-     "model_name": "FloatProgressModel",
-     "state": {
-      "_dom_classes": [],
-      "_model_module": "@jupyter-widgets/controls",
-      "_model_module_version": "1.5.0",
-      "_model_name": "FloatProgressModel",
-      "_view_count": null,
-      "_view_module": "@jupyter-widgets/controls",
-      "_view_module_version": "1.5.0",
-      "_view_name": "ProgressView",
-      "bar_style": "success",
-      "description": "",
-      "description_tooltip": null,
-      "layout": "IPY_MODEL_4cc9ec6ba46647aba2d53e352f91c137",
-      "max": 665,
-      "min": 0,
-      "orientation": "horizontal",
-      "style": "IPY_MODEL_f2a1c5087d0e44909139697ed90474e8",
-      "value": 665
-     }
-    },
-    "028aa5d1f7a74d538b5c606d4a6d146f": {
-     "model_module": "@jupyter-widgets/base",
-     "model_module_version": "1.2.0",
-     "model_name": "LayoutModel",
-     "state": {
-      "_model_module": "@jupyter-widgets/base",
-      "_model_module_version": "1.2.0",
-      "_model_name": "LayoutModel",
-      "_view_count": null,
-      "_view_module": "@jupyter-widgets/base",
-      "_view_module_version": "1.2.0",
-      "_view_name": "LayoutView",
-      "align_content": null,
-      "align_items": null,
-      "align_self": null,
-      "border": null,
-      "bottom": null,
-      "display": null,
-      "flex": null,
-      "flex_flow": null,
-      "grid_area": null,
-      "grid_auto_columns": null,
-      "grid_auto_flow": null,
-      "grid_auto_rows": null,
-      "grid_column": null,
-      "grid_gap": null,
-      "grid_row": null,
-      "grid_template_areas": null,
-      "grid_template_columns": null,
-      "grid_template_rows": null,
-      "height": null,
-      "justify_content": null,
-      "justify_items": null,
-      "left": null,
-      "margin": null,
-      "max_height": null,
-      "max_width": null,
-      "min_height": null,
-      "min_width": null,
-      "object_fit": null,
-      "object_position": null,
-      "order": null,
-      "overflow": null,
-      "overflow_x": null,
-      "overflow_y": null,
-      "padding": null,
-      "right": null,
-      "top": null,
-      "visibility": null,
-      "width": null
-     }
-    },
-    "119d6d7a8d524aa49170f5784ebc6b9e": {
-     "model_module": "@jupyter-widgets/controls",
-     "model_module_version": "1.5.0",
-     "model_name": "DescriptionStyleModel",
-     "state": {
-      "_model_module": "@jupyter-widgets/controls",
-      "_model_module_version": "1.5.0",
-      "_model_name": "DescriptionStyleModel",
-      "_view_count": null,
-      "_view_module": "@jupyter-widgets/base",
-      "_view_module_version": "1.2.0",
-      "_view_name": "StyleView",
-      "description_width": ""
-     }
-    },
-    "1bdaf4dab16f48dbaeed3fb9bf268e45": {
-     "model_module": "@jupyter-widgets/controls",
-     "model_module_version": "1.5.0",
-     "model_name": "HTMLModel",
-     "state": {
-      "_dom_classes": [],
-      "_model_module": "@jupyter-widgets/controls",
-      "_model_module_version": "1.5.0",
-      "_model_name": "HTMLModel",
-      "_view_count": null,
-      "_view_module": "@jupyter-widgets/controls",
-      "_view_module_version": "1.5.0",
-      "_view_name": "HTMLView",
-      "description": "",
-      "description_tooltip": null,
-      "layout": "IPY_MODEL_028aa5d1f7a74d538b5c606d4a6d146f",
-      "placeholder": "​",
-      "style": "IPY_MODEL_c078fe9a056a473dab7d474cd7907154",
-      "value": "Downloading (…)lve/main/config.json: 100%"
-     }
-    },
-    "2053e6adef1b4dba89f861eaf3d916fd": {
-     "model_module": "@jupyter-widgets/controls",
-     "model_module_version": "1.5.0",
-     "model_name": "DescriptionStyleModel",
-     "state": {
-      "_model_module": "@jupyter-widgets/controls",
-      "_model_module_version": "1.5.0",
-      "_model_name": "DescriptionStyleModel",
-      "_view_count": null,
-      "_view_module": "@jupyter-widgets/base",
-      "_view_module_version": "1.2.0",
-      "_view_name": "StyleView",
-      "description_width": ""
-     }
-    },
-    "208d404f405a42a3b06d65ad67fb7322": {
-     "model_module": "@jupyter-widgets/controls",
-     "model_module_version": "1.5.0",
-     "model_name": "HTMLModel",
-     "state": {
-      "_dom_classes": [],
-      "_model_module": "@jupyter-widgets/controls",
-      "_model_module_version": "1.5.0",
-      "_model_name": "HTMLModel",
-      "_view_count": null,
-      "_view_module": "@jupyter-widgets/controls",
-      "_view_module_version": "1.5.0",
-      "_view_name": "HTMLView",
-      "description": "",
-      "description_tooltip": null,
-      "layout": "IPY_MODEL_3fef46c902524717b377dee6c1dfc929",
-      "placeholder": "​",
-      "style": "IPY_MODEL_fd8b887c1f7149f2876cf8a31e534ad6",
-      "value": "Downloading (…)olve/main/vocab.json: 100%"
-     }
-    },
-    "380a0c11434241b191b17421e395be8b": {
-     "model_module": "@jupyter-widgets/base",
-     "model_module_version": "1.2.0",
-     "model_name": "LayoutModel",
-     "state": {
-      "_model_module": "@jupyter-widgets/base",
-      "_model_module_version": "1.2.0",
-      "_model_name": "LayoutModel",
-      "_view_count": null,
-      "_view_module": "@jupyter-widgets/base",
-      "_view_module_version": "1.2.0",
-      "_view_name": "LayoutView",
-      "align_content": null,
-      "align_items": null,
-      "align_self": null,
-      "border": null,
-      "bottom": null,
-      "display": null,
-      "flex": null,
-      "flex_flow": null,
-      "grid_area": null,
-      "grid_auto_columns": null,
-      "grid_auto_flow": null,
-      "grid_auto_rows": null,
-      "grid_column": null,
-      "grid_gap": null,
-      "grid_row": null,
-      "grid_template_areas": null,
-      "grid_template_columns": null,
-      "grid_template_rows": null,
-      "height": null,
-      "justify_content": null,
-      "justify_items": null,
-      "left": null,
-      "margin": null,
-      "max_height": null,
-      "max_width": null,
-      "min_height": null,
-      "min_width": null,
-      "object_fit": null,
-      "object_position": null,
-      "order": null,
-      "overflow": null,
-      "overflow_x": null,
-      "overflow_y": null,
-      "padding": null,
-      "right": null,
-      "top": null,
-      "visibility": null,
-      "width": null
-     }
-    },
-    "398f1c0f56fe4f218d999df138adfdac": {
-     "model_module": "@jupyter-widgets/base",
-     "model_module_version": "1.2.0",
-     "model_name": "LayoutModel",
-     "state": {
-      "_model_module": "@jupyter-widgets/base",
-      "_model_module_version": "1.2.0",
-      "_model_name": "LayoutModel",
-      "_view_count": null,
-      "_view_module": "@jupyter-widgets/base",
-      "_view_module_version": "1.2.0",
-      "_view_name": "LayoutView",
-      "align_content": null,
-      "align_items": null,
-      "align_self": null,
-      "border": null,
-      "bottom": null,
-      "display": null,
-      "flex": null,
-      "flex_flow": null,
-      "grid_area": null,
-      "grid_auto_columns": null,
-      "grid_auto_flow": null,
-      "grid_auto_rows": null,
-      "grid_column": null,
-      "grid_gap": null,
-      "grid_row": null,
-      "grid_template_areas": null,
-      "grid_template_columns": null,
-      "grid_template_rows": null,
-      "height": null,
-      "justify_content": null,
-      "justify_items": null,
-      "left": null,
-      "margin": null,
-      "max_height": null,
-      "max_width": null,
-      "min_height": null,
-      "min_width": null,
-      "object_fit": null,
-      "object_position": null,
-      "order": null,
-      "overflow": null,
-      "overflow_x": null,
-      "overflow_y": null,
-      "padding": null,
-      "right": null,
-      "top": null,
-      "visibility": null,
-      "width": null
-     }
-    },
-    "3b4c1066797b43a586611ec2d63e7ca1": {
-     "model_module": "@jupyter-widgets/controls",
-     "model_module_version": "1.5.0",
-     "model_name": "HBoxModel",
-     "state": {
-      "_dom_classes": [],
-      "_model_module": "@jupyter-widgets/controls",
-      "_model_module_version": "1.5.0",
-      "_model_name": "HBoxModel",
-      "_view_count": null,
-      "_view_module": "@jupyter-widgets/controls",
-      "_view_module_version": "1.5.0",
-      "_view_name": "HBoxView",
-      "box_style": "",
-      "children": [
-       "IPY_MODEL_c06865c1e01a441698dacf48600dd03c",
-       "IPY_MODEL_9d229e5dd56e4d539ca2c1b9f0a37812",
-       "IPY_MODEL_868aa268dd28498d902782215e53c6fa"
-      ],
-      "layout": "IPY_MODEL_46f644cf589e4a48a6fad1742f0c0575"
-     }
-    },
-    "3c37e72850c746ce9c919add5340dede": {
-     "model_module": "@jupyter-widgets/base",
-     "model_module_version": "1.2.0",
-     "model_name": "LayoutModel",
-     "state": {
-      "_model_module": "@jupyter-widgets/base",
-      "_model_module_version": "1.2.0",
-      "_model_name": "LayoutModel",
-      "_view_count": null,
-      "_view_module": "@jupyter-widgets/base",
-      "_view_module_version": "1.2.0",
-      "_view_name": "LayoutView",
-      "align_content": null,
-      "align_items": null,
-      "align_self": null,
-      "border": null,
-      "bottom": null,
-      "display": null,
-      "flex": null,
-      "flex_flow": null,
-      "grid_area": null,
-      "grid_auto_columns": null,
-      "grid_auto_flow": null,
-      "grid_auto_rows": null,
-      "grid_column": null,
-      "grid_gap": null,
-      "grid_row": null,
-      "grid_template_areas": null,
-      "grid_template_columns": null,
-      "grid_template_rows": null,
-      "height": null,
-      "justify_content": null,
-      "justify_items": null,
-      "left": null,
-      "margin": null,
-      "max_height": null,
-      "max_width": null,
-      "min_height": null,
-      "min_width": null,
-      "object_fit": null,
-      "object_position": null,
-      "order": null,
-      "overflow": null,
-      "overflow_x": null,
-      "overflow_y": null,
-      "padding": null,
-      "right": null,
-      "top": null,
-      "visibility": null,
-      "width": null
-     }
-    },
-    "3f748152b9274556afad2555572aa9f4": {
-     "model_module": "@jupyter-widgets/controls",
-     "model_module_version": "1.5.0",
-     "model_name": "DescriptionStyleModel",
-     "state": {
-      "_model_module": "@jupyter-widgets/controls",
-      "_model_module_version": "1.5.0",
-      "_model_name": "DescriptionStyleModel",
-      "_view_count": null,
-      "_view_module": "@jupyter-widgets/base",
-      "_view_module_version": "1.2.0",
-      "_view_name": "StyleView",
-      "description_width": ""
-     }
-    },
-    "3fef46c902524717b377dee6c1dfc929": {
-     "model_module": "@jupyter-widgets/base",
-     "model_module_version": "1.2.0",
-     "model_name": "LayoutModel",
-     "state": {
-      "_model_module": "@jupyter-widgets/base",
-      "_model_module_version": "1.2.0",
-      "_model_name": "LayoutModel",
-      "_view_count": null,
-      "_view_module": "@jupyter-widgets/base",
-      "_view_module_version": "1.2.0",
-      "_view_name": "LayoutView",
-      "align_content": null,
-      "align_items": null,
-      "align_self": null,
-      "border": null,
-      "bottom": null,
-      "display": null,
-      "flex": null,
-      "flex_flow": null,
-      "grid_area": null,
-      "grid_auto_columns": null,
-      "grid_auto_flow": null,
-      "grid_auto_rows": null,
-      "grid_column": null,
-      "grid_gap": null,
-      "grid_row": null,
-      "grid_template_areas": null,
-      "grid_template_columns": null,
-      "grid_template_rows": null,
-      "height": null,
-      "justify_content": null,
-      "justify_items": null,
-      "left": null,
-      "margin": null,
-      "max_height": null,
-      "max_width": null,
-      "min_height": null,
-      "min_width": null,
-      "object_fit": null,
-      "object_position": null,
-      "order": null,
-      "overflow": null,
-      "overflow_x": null,
-      "overflow_y": null,
-      "padding": null,
-      "right": null,
-      "top": null,
-      "visibility": null,
-      "width": null
-     }
-    },
-    "40e148c291ad4f739998a7eac55a8af6": {
-     "model_module": "@jupyter-widgets/base",
-     "model_module_version": "1.2.0",
-     "model_name": "LayoutModel",
-     "state": {
-      "_model_module": "@jupyter-widgets/base",
-      "_model_module_version": "1.2.0",
-      "_model_name": "LayoutModel",
-      "_view_count": null,
-      "_view_module": "@jupyter-widgets/base",
-      "_view_module_version": "1.2.0",
-      "_view_name": "LayoutView",
-      "align_content": null,
-      "align_items": null,
-      "align_self": null,
-      "border": null,
-      "bottom": null,
-      "display": null,
-      "flex": null,
-      "flex_flow": null,
-      "grid_area": null,
-      "grid_auto_columns": null,
-      "grid_auto_flow": null,
-      "grid_auto_rows": null,
-      "grid_column": null,
-      "grid_gap": null,
-      "grid_row": null,
-      "grid_template_areas": null,
-      "grid_template_columns": null,
-      "grid_template_rows": null,
-      "height": null,
-      "justify_content": null,
-      "justify_items": null,
-      "left": null,
-      "margin": null,
-      "max_height": null,
-      "max_width": null,
-      "min_height": null,
-      "min_width": null,
-      "object_fit": null,
-      "object_position": null,
-      "order": null,
-      "overflow": null,
-      "overflow_x": null,
-      "overflow_y": null,
-      "padding": null,
-      "right": null,
-      "top": null,
-      "visibility": null,
-      "width": null
-     }
-    },
-    "46f644cf589e4a48a6fad1742f0c0575": {
-     "model_module": "@jupyter-widgets/base",
-     "model_module_version": "1.2.0",
-     "model_name": "LayoutModel",
-     "state": {
-      "_model_module": "@jupyter-widgets/base",
-      "_model_module_version": "1.2.0",
-      "_model_name": "LayoutModel",
-      "_view_count": null,
-      "_view_module": "@jupyter-widgets/base",
-      "_view_module_version": "1.2.0",
-      "_view_name": "LayoutView",
-      "align_content": null,
-      "align_items": null,
-      "align_self": null,
-      "border": null,
-      "bottom": null,
-      "display": null,
-      "flex": null,
-      "flex_flow": null,
-      "grid_area": null,
-      "grid_auto_columns": null,
-      "grid_auto_flow": null,
-      "grid_auto_rows": null,
-      "grid_column": null,
-      "grid_gap": null,
-      "grid_row": null,
-      "grid_template_areas": null,
-      "grid_template_columns": null,
-      "grid_template_rows": null,
-      "height": null,
-      "justify_content": null,
-      "justify_items": null,
-      "left": null,
-      "margin": null,
-      "max_height": null,
-      "max_width": null,
-      "min_height": null,
-      "min_width": null,
-      "object_fit": null,
-      "object_position": null,
-      "order": null,
-      "overflow": null,
-      "overflow_x": null,
-      "overflow_y": null,
-      "padding": null,
-      "right": null,
-      "top": null,
-      "visibility": null,
-      "width": null
-     }
-    },
-    "47838fa763ca40598b2622a9d1e79444": {
-     "model_module": "@jupyter-widgets/controls",
-     "model_module_version": "1.5.0",
-     "model_name": "HTMLModel",
-     "state": {
-      "_dom_classes": [],
-      "_model_module": "@jupyter-widgets/controls",
-      "_model_module_version": "1.5.0",
-      "_model_name": "HTMLModel",
-      "_view_count": null,
-      "_view_module": "@jupyter-widgets/controls",
-      "_view_module_version": "1.5.0",
-      "_view_name": "HTMLView",
-      "description": "",
-      "description_tooltip": null,
-      "layout": "IPY_MODEL_b102e756f9b848a98f58396fc825be84",
-      "placeholder": "​",
-      "style": "IPY_MODEL_fbd7219af1924d2ead5310eb7b35aab0",
-      "value": " 1.04M/1.04M [00:00&lt;00:00, 23.7MB/s]"
-     }
-    },
-    "4cc9ec6ba46647aba2d53e352f91c137": {
-     "model_module": "@jupyter-widgets/base",
-     "model_module_version": "1.2.0",
-     "model_name": "LayoutModel",
-     "state": {
-      "_model_module": "@jupyter-widgets/base",
-      "_model_module_version": "1.2.0",
-      "_model_name": "LayoutModel",
-      "_view_count": null,
-      "_view_module": "@jupyter-widgets/base",
-      "_view_module_version": "1.2.0",
-      "_view_name": "LayoutView",
-      "align_content": null,
-      "align_items": null,
-      "align_self": null,
-      "border": null,
-      "bottom": null,
-      "display": null,
-      "flex": null,
-      "flex_flow": null,
-      "grid_area": null,
-      "grid_auto_columns": null,
-      "grid_auto_flow": null,
-      "grid_auto_rows": null,
-      "grid_column": null,
-      "grid_gap": null,
-      "grid_row": null,
-      "grid_template_areas": null,
-      "grid_template_columns": null,
-      "grid_template_rows": null,
-      "height": null,
-      "justify_content": null,
-      "justify_items": null,
-      "left": null,
-      "margin": null,
-      "max_height": null,
-      "max_width": null,
-      "min_height": null,
-      "min_width": null,
-      "object_fit": null,
-      "object_position": null,
-      "order": null,
-      "overflow": null,
-      "overflow_x": null,
-      "overflow_y": null,
-      "padding": null,
-      "right": null,
-      "top": null,
-      "visibility": null,
-      "width": null
-     }
-    },
-    "64b754f563834be0a6963349b1f2dcf2": {
-     "model_module": "@jupyter-widgets/controls",
-     "model_module_version": "1.5.0",
-     "model_name": "HTMLModel",
-     "state": {
-      "_dom_classes": [],
-      "_model_module": "@jupyter-widgets/controls",
-      "_model_module_version": "1.5.0",
-      "_model_name": "HTMLModel",
-      "_view_count": null,
-      "_view_module": "@jupyter-widgets/controls",
-      "_view_module_version": "1.5.0",
-      "_view_name": "HTMLView",
-      "description": "",
-      "description_tooltip": null,
-      "layout": "IPY_MODEL_a02534c347aa4865ab4ab3de3a3ee2f5",
-      "placeholder": "​",
-      "style": "IPY_MODEL_b0ccb9d9d96e4ed8bec4d540c34d337c",
-      "value": "Downloading (…)/main/tokenizer.json: 100%"
-     }
-    },
-    "7438aea716f44d85ad1c2b49a93acd83": {
-     "model_module": "@jupyter-widgets/base",
-     "model_module_version": "1.2.0",
-     "model_name": "LayoutModel",
-     "state": {
-      "_model_module": "@jupyter-widgets/base",
-      "_model_module_version": "1.2.0",
-      "_model_name": "LayoutModel",
-      "_view_count": null,
-      "_view_module": "@jupyter-widgets/base",
-      "_view_module_version": "1.2.0",
-      "_view_name": "LayoutView",
-      "align_content": null,
-      "align_items": null,
-      "align_self": null,
-      "border": null,
-      "bottom": null,
-      "display": null,
-      "flex": null,
-      "flex_flow": null,
-      "grid_area": null,
-      "grid_auto_columns": null,
-      "grid_auto_flow": null,
-      "grid_auto_rows": null,
-      "grid_column": null,
-      "grid_gap": null,
-      "grid_row": null,
-      "grid_template_areas": null,
-      "grid_template_columns": null,
-      "grid_template_rows": null,
-      "height": null,
-      "justify_content": null,
-      "justify_items": null,
-      "left": null,
-      "margin": null,
-      "max_height": null,
-      "max_width": null,
-      "min_height": null,
-      "min_width": null,
-      "object_fit": null,
-      "object_position": null,
-      "order": null,
-      "overflow": null,
-      "overflow_x": null,
-      "overflow_y": null,
-      "padding": null,
-      "right": null,
-      "top": null,
-      "visibility": null,
-      "width": null
-     }
-    },
-    "7b24b46d6c3643e581ba003a9c473745": {
-     "model_module": "@jupyter-widgets/base",
-     "model_module_version": "1.2.0",
-     "model_name": "LayoutModel",
-     "state": {
-      "_model_module": "@jupyter-widgets/base",
-      "_model_module_version": "1.2.0",
-      "_model_name": "LayoutModel",
-      "_view_count": null,
-      "_view_module": "@jupyter-widgets/base",
-      "_view_module_version": "1.2.0",
-      "_view_name": "LayoutView",
-      "align_content": null,
-      "align_items": null,
-      "align_self": null,
-      "border": null,
-      "bottom": null,
-      "display": null,
-      "flex": null,
-      "flex_flow": null,
-      "grid_area": null,
-      "grid_auto_columns": null,
-      "grid_auto_flow": null,
-      "grid_auto_rows": null,
-      "grid_column": null,
-      "grid_gap": null,
-      "grid_row": null,
-      "grid_template_areas": null,
-      "grid_template_columns": null,
-      "grid_template_rows": null,
-      "height": null,
-      "justify_content": null,
-      "justify_items": null,
-      "left": null,
-      "margin": null,
-      "max_height": null,
-      "max_width": null,
-      "min_height": null,
-      "min_width": null,
-      "object_fit": null,
-      "object_position": null,
-      "order": null,
-      "overflow": null,
-      "overflow_x": null,
-      "overflow_y": null,
-      "padding": null,
-      "right": null,
-      "top": null,
-      "visibility": null,
-      "width": null
-     }
-    },
-    "7b47c78391a4431aa2d3f84677f24046": {
-     "model_module": "@jupyter-widgets/controls",
-     "model_module_version": "1.5.0",
-     "model_name": "DescriptionStyleModel",
-     "state": {
-      "_model_module": "@jupyter-widgets/controls",
-      "_model_module_version": "1.5.0",
-      "_model_name": "DescriptionStyleModel",
-      "_view_count": null,
-      "_view_module": "@jupyter-widgets/base",
-      "_view_module_version": "1.2.0",
-      "_view_name": "StyleView",
-      "description_width": ""
-     }
-    },
-    "7da29a2b6508494282acbc459eccbb96": {
-     "model_module": "@jupyter-widgets/controls",
-     "model_module_version": "1.5.0",
-     "model_name": "FloatProgressModel",
-     "state": {
-      "_dom_classes": [],
-      "_model_module": "@jupyter-widgets/controls",
-      "_model_module_version": "1.5.0",
-      "_model_name": "FloatProgressModel",
-      "_view_count": null,
-      "_view_module": "@jupyter-widgets/controls",
-      "_view_module_version": "1.5.0",
-      "_view_name": "ProgressView",
-      "bar_style": "success",
-      "description": "",
-      "description_tooltip": null,
-      "layout": "IPY_MODEL_7438aea716f44d85ad1c2b49a93acd83",
-      "max": 1042301,
-      "min": 0,
-      "orientation": "horizontal",
-      "style": "IPY_MODEL_fe39f994fa9b4d7daa232e1dcd2b0e8b",
-      "value": 1042301
-     }
-    },
-    "868aa268dd28498d902782215e53c6fa": {
-     "model_module": "@jupyter-widgets/controls",
-     "model_module_version": "1.5.0",
-     "model_name": "HTMLModel",
-     "state": {
-      "_dom_classes": [],
-      "_model_module": "@jupyter-widgets/controls",
-      "_model_module_version": "1.5.0",
-      "_model_name": "HTMLModel",
-      "_view_count": null,
-      "_view_module": "@jupyter-widgets/controls",
-      "_view_module_version": "1.5.0",
-      "_view_name": "HTMLView",
-      "description": "",
-      "description_tooltip": null,
-      "layout": "IPY_MODEL_3c37e72850c746ce9c919add5340dede",
-      "placeholder": "​",
-      "style": "IPY_MODEL_2053e6adef1b4dba89f861eaf3d916fd",
-      "value": " 456k/456k [00:00&lt;00:00, 11.9MB/s]"
-     }
-    },
-    "9d229e5dd56e4d539ca2c1b9f0a37812": {
-     "model_module": "@jupyter-widgets/controls",
-     "model_module_version": "1.5.0",
-     "model_name": "FloatProgressModel",
-     "state": {
-      "_dom_classes": [],
-      "_model_module": "@jupyter-widgets/controls",
-      "_model_module_version": "1.5.0",
-      "_model_name": "FloatProgressModel",
-      "_view_count": null,
-      "_view_module": "@jupyter-widgets/controls",
-      "_view_module_version": "1.5.0",
-      "_view_name": "ProgressView",
-      "bar_style": "success",
-      "description": "",
-      "description_tooltip": null,
-      "layout": "IPY_MODEL_398f1c0f56fe4f218d999df138adfdac",
-      "max": 456318,
-      "min": 0,
-      "orientation": "horizontal",
-      "style": "IPY_MODEL_f1839e86863948f68314f81ba6bca4c9",
-      "value": 456318
-     }
-    },
-    "a02534c347aa4865ab4ab3de3a3ee2f5": {
-     "model_module": "@jupyter-widgets/base",
-     "model_module_version": "1.2.0",
-     "model_name": "LayoutModel",
-     "state": {
-      "_model_module": "@jupyter-widgets/base",
-      "_model_module_version": "1.2.0",
-      "_model_name": "LayoutModel",
-      "_view_count": null,
-      "_view_module": "@jupyter-widgets/base",
-      "_view_module_version": "1.2.0",
-      "_view_name": "LayoutView",
-      "align_content": null,
-      "align_items": null,
-      "align_self": null,
-      "border": null,
-      "bottom": null,
-      "display": null,
-      "flex": null,
-      "flex_flow": null,
-      "grid_area": null,
-      "grid_auto_columns": null,
-      "grid_auto_flow": null,
-      "grid_auto_rows": null,
-      "grid_column": null,
-      "grid_gap": null,
-      "grid_row": null,
-      "grid_template_areas": null,
-      "grid_template_columns": null,
-      "grid_template_rows": null,
-      "height": null,
-      "justify_content": null,
-      "justify_items": null,
-      "left": null,
-      "margin": null,
-      "max_height": null,
-      "max_width": null,
-      "min_height": null,
-      "min_width": null,
-      "object_fit": null,
-      "object_position": null,
-      "order": null,
-      "overflow": null,
-      "overflow_x": null,
-      "overflow_y": null,
-      "padding": null,
-      "right": null,
-      "top": null,
-      "visibility": null,
-      "width": null
-     }
-    },
-    "a2edbc4195d843e0acfba83726a08e78": {
-     "model_module": "@jupyter-widgets/controls",
-     "model_module_version": "1.5.0",
-     "model_name": "HTMLModel",
-     "state": {
-      "_dom_classes": [],
-      "_model_module": "@jupyter-widgets/controls",
-      "_model_module_version": "1.5.0",
-      "_model_name": "HTMLModel",
-      "_view_count": null,
-      "_view_module": "@jupyter-widgets/controls",
-      "_view_module_version": "1.5.0",
-      "_view_name": "HTMLView",
-      "description": "",
-      "description_tooltip": null,
-      "layout": "IPY_MODEL_7b24b46d6c3643e581ba003a9c473745",
-      "placeholder": "​",
-      "style": "IPY_MODEL_3f748152b9274556afad2555572aa9f4",
-      "value": " 665/665 [00:00&lt;00:00, 22.7kB/s]"
-     }
-    },
-    "adb40ef11f094594b14776e238955224": {
-     "model_module": "@jupyter-widgets/base",
-     "model_module_version": "1.2.0",
-     "model_name": "LayoutModel",
-     "state": {
-      "_model_module": "@jupyter-widgets/base",
-      "_model_module_version": "1.2.0",
-      "_model_name": "LayoutModel",
-      "_view_count": null,
-      "_view_module": "@jupyter-widgets/base",
-      "_view_module_version": "1.2.0",
-      "_view_name": "LayoutView",
-      "align_content": null,
-      "align_items": null,
-      "align_self": null,
-      "border": null,
-      "bottom": null,
-      "display": null,
-      "flex": null,
-      "flex_flow": null,
-      "grid_area": null,
-      "grid_auto_columns": null,
-      "grid_auto_flow": null,
-      "grid_auto_rows": null,
-      "grid_column": null,
-      "grid_gap": null,
-      "grid_row": null,
-      "grid_template_areas": null,
-      "grid_template_columns": null,
-      "grid_template_rows": null,
-      "height": null,
-      "justify_content": null,
-      "justify_items": null,
-      "left": null,
-      "margin": null,
-      "max_height": null,
-      "max_width": null,
-      "min_height": null,
-      "min_width": null,
-      "object_fit": null,
-      "object_position": null,
-      "order": null,
-      "overflow": null,
-      "overflow_x": null,
-      "overflow_y": null,
-      "padding": null,
-      "right": null,
-      "top": null,
-      "visibility": null,
-      "width": null
-     }
-    },
-    "b0ccb9d9d96e4ed8bec4d540c34d337c": {
-     "model_module": "@jupyter-widgets/controls",
-     "model_module_version": "1.5.0",
-     "model_name": "DescriptionStyleModel",
-     "state": {
-      "_model_module": "@jupyter-widgets/controls",
-      "_model_module_version": "1.5.0",
-      "_model_name": "DescriptionStyleModel",
-      "_view_count": null,
-      "_view_module": "@jupyter-widgets/base",
-      "_view_module_version": "1.2.0",
-      "_view_name": "StyleView",
-      "description_width": ""
-     }
-    },
-    "b102e756f9b848a98f58396fc825be84": {
-     "model_module": "@jupyter-widgets/base",
-     "model_module_version": "1.2.0",
-     "model_name": "LayoutModel",
-     "state": {
-      "_model_module": "@jupyter-widgets/base",
-      "_model_module_version": "1.2.0",
-      "_model_name": "LayoutModel",
-      "_view_count": null,
-      "_view_module": "@jupyter-widgets/base",
-      "_view_module_version": "1.2.0",
-      "_view_name": "LayoutView",
-      "align_content": null,
-      "align_items": null,
-      "align_self": null,
-      "border": null,
-      "bottom": null,
-      "display": null,
-      "flex": null,
-      "flex_flow": null,
-      "grid_area": null,
-      "grid_auto_columns": null,
-      "grid_auto_flow": null,
-      "grid_auto_rows": null,
-      "grid_column": null,
-      "grid_gap": null,
-      "grid_row": null,
-      "grid_template_areas": null,
-      "grid_template_columns": null,
-      "grid_template_rows": null,
-      "height": null,
-      "justify_content": null,
-      "justify_items": null,
-      "left": null,
-      "margin": null,
-      "max_height": null,
-      "max_width": null,
-      "min_height": null,
-      "min_width": null,
-      "object_fit": null,
-      "object_position": null,
-      "order": null,
-      "overflow": null,
-      "overflow_x": null,
-      "overflow_y": null,
-      "padding": null,
-      "right": null,
-      "top": null,
-      "visibility": null,
-      "width": null
-     }
-    },
-    "b458d6fa793d4fa080b9f1e5013af3de": {
-     "model_module": "@jupyter-widgets/base",
-     "model_module_version": "1.2.0",
-     "model_name": "LayoutModel",
-     "state": {
-      "_model_module": "@jupyter-widgets/base",
-      "_model_module_version": "1.2.0",
-      "_model_name": "LayoutModel",
-      "_view_count": null,
-      "_view_module": "@jupyter-widgets/base",
-      "_view_module_version": "1.2.0",
-      "_view_name": "LayoutView",
-      "align_content": null,
-      "align_items": null,
-      "align_self": null,
-      "border": null,
-      "bottom": null,
-      "display": null,
-      "flex": null,
-      "flex_flow": null,
-      "grid_area": null,
-      "grid_auto_columns": null,
-      "grid_auto_flow": null,
-      "grid_auto_rows": null,
-      "grid_column": null,
-      "grid_gap": null,
-      "grid_row": null,
-      "grid_template_areas": null,
-      "grid_template_columns": null,
-      "grid_template_rows": null,
-      "height": null,
-      "justify_content": null,
-      "justify_items": null,
-      "left": null,
-      "margin": null,
-      "max_height": null,
-      "max_width": null,
-      "min_height": null,
-      "min_width": null,
-      "object_fit": null,
-      "object_position": null,
-      "order": null,
-      "overflow": null,
-      "overflow_x": null,
-      "overflow_y": null,
-      "padding": null,
-      "right": null,
-      "top": null,
-      "visibility": null,
-      "width": null
-     }
-    },
-    "b53e8481f6d64018988dc03081bf2765": {
-     "model_module": "@jupyter-widgets/controls",
-     "model_module_version": "1.5.0",
-     "model_name": "ProgressStyleModel",
-     "state": {
-      "_model_module": "@jupyter-widgets/controls",
-      "_model_module_version": "1.5.0",
-      "_model_name": "ProgressStyleModel",
-      "_view_count": null,
-      "_view_module": "@jupyter-widgets/base",
-      "_view_module_version": "1.2.0",
-      "_view_name": "StyleView",
-      "bar_color": null,
-      "description_width": ""
-     }
-    },
-    "b5566e3db2914ddebd80d7bde75b2559": {
-     "model_module": "@jupyter-widgets/controls",
-     "model_module_version": "1.5.0",
-     "model_name": "HBoxModel",
-     "state": {
-      "_dom_classes": [],
-      "_model_module": "@jupyter-widgets/controls",
-      "_model_module_version": "1.5.0",
-      "_model_name": "HBoxModel",
-      "_view_count": null,
-      "_view_module": "@jupyter-widgets/controls",
-      "_view_module_version": "1.5.0",
-      "_view_name": "HBoxView",
-      "box_style": "",
-      "children": [
-       "IPY_MODEL_208d404f405a42a3b06d65ad67fb7322",
-       "IPY_MODEL_7da29a2b6508494282acbc459eccbb96",
-       "IPY_MODEL_47838fa763ca40598b2622a9d1e79444"
-      ],
-      "layout": "IPY_MODEL_ff32a3f12e814740a1cd5dd12bd731d4"
-     }
-    },
-    "c06865c1e01a441698dacf48600dd03c": {
-     "model_module": "@jupyter-widgets/controls",
-     "model_module_version": "1.5.0",
-     "model_name": "HTMLModel",
-     "state": {
-      "_dom_classes": [],
-      "_model_module": "@jupyter-widgets/controls",
-      "_model_module_version": "1.5.0",
-      "_model_name": "HTMLModel",
-      "_view_count": null,
-      "_view_module": "@jupyter-widgets/controls",
-      "_view_module_version": "1.5.0",
-      "_view_name": "HTMLView",
-      "description": "",
-      "description_tooltip": null,
-      "layout": "IPY_MODEL_adb40ef11f094594b14776e238955224",
-      "placeholder": "​",
-      "style": "IPY_MODEL_7b47c78391a4431aa2d3f84677f24046",
-      "value": "Downloading (…)olve/main/merges.txt: 100%"
-     }
-    },
-    "c078fe9a056a473dab7d474cd7907154": {
-     "model_module": "@jupyter-widgets/controls",
-     "model_module_version": "1.5.0",
-     "model_name": "DescriptionStyleModel",
-     "state": {
-      "_model_module": "@jupyter-widgets/controls",
-      "_model_module_version": "1.5.0",
-      "_model_name": "DescriptionStyleModel",
-      "_view_count": null,
-      "_view_module": "@jupyter-widgets/base",
-      "_view_module_version": "1.2.0",
-      "_view_name": "StyleView",
-      "description_width": ""
-     }
-    },
-    "c7636a6d7380465895b8c86d34caf500": {
-     "model_module": "@jupyter-widgets/controls",
-     "model_module_version": "1.5.0",
-     "model_name": "FloatProgressModel",
-     "state": {
-      "_dom_classes": [],
-      "_model_module": "@jupyter-widgets/controls",
-      "_model_module_version": "1.5.0",
-      "_model_name": "FloatProgressModel",
-      "_view_count": null,
-      "_view_module": "@jupyter-widgets/controls",
-      "_view_module_version": "1.5.0",
-      "_view_name": "ProgressView",
-      "bar_style": "success",
-      "description": "",
-      "description_tooltip": null,
-      "layout": "IPY_MODEL_f22e9615de674e05978f332eb88750cf",
-      "max": 1355256,
-      "min": 0,
-      "orientation": "horizontal",
-      "style": "IPY_MODEL_b53e8481f6d64018988dc03081bf2765",
-      "value": 1355256
-     }
-    },
-    "d55f842766484d299c75f74e31e7aa6a": {
-     "model_module": "@jupyter-widgets/controls",
-     "model_module_version": "1.5.0",
-     "model_name": "HBoxModel",
-     "state": {
-      "_dom_classes": [],
-      "_model_module": "@jupyter-widgets/controls",
-      "_model_module_version": "1.5.0",
-      "_model_name": "HBoxModel",
-      "_view_count": null,
-      "_view_module": "@jupyter-widgets/controls",
-      "_view_module_version": "1.5.0",
-      "_view_name": "HBoxView",
-      "box_style": "",
-      "children": [
-       "IPY_MODEL_1bdaf4dab16f48dbaeed3fb9bf268e45",
-       "IPY_MODEL_026cc1a42e154f1f92b5236869311929",
-       "IPY_MODEL_a2edbc4195d843e0acfba83726a08e78"
-      ],
-      "layout": "IPY_MODEL_40e148c291ad4f739998a7eac55a8af6"
-     }
-    },
-    "eab4127882d24acfa9518ebff6f4e22a": {
-     "model_module": "@jupyter-widgets/controls",
-     "model_module_version": "1.5.0",
-     "model_name": "HBoxModel",
-     "state": {
-      "_dom_classes": [],
-      "_model_module": "@jupyter-widgets/controls",
-      "_model_module_version": "1.5.0",
-      "_model_name": "HBoxModel",
-      "_view_count": null,
-      "_view_module": "@jupyter-widgets/controls",
-      "_view_module_version": "1.5.0",
-      "_view_name": "HBoxView",
-      "box_style": "",
-      "children": [
-       "IPY_MODEL_64b754f563834be0a6963349b1f2dcf2",
-       "IPY_MODEL_c7636a6d7380465895b8c86d34caf500",
-       "IPY_MODEL_f7803dea63994cc2a31acf805bd19e67"
-      ],
-      "layout": "IPY_MODEL_380a0c11434241b191b17421e395be8b"
-     }
-    },
-    "f1839e86863948f68314f81ba6bca4c9": {
-     "model_module": "@jupyter-widgets/controls",
-     "model_module_version": "1.5.0",
-     "model_name": "ProgressStyleModel",
-     "state": {
-      "_model_module": "@jupyter-widgets/controls",
-      "_model_module_version": "1.5.0",
-      "_model_name": "ProgressStyleModel",
-      "_view_count": null,
-      "_view_module": "@jupyter-widgets/base",
-      "_view_module_version": "1.2.0",
-      "_view_name": "StyleView",
-      "bar_color": null,
-      "description_width": ""
-     }
-    },
-    "f22e9615de674e05978f332eb88750cf": {
-     "model_module": "@jupyter-widgets/base",
-     "model_module_version": "1.2.0",
-     "model_name": "LayoutModel",
-     "state": {
-      "_model_module": "@jupyter-widgets/base",
-      "_model_module_version": "1.2.0",
-      "_model_name": "LayoutModel",
-      "_view_count": null,
-      "_view_module": "@jupyter-widgets/base",
-      "_view_module_version": "1.2.0",
-      "_view_name": "LayoutView",
-      "align_content": null,
-      "align_items": null,
-      "align_self": null,
-      "border": null,
-      "bottom": null,
-      "display": null,
-      "flex": null,
-      "flex_flow": null,
-      "grid_area": null,
-      "grid_auto_columns": null,
-      "grid_auto_flow": null,
-      "grid_auto_rows": null,
-      "grid_column": null,
-      "grid_gap": null,
-      "grid_row": null,
-      "grid_template_areas": null,
-      "grid_template_columns": null,
-      "grid_template_rows": null,
-      "height": null,
-      "justify_content": null,
-      "justify_items": null,
-      "left": null,
-      "margin": null,
-      "max_height": null,
-      "max_width": null,
-      "min_height": null,
-      "min_width": null,
-      "object_fit": null,
-      "object_position": null,
-      "order": null,
-      "overflow": null,
-      "overflow_x": null,
-      "overflow_y": null,
-      "padding": null,
-      "right": null,
-      "top": null,
-      "visibility": null,
-      "width": null
-     }
-    },
-    "f2a1c5087d0e44909139697ed90474e8": {
-     "model_module": "@jupyter-widgets/controls",
-     "model_module_version": "1.5.0",
-     "model_name": "ProgressStyleModel",
-     "state": {
-      "_model_module": "@jupyter-widgets/controls",
-      "_model_module_version": "1.5.0",
-      "_model_name": "ProgressStyleModel",
-      "_view_count": null,
-      "_view_module": "@jupyter-widgets/base",
-      "_view_module_version": "1.2.0",
-      "_view_name": "StyleView",
-      "bar_color": null,
-      "description_width": ""
-     }
-    },
-    "f7803dea63994cc2a31acf805bd19e67": {
-     "model_module": "@jupyter-widgets/controls",
-     "model_module_version": "1.5.0",
-     "model_name": "HTMLModel",
-     "state": {
-      "_dom_classes": [],
-      "_model_module": "@jupyter-widgets/controls",
-      "_model_module_version": "1.5.0",
-      "_model_name": "HTMLModel",
-      "_view_count": null,
-      "_view_module": "@jupyter-widgets/controls",
-      "_view_module_version": "1.5.0",
-      "_view_name": "HTMLView",
-      "description": "",
-      "description_tooltip": null,
-      "layout": "IPY_MODEL_b458d6fa793d4fa080b9f1e5013af3de",
-      "placeholder": "​",
-      "style": "IPY_MODEL_119d6d7a8d524aa49170f5784ebc6b9e",
-      "value": " 1.36M/1.36M [00:00&lt;00:00, 30.3MB/s]"
-     }
-    },
-    "fbd7219af1924d2ead5310eb7b35aab0": {
-     "model_module": "@jupyter-widgets/controls",
-     "model_module_version": "1.5.0",
-     "model_name": "DescriptionStyleModel",
-     "state": {
-      "_model_module": "@jupyter-widgets/controls",
-      "_model_module_version": "1.5.0",
-      "_model_name": "DescriptionStyleModel",
-      "_view_count": null,
-      "_view_module": "@jupyter-widgets/base",
-      "_view_module_version": "1.2.0",
-      "_view_name": "StyleView",
-      "description_width": ""
-     }
-    },
-    "fd8b887c1f7149f2876cf8a31e534ad6": {
-     "model_module": "@jupyter-widgets/controls",
-     "model_module_version": "1.5.0",
-     "model_name": "DescriptionStyleModel",
-     "state": {
-      "_model_module": "@jupyter-widgets/controls",
-      "_model_module_version": "1.5.0",
-      "_model_name": "DescriptionStyleModel",
-      "_view_count": null,
-      "_view_module": "@jupyter-widgets/base",
-      "_view_module_version": "1.2.0",
-      "_view_name": "StyleView",
-      "description_width": ""
-     }
-    },
-    "fe39f994fa9b4d7daa232e1dcd2b0e8b": {
-     "model_module": "@jupyter-widgets/controls",
-     "model_module_version": "1.5.0",
-     "model_name": "ProgressStyleModel",
-     "state": {
-      "_model_module": "@jupyter-widgets/controls",
-      "_model_module_version": "1.5.0",
-      "_model_name": "ProgressStyleModel",
-      "_view_count": null,
-      "_view_module": "@jupyter-widgets/base",
-      "_view_module_version": "1.2.0",
-      "_view_name": "StyleView",
-      "bar_color": null,
-      "description_width": ""
-     }
-    },
-    "ff32a3f12e814740a1cd5dd12bd731d4": {
-     "model_module": "@jupyter-widgets/base",
-     "model_module_version": "1.2.0",
-     "model_name": "LayoutModel",
-     "state": {
-      "_model_module": "@jupyter-widgets/base",
-      "_model_module_version": "1.2.0",
-      "_model_name": "LayoutModel",
-      "_view_count": null,
-      "_view_module": "@jupyter-widgets/base",
-      "_view_module_version": "1.2.0",
-      "_view_name": "LayoutView",
-      "align_content": null,
-      "align_items": null,
-      "align_self": null,
-      "border": null,
-      "bottom": null,
-      "display": null,
-      "flex": null,
-      "flex_flow": null,
-      "grid_area": null,
-      "grid_auto_columns": null,
-      "grid_auto_flow": null,
-      "grid_auto_rows": null,
-      "grid_column": null,
-      "grid_gap": null,
-      "grid_row": null,
-      "grid_template_areas": null,
-      "grid_template_columns": null,
-      "grid_template_rows": null,
-      "height": null,
-      "justify_content": null,
-      "justify_items": null,
-      "left": null,
-      "margin": null,
-      "max_height": null,
-      "max_width": null,
-      "min_height": null,
-      "min_width": null,
-      "object_fit": null,
-      "object_position": null,
-      "order": null,
-      "overflow": null,
-      "overflow_x": null,
-      "overflow_y": null,
-      "padding": null,
-      "right": null,
-      "top": null,
-      "visibility": null,
-      "width": null
-     }
-    }
-   }
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 5
-}
+    "nbformat": 4,
+    "nbformat_minor": 5
+}
\ No newline at end of file
diff --git a/docs/examples/data_connectors/ChromaDemo.ipynb b/docs/examples/data_connectors/ChromaDemo.ipynb
index 82698c04d177d05eac5ea01d4b85b531cec22f10..96a1e5bd7e4e6e850ffc180fb10ba290de7a1ff3 100644
--- a/docs/examples/data_connectors/ChromaDemo.ipynb
+++ b/docs/examples/data_connectors/ChromaDemo.ipynb
@@ -1,148 +1,148 @@
 {
- "cells": [
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "f3ca56f0-6ef1-426f-bac5-fd7c374d0f51",
-   "metadata": {},
-   "source": [
-    "# Chroma Reader"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "778ee662",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "import logging\n",
-    "import sys\n",
-    "\n",
-    "logging.basicConfig(stream=sys.stdout, level=logging.INFO)\n",
-    "logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "262f990a-79c8-413a-9f3c-cd9a3c191307",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "from llama_index.readers.chroma import ChromaReader"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "252f8163-7297-44b6-a838-709e9662f3d6",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# The chroma reader loads data from a persisted Chroma collection.\n",
-    "# This requires a collection name and a persist directory.\n",
-    "\n",
-    "reader = ChromaReader(\n",
-    "    collection_name=\"chroma_collection\",\n",
-    "    persist_directory=\"examples/data_connectors/chroma_collection\",\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "53b49187-8477-436c-9718-5d2f8cc6fad0",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# the query_vector is an embedding representation of your query.\n",
-    "# Example query vector:\n",
-    "#   query_vector=[0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3]\n",
-    "\n",
-    "query_vector = [n1, n2, n3, ...]"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "a88be1c4-603f-48b9-ac64-10a219af4951",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# NOTE: Required args are collection_name, query_vector.\n",
-    "# See the Python client: https://github.com/qdrant/qdrant_client\n",
-    "# for more details.\n",
-    "documents = reader.load_data(collection_name=\"demo\", query_vector=query_vector, limit=5)"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "id": "169b4273-eb20-4d06-9ffe-71320f4570f6",
-   "metadata": {},
-   "source": [
-    "### Create index"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "ac4563a1",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "from llama_index.indices import ListIndex\n",
-    "\n",
-    "index = ListIndex.from_documents(documents)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "f06b02db",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# set Logging to DEBUG for more detailed outputs\n",
-    "query_engine = index.as_query_engine()\n",
-    "response = query_engine.query(\"<query_text>\")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "97d1ae80",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "display(Markdown(f\"<b>{response}</b>\"))"
-   ]
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "chroma-gpt-index",
-   "language": "python",
-   "name": "python3"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 3
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython3",
-   "version": "3.9.16"
-  },
-  "vscode": {
-   "interpreter": {
-    "hash": "0ac390d292208ca2380c85f5bce7ded36a7a25670a97c40b8009630eb36cb06e"
-   }
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 5
+    "cells": [
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "f3ca56f0-6ef1-426f-bac5-fd7c374d0f51",
+            "metadata": {},
+            "source": [
+                "# Chroma Reader"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "778ee662",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "import logging\n",
+                "import sys\n",
+                "\n",
+                "logging.basicConfig(stream=sys.stdout, level=logging.INFO)\n",
+                "logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "262f990a-79c8-413a-9f3c-cd9a3c191307",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "from llama_index.readers.chroma import ChromaReader"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "252f8163-7297-44b6-a838-709e9662f3d6",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# The chroma reader loads data from a persisted Chroma collection.\n",
+                "# This requires a collection name and a persist directory.\n",
+                "\n",
+                "reader = ChromaReader(\n",
+                "    collection_name=\"chroma_collection\",\n",
+                "    persist_directory=\"examples/data_connectors/chroma_collection\",\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "53b49187-8477-436c-9718-5d2f8cc6fad0",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# the query_vector is an embedding representation of your query.\n",
+                "# Example query vector:\n",
+                "#   query_vector=[0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3]\n",
+                "\n",
+                "query_vector = [n1, n2, n3, ...]"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "a88be1c4-603f-48b9-ac64-10a219af4951",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# NOTE: Required args are collection_name, query_vector.\n",
+                "# See the Chroma docs: https://docs.trychroma.com/\n",
+                "# for more details.\n",
+                "documents = reader.load_data(collection_name=\"demo\", query_vector=query_vector, limit=5)"
+            ]
+        },
+        {
+            "cell_type": "markdown",
+            "id": "169b4273-eb20-4d06-9ffe-71320f4570f6",
+            "metadata": {},
+            "source": [
+                "### Create index"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "ac4563a1",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "from llama_index.indices import SummaryIndex\n",
+                "\n",
+                "index = SummaryIndex.from_documents(documents)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "f06b02db",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# set Logging to DEBUG for more detailed outputs\n",
+                "query_engine = index.as_query_engine()\n",
+                "response = query_engine.query(\"<query_text>\")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "97d1ae80",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "display(Markdown(f\"<b>{response}</b>\"))"
+            ]
+        }
+    ],
+    "metadata": {
+        "kernelspec": {
+            "display_name": "chroma-gpt-index",
+            "language": "python",
+            "name": "python3"
+        },
+        "language_info": {
+            "codemirror_mode": {
+                "name": "ipython",
+                "version": 3
+            },
+            "file_extension": ".py",
+            "mimetype": "text/x-python",
+            "name": "python",
+            "nbconvert_exporter": "python",
+            "pygments_lexer": "ipython3",
+            "version": "3.9.16"
+        },
+        "vscode": {
+            "interpreter": {
+                "hash": "0ac390d292208ca2380c85f5bce7ded36a7a25670a97c40b8009630eb36cb06e"
+            }
+        }
+    },
+    "nbformat": 4,
+    "nbformat_minor": 5
 }
diff --git a/docs/examples/data_connectors/DiscordDemo.ipynb b/docs/examples/data_connectors/DiscordDemo.ipynb
index 5b4f0d713c2abc40dcae5d5d089dbde8da43bcc3..f41b90eb396c5116df29cc6e5e579e7eeac8944d 100644
--- a/docs/examples/data_connectors/DiscordDemo.ipynb
+++ b/docs/examples/data_connectors/DiscordDemo.ipynb
@@ -1,122 +1,122 @@
 {
- "cells": [
-  {
-   "cell_type": "markdown",
-   "id": "effeb5a7-8544-4ee4-8c11-bad0d8165394",
-   "metadata": {},
-   "source": [
-    "# Discord Reader\n",
-    "Demonstrates our Discord data connector"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "5fb15bc4",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "import logging\n",
-    "import sys\n",
-    "\n",
-    "logging.basicConfig(stream=sys.stdout, level=logging.INFO)\n",
-    "logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "04edcd4a-5633-47ee-8a92-ff2f6abc2ec7",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# This is due to the fact that we use asyncio.loop_until_complete in\n",
-    "# the DiscordReader. Since the Jupyter kernel itself runs on\n",
-    "# an event loop, we need to add some help with nesting\n",
-    "!pip install nest_asyncio\n",
-    "import nest_asyncio\n",
-    "\n",
-    "nest_asyncio.apply()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "6ea1f66d-10ed-4417-bdcb-f8a894836ea5",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "from llama_index import ListIndex, DiscordReader\n",
-    "from IPython.display import Markdown, display\n",
-    "import os"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "da90589a-fb44-4ec6-9706-753dba4fa968",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "discord_token = os.getenv(\"DISCORD_TOKEN\")\n",
-    "channel_ids = [1057178784895348746]  # Replace with your channel_id\n",
-    "documents = DiscordReader(discord_token=discord_token).load_data(\n",
-    "    channel_ids=channel_ids\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "341295df-2029-4728-ab3d-2ee178a7e6f1",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "index = ListIndex.from_documents(documents)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "01c26b9d-49ec-4a6e-9c61-5c06bb86bbb2",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# set Logging to DEBUG for more detailed outputs\n",
-    "query_engine = index.as_query_engine()\n",
-    "response = query_engine.query(\"<query_text>\")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "f160c678-2fb5-4d6d-b2bc-87abb61cfdec",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "display(Markdown(f\"<b>{response}</b>\"))"
-   ]
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "Python 3 (ipykernel)",
-   "language": "python",
-   "name": "python3"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 3
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython3",
-   "version": "3.11.1"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 5
+    "cells": [
+        {
+            "cell_type": "markdown",
+            "id": "effeb5a7-8544-4ee4-8c11-bad0d8165394",
+            "metadata": {},
+            "source": [
+                "# Discord Reader\n",
+                "Demonstrates our Discord data connector"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "5fb15bc4",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "import logging\n",
+                "import sys\n",
+                "\n",
+                "logging.basicConfig(stream=sys.stdout, level=logging.INFO)\n",
+                "logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "04edcd4a-5633-47ee-8a92-ff2f6abc2ec7",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# This is due to the fact that we use asyncio.loop_until_complete in\n",
+                "# the DiscordReader. Since the Jupyter kernel itself runs on\n",
+                "# an event loop, we need to add some help with nesting\n",
+                "!pip install nest_asyncio\n",
+                "import nest_asyncio\n",
+                "\n",
+                "nest_asyncio.apply()"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "6ea1f66d-10ed-4417-bdcb-f8a894836ea5",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "from llama_index import SummaryIndex, DiscordReader\n",
+                "from IPython.display import Markdown, display\n",
+                "import os"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "da90589a-fb44-4ec6-9706-753dba4fa968",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "discord_token = os.getenv(\"DISCORD_TOKEN\")\n",
+                "channel_ids = [1057178784895348746]  # Replace with your channel_id\n",
+                "documents = DiscordReader(discord_token=discord_token).load_data(\n",
+                "    channel_ids=channel_ids\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "341295df-2029-4728-ab3d-2ee178a7e6f1",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "index = SummaryIndex.from_documents(documents)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "01c26b9d-49ec-4a6e-9c61-5c06bb86bbb2",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# set Logging to DEBUG for more detailed outputs\n",
+                "query_engine = index.as_query_engine()\n",
+                "response = query_engine.query(\"<query_text>\")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "f160c678-2fb5-4d6d-b2bc-87abb61cfdec",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "display(Markdown(f\"<b>{response}</b>\"))"
+            ]
+        }
+    ],
+    "metadata": {
+        "kernelspec": {
+            "display_name": "Python 3 (ipykernel)",
+            "language": "python",
+            "name": "python3"
+        },
+        "language_info": {
+            "codemirror_mode": {
+                "name": "ipython",
+                "version": 3
+            },
+            "file_extension": ".py",
+            "mimetype": "text/x-python",
+            "name": "python",
+            "nbconvert_exporter": "python",
+            "pygments_lexer": "ipython3",
+            "version": "3.11.1"
+        }
+    },
+    "nbformat": 4,
+    "nbformat_minor": 5
 }
diff --git a/docs/examples/data_connectors/FaissDemo.ipynb b/docs/examples/data_connectors/FaissDemo.ipynb
index a16b0bfcf165849bf67f36e3ef8fe89034d6437d..06ab0d137c6a9c00f7fa6b8543b19ed6c13f1902 100644
--- a/docs/examples/data_connectors/FaissDemo.ipynb
+++ b/docs/examples/data_connectors/FaissDemo.ipynb
@@ -1,166 +1,166 @@
 {
- "cells": [
-  {
-   "cell_type": "markdown",
-   "id": "5d974136",
-   "metadata": {},
-   "source": [
-    "# Faiss Reader"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "4026b434",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "import logging\n",
-    "import sys\n",
-    "\n",
-    "logging.basicConfig(stream=sys.stdout, level=logging.INFO)\n",
-    "logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "b541d8ec",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "from llama_index.readers.faiss import FaissReader"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "90d37078",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# Build the Faiss index.\n",
-    "# A guide for how to get started with Faiss is here: https://github.com/facebookresearch/faiss/wiki/Getting-started\n",
-    "# We provide some example code below.\n",
-    "\n",
-    "import faiss\n",
-    "\n",
-    "# # Example Code\n",
-    "# d = 8\n",
-    "# docs = np.array([\n",
-    "#     [0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1],\n",
-    "#     [0.2, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2],\n",
-    "#     [0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3],\n",
-    "#     [0.4, 0.4, 0.4, 0.4, 0.4, 0.4, 0.4, 0.4],\n",
-    "#     [0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5]\n",
-    "# ])\n",
-    "# # id_to_text_map is used for query retrieval\n",
-    "# id_to_text_map = {\n",
-    "#     0: \"aaaaaaaaa bbbbbbb cccccc\",\n",
-    "#     1: \"foooooo barrrrrr\",\n",
-    "#     2: \"tmp tmptmp tmp\",\n",
-    "#     3: \"hello world hello world\",\n",
-    "#     4: \"cat dog cat dog\"\n",
-    "# }\n",
-    "# # build the index\n",
-    "# index = faiss.IndexFlatL2(d)\n",
-    "# index.add(docs)\n",
-    "\n",
-    "id_to_text_map = {\n",
-    "    \"id1\": \"text blob 1\",\n",
-    "    \"id2\": \"text blob 2\",\n",
-    "}\n",
-    "index = ..."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "fd470a09",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "reader = FaissReader(index)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "c33084c5",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# To load data from the Faiss index, you must specify:\n",
-    "# k: top nearest neighbors\n",
-    "# query: a 2D embedding representation of your queries (rows are queries)\n",
-    "k = 4\n",
-    "query1 = np.array([...])\n",
-    "query2 = np.array([...])\n",
-    "query = np.array([query1, query2])\n",
-    "\n",
-    "documents = reader.load_data(query=query, id_to_text_map=id_to_text_map, k=k)"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "id": "0b74697a",
-   "metadata": {},
-   "source": [
-    "### Create index"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "e85d7e5b",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "index = ListIndex.from_documents(documents)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "31c3b68f",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# set Logging to DEBUG for more detailed outputs\n",
-    "query_engine = index.as_query_engine()\n",
-    "response = query_engine.query(\"<query_text>\")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "56fce3fb",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "display(Markdown(f\"<b>{response}</b>\"))"
-   ]
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "Python 3 (ipykernel)",
-   "language": "python",
-   "name": "python3"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 3
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython3",
-   "version": "3.11.1"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 5
-}
+    "cells": [
+        {
+            "cell_type": "markdown",
+            "id": "5d974136",
+            "metadata": {},
+            "source": [
+                "# Faiss Reader"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "4026b434",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "import logging\n",
+                "import sys\n",
+                "\n",
+                "logging.basicConfig(stream=sys.stdout, level=logging.INFO)\n",
+                "logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "b541d8ec",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "from llama_index.readers.faiss import FaissReader"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "90d37078",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# Build the Faiss index.\n",
+                "# A guide for how to get started with Faiss is here: https://github.com/facebookresearch/faiss/wiki/Getting-started\n",
+                "# We provide some example code below.\n",
+                "\n",
+                "import faiss\n",
+                "\n",
+                "# # Example Code\n",
+                "# d = 8\n",
+                "# docs = np.array([\n",
+                "#     [0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.1],\n",
+                "#     [0.2, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2],\n",
+                "#     [0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3],\n",
+                "#     [0.4, 0.4, 0.4, 0.4, 0.4, 0.4, 0.4, 0.4],\n",
+                "#     [0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5]\n",
+                "# ])\n",
+                "# # id_to_text_map is used for query retrieval\n",
+                "# id_to_text_map = {\n",
+                "#     0: \"aaaaaaaaa bbbbbbb cccccc\",\n",
+                "#     1: \"foooooo barrrrrr\",\n",
+                "#     2: \"tmp tmptmp tmp\",\n",
+                "#     3: \"hello world hello world\",\n",
+                "#     4: \"cat dog cat dog\"\n",
+                "# }\n",
+                "# # build the index\n",
+                "# index = faiss.IndexFlatL2(d)\n",
+                "# index.add(docs)\n",
+                "\n",
+                "id_to_text_map = {\n",
+                "    \"id1\": \"text blob 1\",\n",
+                "    \"id2\": \"text blob 2\",\n",
+                "}\n",
+                "index = ..."
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "fd470a09",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "reader = FaissReader(index)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "c33084c5",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# To load data from the Faiss index, you must specify:\n",
+                "# k: top nearest neighbors\n",
+                "# query: a 2D embedding representation of your queries (rows are queries)\n",
+                "k = 4\n",
+                "query1 = np.array([...])\n",
+                "query2 = np.array([...])\n",
+                "query = np.array([query1, query2])\n",
+                "\n",
+                "documents = reader.load_data(query=query, id_to_text_map=id_to_text_map, k=k)"
+            ]
+        },
+        {
+            "cell_type": "markdown",
+            "id": "0b74697a",
+            "metadata": {},
+            "source": [
+                "### Create index"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "e85d7e5b",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "from llama_index import SummaryIndex\n\nindex = SummaryIndex.from_documents(documents)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "31c3b68f",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# set Logging to DEBUG for more detailed outputs\n",
+                "query_engine = index.as_query_engine()\n",
+                "response = query_engine.query(\"<query_text>\")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "56fce3fb",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "display(Markdown(f\"<b>{response}</b>\"))"
+            ]
+        }
+    ],
+    "metadata": {
+        "kernelspec": {
+            "display_name": "Python 3 (ipykernel)",
+            "language": "python",
+            "name": "python3"
+        },
+        "language_info": {
+            "codemirror_mode": {
+                "name": "ipython",
+                "version": 3
+            },
+            "file_extension": ".py",
+            "mimetype": "text/x-python",
+            "name": "python",
+            "nbconvert_exporter": "python",
+            "pygments_lexer": "ipython3",
+            "version": "3.11.1"
+        }
+    },
+    "nbformat": 4,
+    "nbformat_minor": 5
+}
\ No newline at end of file
diff --git a/docs/examples/data_connectors/GithubRepositoryReaderDemo.ipynb b/docs/examples/data_connectors/GithubRepositoryReaderDemo.ipynb
index dbdb7a7e3bdae3f0206b460fe358fb07a1ee96d2..f380f5c39ef5394024dc9ab92535509ef2175377 100644
--- a/docs/examples/data_connectors/GithubRepositoryReaderDemo.ipynb
+++ b/docs/examples/data_connectors/GithubRepositoryReaderDemo.ipynb
@@ -1,122 +1,122 @@
 {
- "cells": [
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "# Github Repo Reader"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# This is due to the fact that we use asyncio.loop_until_complete in\n",
-    "# the DiscordReader. Since the Jupyter kernel itself runs on\n",
-    "# an event loop, we need to add some help with nesting\n",
-    "!pip install nest_asyncio httpx\n",
-    "import nest_asyncio\n",
-    "\n",
-    "nest_asyncio.apply()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "%env OPENAI_API_KEY=sk-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\n",
-    "from llama_index import VectorStoreIndex, GithubRepositoryReader\n",
-    "from IPython.display import Markdown, display\n",
-    "import os"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "%env GITHUB_TOKEN=github_pat_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\n",
-    "github_token = os.environ.get(\"GITHUB_TOKEN\")\n",
-    "owner = \"jerryjliu\"\n",
-    "repo = \"llama_index\"\n",
-    "branch = \"main\"\n",
-    "\n",
-    "documents = GithubRepositoryReader(\n",
-    "    github_token=github_token,\n",
-    "    owner=owner,\n",
-    "    repo=repo,\n",
-    "    use_parser=False,\n",
-    "    verbose=False,\n",
-    "    ignore_directories=[\"examples\"],\n",
-    ").load_data(branch=branch)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "index = VectorStoreIndex.from_documents(documents)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# import time\n",
-    "# for document in documents:\n",
-    "#     print(document.metadata)\n",
-    "#     time.sleep(.25)\n",
-    "query_engine = index.as_query_engine()\n",
-    "response = query_engine.query(\n",
-    "    \"What is the difference between VectorStoreIndex and ListIndex?\", verbose=True\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "display(Markdown(f\"<b>{response}</b>\"))"
-   ]
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "llama_index-github-reader",
-   "language": "python",
-   "name": "python3"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 3
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython3",
-   "version": "3.11.0"
-  },
-  "orig_nbformat": 4,
-  "vscode": {
-   "interpreter": {
-    "hash": "5bc2ab08ee48b6366504a28e3231c27a37c154a347ee8ac6184b716eff7bdbcd"
-   }
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 2
-}
+    "cells": [
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "metadata": {},
+            "source": [
+                "# Github Repo Reader"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# This is due to the fact that we use asyncio.run_until_complete in\n",
+                "# the GithubRepositoryReader. Since the Jupyter kernel itself runs on\n",
+                "# an event loop, we need to add some help with nesting\n",
+                "!pip install nest_asyncio httpx\n",
+                "import nest_asyncio\n",
+                "\n",
+                "nest_asyncio.apply()"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "%env OPENAI_API_KEY=sk-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\n",
+                "from llama_index import VectorStoreIndex, GithubRepositoryReader\n",
+                "from IPython.display import Markdown, display\n",
+                "import os"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "%env GITHUB_TOKEN=github_pat_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\n",
+                "github_token = os.environ.get(\"GITHUB_TOKEN\")\n",
+                "owner = \"jerryjliu\"\n",
+                "repo = \"llama_index\"\n",
+                "branch = \"main\"\n",
+                "\n",
+                "documents = GithubRepositoryReader(\n",
+                "    github_token=github_token,\n",
+                "    owner=owner,\n",
+                "    repo=repo,\n",
+                "    use_parser=False,\n",
+                "    verbose=False,\n",
+                "    ignore_directories=[\"examples\"],\n",
+                ").load_data(branch=branch)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "index = VectorStoreIndex.from_documents(documents)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# import time\n",
+                "# for document in documents:\n",
+                "#     print(document.metadata)\n",
+                "#     time.sleep(.25)\n",
+                "query_engine = index.as_query_engine()\n",
+                "response = query_engine.query(\n",
+                "    \"What is the difference between VectorStoreIndex and SummaryIndex?\", verbose=True\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "display(Markdown(f\"<b>{response}</b>\"))"
+            ]
+        }
+    ],
+    "metadata": {
+        "kernelspec": {
+            "display_name": "llama_index-github-reader",
+            "language": "python",
+            "name": "python3"
+        },
+        "language_info": {
+            "codemirror_mode": {
+                "name": "ipython",
+                "version": 3
+            },
+            "file_extension": ".py",
+            "mimetype": "text/x-python",
+            "name": "python",
+            "nbconvert_exporter": "python",
+            "pygments_lexer": "ipython3",
+            "version": "3.11.0"
+        },
+        "orig_nbformat": 4,
+        "vscode": {
+            "interpreter": {
+                "hash": "5bc2ab08ee48b6366504a28e3231c27a37c154a347ee8ac6184b716eff7bdbcd"
+            }
+        }
+    },
+    "nbformat": 4,
+    "nbformat_minor": 2
+}
\ No newline at end of file
diff --git a/docs/examples/data_connectors/GoogleDocsDemo.ipynb b/docs/examples/data_connectors/GoogleDocsDemo.ipynb
index 9c6ecd8a1278bfb99e9626371aaf85f46fb7ccab..91e5e40c6652c270d70fce9474e9180c3d0324a4 100644
--- a/docs/examples/data_connectors/GoogleDocsDemo.ipynb
+++ b/docs/examples/data_connectors/GoogleDocsDemo.ipynb
@@ -30,7 +30,7 @@
             "metadata": {},
             "outputs": [],
             "source": [
-                "from llama_index import ListIndex, GoogleDocsReader\n",
+                "from llama_index import SummaryIndex, GoogleDocsReader\n",
                 "from IPython.display import Markdown, display\n",
                 "import os"
             ]
@@ -54,7 +54,7 @@
             "metadata": {},
             "outputs": [],
             "source": [
-                "index = ListIndex.from_documents(documents)"
+                "index = SummaryIndex.from_documents(documents)"
             ]
         },
         {
diff --git a/docs/examples/data_connectors/MongoDemo.ipynb b/docs/examples/data_connectors/MongoDemo.ipynb
index 954a78be5d4ec9687679f2610afafc249aa5631f..772d692459379a52074c5d654d60f94ae24e141d 100644
--- a/docs/examples/data_connectors/MongoDemo.ipynb
+++ b/docs/examples/data_connectors/MongoDemo.ipynb
@@ -1,113 +1,113 @@
 {
- "cells": [
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "effeb5a7-8544-4ee4-8c11-bad0d8165394",
-   "metadata": {},
-   "source": [
-    "# MongoDB Reader\n",
-    "Demonstrates our MongoDB data connector"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "60355655",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "import logging\n",
-    "import sys\n",
-    "\n",
-    "logging.basicConfig(stream=sys.stdout, level=logging.INFO)\n",
-    "logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 1,
-   "id": "6ea1f66d-10ed-4417-bdcb-f8a894836ea5",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "from llama_index import ListIndex, SimpleMongoReader\n",
-    "from IPython.display import Markdown, display\n",
-    "import os"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "da90589a-fb44-4ec6-9706-753dba4fa968",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "host = \"<host>\"\n",
-    "port = \"<port>\"\n",
-    "db_name = \"<db_name>\"\n",
-    "collection_name = \"<collection_name>\"\n",
-    "# query_dict is passed into db.collection.find()\n",
-    "query_dict = {}\n",
-    "field_names = [\"text\"]\n",
-    "reader = SimpleMongoReader(host, port)\n",
-    "documents = reader.load_data(\n",
-    "    db_name, collection_name, field_names, query_dict=query_dict\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "341295df-2029-4728-ab3d-2ee178a7e6f1",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "index = ListIndex.from_documents(documents)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "01c26b9d-49ec-4a6e-9c61-5c06bb86bbb2",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# set Logging to DEBUG for more detailed outputs\n",
-    "query_engine = index.as_query_engine()\n",
-    "response = query_engine.query(\"<query_text>\")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "f160c678-2fb5-4d6d-b2bc-87abb61cfdec",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "display(Markdown(f\"<b>{response}</b>\"))"
-   ]
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "Python 3 (ipykernel)",
-   "language": "python",
-   "name": "python3"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 3
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython3",
-   "version": "3.9.6"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 5
+    "cells": [
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "effeb5a7-8544-4ee4-8c11-bad0d8165394",
+            "metadata": {},
+            "source": [
+                "# MongoDB Reader\n",
+                "Demonstrates our MongoDB data connector"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "60355655",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "import logging\n",
+                "import sys\n",
+                "\n",
+                "logging.basicConfig(stream=sys.stdout, level=logging.INFO)\n",
+                "logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 1,
+            "id": "6ea1f66d-10ed-4417-bdcb-f8a894836ea5",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "from llama_index import SummaryIndex, SimpleMongoReader\n",
+                "from IPython.display import Markdown, display\n",
+                "import os"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "da90589a-fb44-4ec6-9706-753dba4fa968",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "host = \"<host>\"\n",
+                "port = \"<port>\"\n",
+                "db_name = \"<db_name>\"\n",
+                "collection_name = \"<collection_name>\"\n",
+                "# query_dict is passed into db.collection.find()\n",
+                "query_dict = {}\n",
+                "field_names = [\"text\"]\n",
+                "reader = SimpleMongoReader(host, port)\n",
+                "documents = reader.load_data(\n",
+                "    db_name, collection_name, field_names, query_dict=query_dict\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "341295df-2029-4728-ab3d-2ee178a7e6f1",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "index = SummaryIndex.from_documents(documents)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "01c26b9d-49ec-4a6e-9c61-5c06bb86bbb2",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# set Logging to DEBUG for more detailed outputs\n",
+                "query_engine = index.as_query_engine()\n",
+                "response = query_engine.query(\"<query_text>\")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "f160c678-2fb5-4d6d-b2bc-87abb61cfdec",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "display(Markdown(f\"<b>{response}</b>\"))"
+            ]
+        }
+    ],
+    "metadata": {
+        "kernelspec": {
+            "display_name": "Python 3 (ipykernel)",
+            "language": "python",
+            "name": "python3"
+        },
+        "language_info": {
+            "codemirror_mode": {
+                "name": "ipython",
+                "version": 3
+            },
+            "file_extension": ".py",
+            "mimetype": "text/x-python",
+            "name": "python",
+            "nbconvert_exporter": "python",
+            "pygments_lexer": "ipython3",
+            "version": "3.9.6"
+        }
+    },
+    "nbformat": 4,
+    "nbformat_minor": 5
 }
diff --git a/docs/examples/data_connectors/NotionDemo.ipynb b/docs/examples/data_connectors/NotionDemo.ipynb
index 0f43b3f811c050346161dc1529648498aed90dc8..6ecc0092bbfa96118413a744efb4dd1b9a9d370a 100644
--- a/docs/examples/data_connectors/NotionDemo.ipynb
+++ b/docs/examples/data_connectors/NotionDemo.ipynb
@@ -1,151 +1,151 @@
 {
- "cells": [
-  {
-   "cell_type": "markdown",
-   "id": "effeb5a7-8544-4ee4-8c11-bad0d8165394",
-   "metadata": {},
-   "source": [
-    "# Notion Reader\n",
-    "Demonstrates our Notion data connector"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "995afc19",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "import logging\n",
-    "import sys\n",
-    "\n",
-    "logging.basicConfig(stream=sys.stdout, level=logging.INFO)\n",
-    "logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 1,
-   "id": "6ea1f66d-10ed-4417-bdcb-f8a894836ea5",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "from llama_index import ListIndex, NotionPageReader\n",
-    "from IPython.display import Markdown, display\n",
-    "import os"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "da90589a-fb44-4ec6-9706-753dba4fa968",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "integration_token = os.getenv(\"NOTION_INTEGRATION_TOKEN\")\n",
-    "page_ids = [\"<page_id>\"]\n",
-    "documents = NotionPageReader(integration_token=integration_token).load_data(\n",
-    "    page_ids=page_ids\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "341295df-2029-4728-ab3d-2ee178a7e6f1",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "index = ListIndex.from_documents(documents)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "01c26b9d-49ec-4a6e-9c61-5c06bb86bbb2",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# set Logging to DEBUG for more detailed outputs\n",
-    "query_engine = index.as_query_engine()\n",
-    "response = query_engine.query(\"<query_text>\")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "f160c678-2fb5-4d6d-b2bc-87abb61cfdec",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "display(Markdown(f\"<b>{response}</b>\"))"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "id": "8e8e1b13",
-   "metadata": {},
-   "source": [
-    "You can also pass the id of a database to index all the pages in that database:"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 4,
-   "id": "087431a2-b04c-441c-820f-6d6d3cdf831c",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "database_id = \"<database-id>\"\n",
-    "\n",
-    "# https://developers.notion.com/docs/working-with-databases for how to find your database id\n",
-    "\n",
-    "documents = NotionPageReader(integration_token=integration_token).load_data(\n",
-    "    database_id=database_id\n",
-    ")\n",
-    "\n",
-    "print(documents)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "6464025d-0c5a-4e2d-8a90-91c29ece9884",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# set Logging to DEBUG for more detailed outputs\n",
-    "index = ListIndex.from_documents(documents)\n",
-    "query_engine = index.as_query_engine()\n",
-    "response = query_engine.query(\"<query_text>\")\n",
-    "display(Markdown(f\"<b>{response}</b>\"))"
-   ]
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "Python 3 (ipykernel)",
-   "language": "python",
-   "name": "python3"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 3
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython3",
-   "version": "3.11.1"
-  },
-  "vscode": {
-   "interpreter": {
-    "hash": "c32397a35d2e76e766f80c3872b208f0c0029e8a6a9b8e2a8fe7b1641cfa009b"
-   }
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 5
+    "cells": [
+        {
+            "cell_type": "markdown",
+            "id": "effeb5a7-8544-4ee4-8c11-bad0d8165394",
+            "metadata": {},
+            "source": [
+                "# Notion Reader\n",
+                "Demonstrates our Notion data connector"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "995afc19",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "import logging\n",
+                "import sys\n",
+                "\n",
+                "logging.basicConfig(stream=sys.stdout, level=logging.INFO)\n",
+                "logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 1,
+            "id": "6ea1f66d-10ed-4417-bdcb-f8a894836ea5",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "from llama_index import SummaryIndex, NotionPageReader\n",
+                "from IPython.display import Markdown, display\n",
+                "import os"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "da90589a-fb44-4ec6-9706-753dba4fa968",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "integration_token = os.getenv(\"NOTION_INTEGRATION_TOKEN\")\n",
+                "page_ids = [\"<page_id>\"]\n",
+                "documents = NotionPageReader(integration_token=integration_token).load_data(\n",
+                "    page_ids=page_ids\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "341295df-2029-4728-ab3d-2ee178a7e6f1",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "index = SummaryIndex.from_documents(documents)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "01c26b9d-49ec-4a6e-9c61-5c06bb86bbb2",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# set Logging to DEBUG for more detailed outputs\n",
+                "query_engine = index.as_query_engine()\n",
+                "response = query_engine.query(\"<query_text>\")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "f160c678-2fb5-4d6d-b2bc-87abb61cfdec",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "display(Markdown(f\"<b>{response}</b>\"))"
+            ]
+        },
+        {
+            "cell_type": "markdown",
+            "id": "8e8e1b13",
+            "metadata": {},
+            "source": [
+                "You can also pass the id of a database to index all the pages in that database:"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 4,
+            "id": "087431a2-b04c-441c-820f-6d6d3cdf831c",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "database_id = \"<database-id>\"\n",
+                "\n",
+                "# https://developers.notion.com/docs/working-with-databases for how to find your database id\n",
+                "\n",
+                "documents = NotionPageReader(integration_token=integration_token).load_data(\n",
+                "    database_id=database_id\n",
+                ")\n",
+                "\n",
+                "print(documents)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "6464025d-0c5a-4e2d-8a90-91c29ece9884",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# set Logging to DEBUG for more detailed outputs\n",
+                "index = SummaryIndex.from_documents(documents)\n",
+                "query_engine = index.as_query_engine()\n",
+                "response = query_engine.query(\"<query_text>\")\n",
+                "display(Markdown(f\"<b>{response}</b>\"))"
+            ]
+        }
+    ],
+    "metadata": {
+        "kernelspec": {
+            "display_name": "Python 3 (ipykernel)",
+            "language": "python",
+            "name": "python3"
+        },
+        "language_info": {
+            "codemirror_mode": {
+                "name": "ipython",
+                "version": 3
+            },
+            "file_extension": ".py",
+            "mimetype": "text/x-python",
+            "name": "python",
+            "nbconvert_exporter": "python",
+            "pygments_lexer": "ipython3",
+            "version": "3.11.1"
+        },
+        "vscode": {
+            "interpreter": {
+                "hash": "c32397a35d2e76e766f80c3872b208f0c0029e8a6a9b8e2a8fe7b1641cfa009b"
+            }
+        }
+    },
+    "nbformat": 4,
+    "nbformat_minor": 5
 }
diff --git a/docs/examples/data_connectors/PineconeDemo.ipynb b/docs/examples/data_connectors/PineconeDemo.ipynb
index b00ee6e586522eb7a2de24bf9d871ca54fdd2b61..a1828d5dd22bc818a7462329e9a3ef129886c3a2 100644
--- a/docs/examples/data_connectors/PineconeDemo.ipynb
+++ b/docs/examples/data_connectors/PineconeDemo.ipynb
@@ -1,158 +1,158 @@
 {
- "cells": [
-  {
-   "cell_type": "markdown",
-   "id": "f3ca56f0-6ef1-426f-bac5-fd7c374d0f51",
-   "metadata": {},
-   "source": [
-    "# Pinecone Reader"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "b2bd3c59",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "import logging\n",
-    "import sys\n",
-    "\n",
-    "logging.basicConfig(stream=sys.stdout, level=logging.INFO)\n",
-    "logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 1,
-   "id": "e2f49003-b952-4b9b-b935-2941f9303773",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "api_key = \"<api_key>\""
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 2,
-   "id": "262f990a-79c8-413a-9f3c-cd9a3c191307",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "from llama_index.readers.pinecone import PineconeReader"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 3,
-   "id": "252f8163-7297-44b6-a838-709e9662f3d6",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "reader = PineconeReader(api_key=api_key, environment=\"us-west1-gcp\")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 4,
-   "id": "53b49187-8477-436c-9718-5d2f8cc6fad0",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# the id_to_text_map specifies a mapping from the ID specified in Pinecone to your text.\n",
-    "id_to_text_map = {\n",
-    "    \"id1\": \"text blob 1\",\n",
-    "    \"id2\": \"text blob 2\",\n",
-    "}\n",
-    "\n",
-    "# the query_vector is an embedding representation of your query_vector\n",
-    "# Example query vector:\n",
-    "#   query_vector=[0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3]\n",
-    "\n",
-    "query_vector = [n1, n2, n3, ...]"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "a88be1c4-603f-48b9-ac64-10a219af4951",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# NOTE: Required args are index_name, id_to_text_map, vector.\n",
-    "# In addition, we pass-through all kwargs that can be passed into the the `Query` operation in Pinecone.\n",
-    "# See the API reference: https://docs.pinecone.io/reference/query\n",
-    "# and also the Python client: https://github.com/pinecone-io/pinecone-python-client\n",
-    "# for more details.\n",
-    "documents = reader.load_data(\n",
-    "    index_name=\"quickstart\",\n",
-    "    id_to_text_map=id_to_text_map,\n",
-    "    top_k=3,\n",
-    "    vector=query_vector,\n",
-    "    separate_documents=True,\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "id": "a4baf59e-fc97-4a1e-947f-354a6438ffa6",
-   "metadata": {},
-   "source": [
-    "### Create index "
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "109d083e-f3b4-420b-886b-087c8cf3f98b",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "index = ListIndex.from_documents(documents)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "e15b9177-9e94-4e4e-9a2e-cd3a288a7faf",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# set Logging to DEBUG for more detailed outputs\n",
-    "query_engine = index.as_query_engine()\n",
-    "response = query_engine.query(\"<query_text>\")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "67b50613-a589-4acf-ba16-10571b415268",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "display(Markdown(f\"<b>{response}</b>\"))"
-   ]
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "Python 3 (ipykernel)",
-   "language": "python",
-   "name": "python3"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 3
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython3",
-   "version": "3.11.1"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 5
-}
+    "cells": [
+        {
+            "cell_type": "markdown",
+            "id": "f3ca56f0-6ef1-426f-bac5-fd7c374d0f51",
+            "metadata": {},
+            "source": [
+                "# Pinecone Reader"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "b2bd3c59",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "import logging\n",
+                "import sys\n",
+                "\n",
+                "logging.basicConfig(stream=sys.stdout, level=logging.INFO)\n",
+                "logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 1,
+            "id": "e2f49003-b952-4b9b-b935-2941f9303773",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "api_key = \"<api_key>\""
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 2,
+            "id": "262f990a-79c8-413a-9f3c-cd9a3c191307",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "from llama_index.readers.pinecone import PineconeReader"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 3,
+            "id": "252f8163-7297-44b6-a838-709e9662f3d6",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "reader = PineconeReader(api_key=api_key, environment=\"us-west1-gcp\")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 4,
+            "id": "53b49187-8477-436c-9718-5d2f8cc6fad0",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# the id_to_text_map specifies a mapping from the ID specified in Pinecone to your text.\n",
+                "id_to_text_map = {\n",
+                "    \"id1\": \"text blob 1\",\n",
+                "    \"id2\": \"text blob 2\",\n",
+                "}\n",
+                "\n",
+                "# the query_vector is an embedding representation of your query_vector\n",
+                "# Example query vector:\n",
+                "#   query_vector=[0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3]\n",
+                "\n",
+                "query_vector = [n1, n2, n3, ...]"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "a88be1c4-603f-48b9-ac64-10a219af4951",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# NOTE: Required args are index_name, id_to_text_map, vector.\n",
+                "# In addition, we pass-through all kwargs that can be passed into the `Query` operation in Pinecone.\n",
+                "# See the API reference: https://docs.pinecone.io/reference/query\n",
+                "# and also the Python client: https://github.com/pinecone-io/pinecone-python-client\n",
+                "# for more details.\n",
+                "documents = reader.load_data(\n",
+                "    index_name=\"quickstart\",\n",
+                "    id_to_text_map=id_to_text_map,\n",
+                "    top_k=3,\n",
+                "    vector=query_vector,\n",
+                "    separate_documents=True,\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "markdown",
+            "id": "a4baf59e-fc97-4a1e-947f-354a6438ffa6",
+            "metadata": {},
+            "source": [
+                "### Create index "
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "109d083e-f3b4-420b-886b-087c8cf3f98b",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "from llama_index import SummaryIndex\n\nindex = SummaryIndex.from_documents(documents)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "e15b9177-9e94-4e4e-9a2e-cd3a288a7faf",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# set Logging to DEBUG for more detailed outputs\n",
+                "query_engine = index.as_query_engine()\n",
+                "response = query_engine.query(\"<query_text>\")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "67b50613-a589-4acf-ba16-10571b415268",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "display(Markdown(f\"<b>{response}</b>\"))"
+            ]
+        }
+    ],
+    "metadata": {
+        "kernelspec": {
+            "display_name": "Python 3 (ipykernel)",
+            "language": "python",
+            "name": "python3"
+        },
+        "language_info": {
+            "codemirror_mode": {
+                "name": "ipython",
+                "version": 3
+            },
+            "file_extension": ".py",
+            "mimetype": "text/x-python",
+            "name": "python",
+            "nbconvert_exporter": "python",
+            "pygments_lexer": "ipython3",
+            "version": "3.11.1"
+        }
+    },
+    "nbformat": 4,
+    "nbformat_minor": 5
+}
\ No newline at end of file
diff --git a/docs/examples/data_connectors/PsychicDemo.ipynb b/docs/examples/data_connectors/PsychicDemo.ipynb
index 90ee2296c62524e47d0ff3c1c9918c26187db31f..dccdda8d885150716407dccc76f892d885a2a8c7 100644
--- a/docs/examples/data_connectors/PsychicDemo.ipynb
+++ b/docs/examples/data_connectors/PsychicDemo.ipynb
@@ -34,7 +34,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "from llama_index import ListIndex, PsychicReader\n",
+    "from llama_index import SummaryIndex, PsychicReader\n",
     "from IPython.display import Markdown, display"
    ]
   },
@@ -100,7 +100,7 @@
    "source": [
     "# set Logging to DEBUG for more detailed outputs\n",
     "os.environ[\"OPENAI_API_KEY\"] = \"OPENAI_API_KEY\"\n",
-    "index = ListIndex.from_documents(documents)\n",
+    "index = SummaryIndex.from_documents(documents)\n",
     "query_engine = index.as_query_engine()\n",
     "response = query_engine.query(\"What is Psychic's privacy policy?\")\n",
     "display(Markdown(f\"<b>{response}</b>\"))"
diff --git a/docs/examples/data_connectors/QdrantDemo.ipynb b/docs/examples/data_connectors/QdrantDemo.ipynb
index ab8ad5dd765bbb550947df25481997f88abb7124..5bcb5160c39f640e1fc885a446c8f18f23e44c30 100644
--- a/docs/examples/data_connectors/QdrantDemo.ipynb
+++ b/docs/examples/data_connectors/QdrantDemo.ipynb
@@ -1,134 +1,134 @@
 {
- "cells": [
-  {
-   "cell_type": "markdown",
-   "id": "f3ca56f0-6ef1-426f-bac5-fd7c374d0f51",
-   "metadata": {},
-   "source": [
-    "# Qdrant Reader"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "778ee662",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "import logging\n",
-    "import sys\n",
-    "\n",
-    "logging.basicConfig(stream=sys.stdout, level=logging.INFO)\n",
-    "logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "262f990a-79c8-413a-9f3c-cd9a3c191307",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "from llama_index.readers.qdrant import QdrantReader"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "252f8163-7297-44b6-a838-709e9662f3d6",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "reader = QdrantReader(host=\"localhost\")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "53b49187-8477-436c-9718-5d2f8cc6fad0",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# the query_vector is an embedding representation of your query_vector\n",
-    "# Example query vector:\n",
-    "#   query_vector=[0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3]\n",
-    "\n",
-    "query_vector = [n1, n2, n3, ...]"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "a88be1c4-603f-48b9-ac64-10a219af4951",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# NOTE: Required args are collection_name, query_vector.\n",
-    "# See the Python client: https://github.com/qdrant/qdrant_client\n",
-    "# for more details.\n",
-    "documents = reader.load_data(collection_name=\"demo\", query_vector=query_vector, limit=5)"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "id": "169b4273-eb20-4d06-9ffe-71320f4570f6",
-   "metadata": {},
-   "source": [
-    "### Create index"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "ac4563a1",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "index = ListIndex.from_documents(documents)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "f06b02db",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# set Logging to DEBUG for more detailed outputs\n",
-    "query_engine = index.as_query_engine()\n",
-    "response = query_engine.query(\"<query_text>\")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "97d1ae80",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "display(Markdown(f\"<b>{response}</b>\"))"
-   ]
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "Python 3 (ipykernel)",
-   "language": "python",
-   "name": "python3"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 3
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython3",
-   "version": "3.11.1"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 5
-}
+    "cells": [
+        {
+            "cell_type": "markdown",
+            "id": "f3ca56f0-6ef1-426f-bac5-fd7c374d0f51",
+            "metadata": {},
+            "source": [
+                "# Qdrant Reader"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "778ee662",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "import logging\n",
+                "import sys\n",
+                "\n",
+                "logging.basicConfig(stream=sys.stdout, level=logging.INFO)\n",
+                "logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "262f990a-79c8-413a-9f3c-cd9a3c191307",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "from llama_index import SummaryIndex\nfrom llama_index.readers.qdrant import QdrantReader"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "252f8163-7297-44b6-a838-709e9662f3d6",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "reader = QdrantReader(host=\"localhost\")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "53b49187-8477-436c-9718-5d2f8cc6fad0",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# the query_vector is an embedding representation of your query_vector\n",
+                "# Example query vector:\n",
+                "#   query_vector=[0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3, 0.3]\n",
+                "\n",
+                "query_vector = [n1, n2, n3, ...]"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "a88be1c4-603f-48b9-ac64-10a219af4951",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# NOTE: Required args are collection_name, query_vector.\n",
+                "# See the Python client: https://github.com/qdrant/qdrant_client\n",
+                "# for more details.\n",
+                "documents = reader.load_data(collection_name=\"demo\", query_vector=query_vector, limit=5)"
+            ]
+        },
+        {
+            "cell_type": "markdown",
+            "id": "169b4273-eb20-4d06-9ffe-71320f4570f6",
+            "metadata": {},
+            "source": [
+                "### Create index"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "ac4563a1",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "index = SummaryIndex.from_documents(documents)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "f06b02db",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# set Logging to DEBUG for more detailed outputs\n",
+                "query_engine = index.as_query_engine()\n",
+                "response = query_engine.query(\"<query_text>\")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "97d1ae80",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "display(Markdown(f\"<b>{response}</b>\"))"
+            ]
+        }
+    ],
+    "metadata": {
+        "kernelspec": {
+            "display_name": "Python 3 (ipykernel)",
+            "language": "python",
+            "name": "python3"
+        },
+        "language_info": {
+            "codemirror_mode": {
+                "name": "ipython",
+                "version": 3
+            },
+            "file_extension": ".py",
+            "mimetype": "text/x-python",
+            "name": "python",
+            "nbconvert_exporter": "python",
+            "pygments_lexer": "ipython3",
+            "version": "3.11.1"
+        }
+    },
+    "nbformat": 4,
+    "nbformat_minor": 5
+}
\ No newline at end of file
diff --git a/docs/examples/data_connectors/SlackDemo.ipynb b/docs/examples/data_connectors/SlackDemo.ipynb
index 5b9c8a17d89a7b681db6bb35751bc434d2096d36..cedd1677bf2ab1129385882a03047264ff2f365e 100644
--- a/docs/examples/data_connectors/SlackDemo.ipynb
+++ b/docs/examples/data_connectors/SlackDemo.ipynb
@@ -30,7 +30,7 @@
             "metadata": {},
             "outputs": [],
             "source": [
-                "from llama_index import ListIndex, SlackReader\n",
+                "from llama_index import SummaryIndex, SlackReader\n",
                 "from IPython.display import Markdown, display\n",
                 "import os"
             ]
@@ -54,7 +54,7 @@
             "metadata": {},
             "outputs": [],
             "source": [
-                "index = ListIndex.from_documents(documents)"
+                "index = SummaryIndex.from_documents(documents)"
             ]
         },
         {
diff --git a/docs/examples/data_connectors/WeaviateDemo.ipynb b/docs/examples/data_connectors/WeaviateDemo.ipynb
index d8524f3199c1880c2571c12c74c8254591b42657..98583687d0a880ae9c9bbfbd2630fd8273c85fbd 100644
--- a/docs/examples/data_connectors/WeaviateDemo.ipynb
+++ b/docs/examples/data_connectors/WeaviateDemo.ipynb
@@ -1,181 +1,181 @@
 {
- "cells": [
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "36e7bb96-0c27-47e9-a525-c11f40be3b86",
-   "metadata": {},
-   "source": [
-    "# Weaviate Reader"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "38ca1434",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "import logging\n",
-    "import sys\n",
-    "\n",
-    "logging.basicConfig(stream=sys.stdout, level=logging.INFO)\n",
-    "logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 3,
-   "id": "d99bc57b-85df-46ac-8262-2409344af428",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "import weaviate\n",
-    "from llama_index.readers.weaviate import WeaviateReader"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "fec36c7a-3766-4167-890e-b93adb831a64",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# See https://weaviate.io/developers/weaviate/current/client-libraries/python.html\n",
-    "# for more details on authentication\n",
-    "resource_owner_config = weaviate.AuthClientPassword(\n",
-    "    username=\"<username>\",\n",
-    "    password=\"<password>\",\n",
-    ")\n",
-    "\n",
-    "# initialize reader\n",
-    "reader = WeaviateReader(\n",
-    "    \"https://<cluster-id>.semi.network/\", auth_client_secret=resource_owner_config\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "id": "ce9f299c-4f0a-4bca-bc90-79848f02b381",
-   "metadata": {},
-   "source": [
-    "You have two options for the Weaviate reader: 1) directly specify the class_name and properties, or 2) input the raw graphql_query. Examples are shown below."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 4,
-   "id": "b92d69a1-d39f-45cf-a136-cb9c2f2f5cdf",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# 1) load data using class_name and properties\n",
-    "# docs = reader.load_data(\n",
-    "#    class_name=\"Author\", properties=[\"name\", \"description\"], separate_documents=True\n",
-    "# )\n",
-    "\n",
-    "documents = reader.load_data(\n",
-    "    class_name=\"<class_name>\",\n",
-    "    properties=[\"property1\", \"property2\", \"...\"],\n",
-    "    separate_documents=True,\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 4,
-   "id": "722b5d47-9897-4c54-9734-259ab0c1634c",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# 2) example GraphQL query\n",
-    "# query = \"\"\"\n",
-    "# {\n",
-    "#   Get {\n",
-    "#     Author {\n",
-    "#       name\n",
-    "#       description\n",
-    "#     }\n",
-    "#   }\n",
-    "# }\n",
-    "# \"\"\"\n",
-    "# docs = reader.load_data(graphql_query=query, separate_documents=True)\n",
-    "\n",
-    "query = \"\"\"\n",
-    "{\n",
-    "  Get {\n",
-    "    <class_name> {\n",
-    "      <property1>\n",
-    "      <property2>\n",
-    "      ...\n",
-    "    }\n",
-    "  }\n",
-    "}\n",
-    "\"\"\"\n",
-    "\n",
-    "documents = reader.load_data(graphql_query=query, separate_documents=True)"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "id": "169b4273-eb20-4d06-9ffe-71320f4570f6",
-   "metadata": {},
-   "source": [
-    "### Create index"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "92599a0a-93ba-4c93-80f1-9acae0663c34",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "index = ListIndex.from_documents(documents)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "52d93c3f-a08d-4637-98bc-0c3cc693c563",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# set Logging to DEBUG for more detailed outputs\n",
-    "query_engine = index.as_query_engine()\n",
-    "response = query_engine.query(\"<query_text>\")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "771b42be-4108-43a0-a1b4-b259a7819936",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "display(Markdown(f\"<b>{response}</b>\"))"
-   ]
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "Python 3 (ipykernel)",
-   "language": "python",
-   "name": "python3"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 3
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython3",
-   "version": "3.11.1"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 5
-}
+    "cells": [
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "36e7bb96-0c27-47e9-a525-c11f40be3b86",
+            "metadata": {},
+            "source": [
+                "# Weaviate Reader"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "38ca1434",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "import logging\n",
+                "import sys\n",
+                "\n",
+                "logging.basicConfig(stream=sys.stdout, level=logging.INFO)\n",
+                "logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 3,
+            "id": "d99bc57b-85df-46ac-8262-2409344af428",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "import weaviate\n",
+                "from llama_index import SummaryIndex\nfrom llama_index.readers.weaviate import WeaviateReader"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "fec36c7a-3766-4167-890e-b93adb831a64",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# See https://weaviate.io/developers/weaviate/current/client-libraries/python.html\n",
+                "# for more details on authentication\n",
+                "resource_owner_config = weaviate.AuthClientPassword(\n",
+                "    username=\"<username>\",\n",
+                "    password=\"<password>\",\n",
+                ")\n",
+                "\n",
+                "# initialize reader\n",
+                "reader = WeaviateReader(\n",
+                "    \"https://<cluster-id>.semi.network/\", auth_client_secret=resource_owner_config\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "markdown",
+            "id": "ce9f299c-4f0a-4bca-bc90-79848f02b381",
+            "metadata": {},
+            "source": [
+                "You have two options for the Weaviate reader: 1) directly specify the class_name and properties, or 2) input the raw graphql_query. Examples are shown below."
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 4,
+            "id": "b92d69a1-d39f-45cf-a136-cb9c2f2f5cdf",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# 1) load data using class_name and properties\n",
+                "# docs = reader.load_data(\n",
+                "#    class_name=\"Author\", properties=[\"name\", \"description\"], separate_documents=True\n",
+                "# )\n",
+                "\n",
+                "documents = reader.load_data(\n",
+                "    class_name=\"<class_name>\",\n",
+                "    properties=[\"property1\", \"property2\", \"...\"],\n",
+                "    separate_documents=True,\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 4,
+            "id": "722b5d47-9897-4c54-9734-259ab0c1634c",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# 2) example GraphQL query\n",
+                "# query = \"\"\"\n",
+                "# {\n",
+                "#   Get {\n",
+                "#     Author {\n",
+                "#       name\n",
+                "#       description\n",
+                "#     }\n",
+                "#   }\n",
+                "# }\n",
+                "# \"\"\"\n",
+                "# docs = reader.load_data(graphql_query=query, separate_documents=True)\n",
+                "\n",
+                "query = \"\"\"\n",
+                "{\n",
+                "  Get {\n",
+                "    <class_name> {\n",
+                "      <property1>\n",
+                "      <property2>\n",
+                "      ...\n",
+                "    }\n",
+                "  }\n",
+                "}\n",
+                "\"\"\"\n",
+                "\n",
+                "documents = reader.load_data(graphql_query=query, separate_documents=True)"
+            ]
+        },
+        {
+            "cell_type": "markdown",
+            "id": "169b4273-eb20-4d06-9ffe-71320f4570f6",
+            "metadata": {},
+            "source": [
+                "### Create index"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "92599a0a-93ba-4c93-80f1-9acae0663c34",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "index = SummaryIndex.from_documents(documents)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "52d93c3f-a08d-4637-98bc-0c3cc693c563",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# set Logging to DEBUG for more detailed outputs\n",
+                "query_engine = index.as_query_engine()\n",
+                "response = query_engine.query(\"<query_text>\")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "771b42be-4108-43a0-a1b4-b259a7819936",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "display(Markdown(f\"<b>{response}</b>\"))"
+            ]
+        }
+    ],
+    "metadata": {
+        "kernelspec": {
+            "display_name": "Python 3 (ipykernel)",
+            "language": "python",
+            "name": "python3"
+        },
+        "language_info": {
+            "codemirror_mode": {
+                "name": "ipython",
+                "version": 3
+            },
+            "file_extension": ".py",
+            "mimetype": "text/x-python",
+            "name": "python",
+            "nbconvert_exporter": "python",
+            "pygments_lexer": "ipython3",
+            "version": "3.11.1"
+        }
+    },
+    "nbformat": 4,
+    "nbformat_minor": 5
+}
\ No newline at end of file
diff --git a/docs/examples/data_connectors/WebPageDemo.ipynb b/docs/examples/data_connectors/WebPageDemo.ipynb
index 5e510792e4498f4d59634dd7da95a4063ae01efb..c11e620afa5cea0a47d5334a84d72ff37946ce6b 100644
--- a/docs/examples/data_connectors/WebPageDemo.ipynb
+++ b/docs/examples/data_connectors/WebPageDemo.ipynb
@@ -1,226 +1,226 @@
 {
- "cells": [
-  {
-   "cell_type": "markdown",
-   "id": "30146ad2-f165-4f4b-ae07-fe6597a2964f",
-   "metadata": {},
-   "source": [
-    "# Web Page Reader\n",
-    "\n",
-    "Demonstrates our web page reader."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "3c39063b",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "import logging\n",
-    "import sys\n",
-    "\n",
-    "logging.basicConfig(stream=sys.stdout, level=logging.INFO)\n",
-    "logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "id": "2315a154-f72d-4447-b1eb-cde9b66868cb",
-   "metadata": {},
-   "source": [
-    "#### Using SimpleWebPageReader"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "87bf7ecd-50cd-47da-9f0e-bc48d7ae45d8",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "from llama_index import ListIndex, SimpleWebPageReader\n",
-    "from IPython.display import Markdown, display\n",
-    "import os"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "b6de3929-51eb-4064-b4b6-c203bb6debc4",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# NOTE: the html_to_text=True option requires html2text to be installed"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "663403de-2e6e-4340-ab8f-8ee681bc06aa",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "documents = SimpleWebPageReader(html_to_text=True).load_data(\n",
-    "    [\"http://paulgraham.com/worked.html\"]\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "b8cd183a-2423-4a3e-ad92-dfe89ed5454e",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "documents[0]"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "26854cc3-af61-4910-ab6b-3bed6acfb447",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "index = ListIndex.from_documents(documents)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "5cfdf87a-97cb-481f-ad51-be5bf8b5217f",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# set Logging to DEBUG for more detailed outputs\n",
-    "query_engine = index.as_query_engine()\n",
-    "response = query_engine.query(\"What did the author do growing up?\")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "7278d033-cae3-4ddf-96bd-75ea570ca53f",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "display(Markdown(f\"<b>{response}</b>\"))"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "id": "2708dc99-0e4d-4c7e-b180-8392286d87c2",
-   "metadata": {},
-   "source": [
-    "#### Using TrafilaturaWebReader"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "aa2d54c6-c694-4852-a743-165e4777bd56",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "from llama_index import TrafilaturaWebReader"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "46854f2f-426e-40a3-a87f-5fb51f90e14c",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "documents = TrafilaturaWebReader().load_data([\"http://paulgraham.com/worked.html\"])"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "80752ad3-1ed8-4695-9247-22efbe475746",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "index = ListIndex.from_documents(documents)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "8cc9b154-1dcf-479b-b49b-251874aea506",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# set Logging to DEBUG for more detailed outputs\n",
-    "query_engine = index.as_query_engine()\n",
-    "response = query_engine.query(\"What did the author do growing up?\")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "971b6415-8bcd-4d8b-a1de-9b7ada3cd392",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "display(Markdown(f\"<b>{response}</b>\"))"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "id": "b2b6d07c",
-   "metadata": {},
-   "source": [
-    "### Using RssReader"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "a5ad5ca8",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "from llama_index import ListIndex, RssReader\n",
-    "\n",
-    "documents = RssReader().load_data(\n",
-    "    [\"https://rss.nytimes.com/services/xml/rss/nyt/HomePage.xml\"]\n",
-    ")\n",
-    "\n",
-    "index = ListIndex.from_documents(documents)\n",
-    "\n",
-    "# set Logging to DEBUG for more detailed outputs\n",
-    "query_engine = index.as_query_engine()\n",
-    "response = query_engine.query(\"What happened in the news today?\")"
-   ]
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "Python 3 (ipykernel)",
-   "language": "python",
-   "name": "python3"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 3
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython3",
-   "version": "3.11.1"
-  },
-  "vscode": {
-   "interpreter": {
-    "hash": "c32397a35d2e76e766f80c3872b208f0c0029e8a6a9b8e2a8fe7b1641cfa009b"
-   }
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 5
+    "cells": [
+        {
+            "cell_type": "markdown",
+            "id": "30146ad2-f165-4f4b-ae07-fe6597a2964f",
+            "metadata": {},
+            "source": [
+                "# Web Page Reader\n",
+                "\n",
+                "Demonstrates our web page reader."
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "3c39063b",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "import logging\n",
+                "import sys\n",
+                "\n",
+                "logging.basicConfig(stream=sys.stdout, level=logging.INFO)\n",
+                "logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))"
+            ]
+        },
+        {
+            "cell_type": "markdown",
+            "id": "2315a154-f72d-4447-b1eb-cde9b66868cb",
+            "metadata": {},
+            "source": [
+                "#### Using SimpleWebPageReader"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "87bf7ecd-50cd-47da-9f0e-bc48d7ae45d8",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "from llama_index import SummaryIndex, SimpleWebPageReader\n",
+                "from IPython.display import Markdown, display\n",
+                "import os"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "b6de3929-51eb-4064-b4b6-c203bb6debc4",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# NOTE: the html_to_text=True option requires html2text to be installed"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "663403de-2e6e-4340-ab8f-8ee681bc06aa",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "documents = SimpleWebPageReader(html_to_text=True).load_data(\n",
+                "    [\"http://paulgraham.com/worked.html\"]\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "b8cd183a-2423-4a3e-ad92-dfe89ed5454e",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "documents[0]"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "26854cc3-af61-4910-ab6b-3bed6acfb447",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "index = SummaryIndex.from_documents(documents)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "5cfdf87a-97cb-481f-ad51-be5bf8b5217f",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# set Logging to DEBUG for more detailed outputs\n",
+                "query_engine = index.as_query_engine()\n",
+                "response = query_engine.query(\"What did the author do growing up?\")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "7278d033-cae3-4ddf-96bd-75ea570ca53f",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "display(Markdown(f\"<b>{response}</b>\"))"
+            ]
+        },
+        {
+            "cell_type": "markdown",
+            "id": "2708dc99-0e4d-4c7e-b180-8392286d87c2",
+            "metadata": {},
+            "source": [
+                "#### Using TrafilaturaWebReader"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "aa2d54c6-c694-4852-a743-165e4777bd56",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "from llama_index import TrafilaturaWebReader"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "46854f2f-426e-40a3-a87f-5fb51f90e14c",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "documents = TrafilaturaWebReader().load_data([\"http://paulgraham.com/worked.html\"])"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "80752ad3-1ed8-4695-9247-22efbe475746",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "index = SummaryIndex.from_documents(documents)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "8cc9b154-1dcf-479b-b49b-251874aea506",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# set Logging to DEBUG for more detailed outputs\n",
+                "query_engine = index.as_query_engine()\n",
+                "response = query_engine.query(\"What did the author do growing up?\")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "971b6415-8bcd-4d8b-a1de-9b7ada3cd392",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "display(Markdown(f\"<b>{response}</b>\"))"
+            ]
+        },
+        {
+            "cell_type": "markdown",
+            "id": "b2b6d07c",
+            "metadata": {},
+            "source": [
+                "### Using RssReader"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "a5ad5ca8",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "from llama_index import SummaryIndex, RssReader\n",
+                "\n",
+                "documents = RssReader().load_data(\n",
+                "    [\"https://rss.nytimes.com/services/xml/rss/nyt/HomePage.xml\"]\n",
+                ")\n",
+                "\n",
+                "index = SummaryIndex.from_documents(documents)\n",
+                "\n",
+                "# set Logging to DEBUG for more detailed outputs\n",
+                "query_engine = index.as_query_engine()\n",
+                "response = query_engine.query(\"What happened in the news today?\")"
+            ]
+        }
+    ],
+    "metadata": {
+        "kernelspec": {
+            "display_name": "Python 3 (ipykernel)",
+            "language": "python",
+            "name": "python3"
+        },
+        "language_info": {
+            "codemirror_mode": {
+                "name": "ipython",
+                "version": 3
+            },
+            "file_extension": ".py",
+            "mimetype": "text/x-python",
+            "name": "python",
+            "nbconvert_exporter": "python",
+            "pygments_lexer": "ipython3",
+            "version": "3.11.1"
+        },
+        "vscode": {
+            "interpreter": {
+                "hash": "c32397a35d2e76e766f80c3872b208f0c0029e8a6a9b8e2a8fe7b1641cfa009b"
+            }
+        }
+    },
+    "nbformat": 4,
+    "nbformat_minor": 5
 }
diff --git a/docs/examples/data_connectors/deplot/DeplotReader.ipynb b/docs/examples/data_connectors/deplot/DeplotReader.ipynb
index f887d0ba3d2cd867174f793a4679451c21c6778f..b845e593e485c6ecefa77b39e8747002d3f122de 100644
--- a/docs/examples/data_connectors/deplot/DeplotReader.ipynb
+++ b/docs/examples/data_connectors/deplot/DeplotReader.ipynb
@@ -30,7 +30,7 @@
    "outputs": [],
    "source": [
     "from llama_hub.file.image_deplot.base import ImageTabularChartReader\n",
-    "from llama_index import ListIndex\n",
+    "from llama_index import SummaryIndex\n",
     "from llama_index.response.notebook_utils import display_response\n",
     "from pathlib import Path"
    ]
@@ -106,7 +106,7 @@
     }
    ],
    "source": [
-    "list_index = ListIndex.from_documents(documents)\n",
+    "list_index = SummaryIndex.from_documents(documents)\n",
     "response = list_index.as_query_engine().query(\n",
     "    \"What is the difference between the shares of Greenland and the share of Mauritania?\"\n",
     ")"
@@ -177,7 +177,7 @@
    },
    "outputs": [],
    "source": [
-    "list_index = ListIndex.from_documents(documents)\n",
+    "list_index = SummaryIndex.from_documents(documents)\n",
     "response = list_index.as_query_engine().query(\n",
     "    \"What percentage says that the US contributes to peace and stability?\"\n",
     ")"
diff --git a/docs/examples/docstore/DocstoreDemo.ipynb b/docs/examples/docstore/DocstoreDemo.ipynb
index 261e7847c87bddec568aadf2c96de16ffd57da4f..e69f9a420a825c1d44494a166d63ccdef1b12938 100644
--- a/docs/examples/docstore/DocstoreDemo.ipynb
+++ b/docs/examples/docstore/DocstoreDemo.ipynb
@@ -40,7 +40,7 @@
    "outputs": [],
    "source": [
     "from llama_index import SimpleDirectoryReader, ServiceContext, LLMPredictor\n",
-    "from llama_index import VectorStoreIndex, ListIndex, SimpleKeywordTableIndex\n",
+    "from llama_index import VectorStoreIndex, SummaryIndex, SimpleKeywordTableIndex\n",
     "from llama_index.composability import ComposableGraph\n",
     "from llama_index.llms import OpenAI"
    ]
@@ -138,7 +138,7 @@
     "\n",
     "\n",
     "storage_context = StorageContext.from_defaults(docstore=docstore)\n",
-    "list_index = ListIndex(nodes, storage_context=storage_context)\n",
+    "list_index = SummaryIndex(nodes, storage_context=storage_context)\n",
     "vector_index = VectorStoreIndex(nodes, storage_context=storage_context)\n",
     "keyword_table_index = SimpleKeywordTableIndex(nodes, storage_context=storage_context)"
    ]
diff --git a/docs/examples/docstore/DynamoDBDocstoreDemo.ipynb b/docs/examples/docstore/DynamoDBDocstoreDemo.ipynb
index c1a5b7a459c60a23ab6b003b83bf83c49b3f2330..254acf9cd81cb769c98d2e4f81e6e4408ea19c63 100644
--- a/docs/examples/docstore/DynamoDBDocstoreDemo.ipynb
+++ b/docs/examples/docstore/DynamoDBDocstoreDemo.ipynb
@@ -1,410 +1,410 @@
 {
- "cells": [
-  {
-   "cell_type": "code",
-   "execution_count": 1,
-   "id": "a54d1c43-4b7f-4917-939f-a964f6f3dafc",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "import nest_asyncio\n",
-    "\n",
-    "nest_asyncio.apply()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 2,
-   "id": "fa67fa07-1395-4aab-a356-72bdb302f6b2",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "import logging\n",
-    "import sys\n",
-    "import os\n",
-    "\n",
-    "logging.basicConfig(stream=sys.stdout, level=logging.INFO)\n",
-    "logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "1d12d766-3ca8-4012-9da2-248be80bb6ab",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "from llama_index import (\n",
-    "    SimpleDirectoryReader,\n",
-    "    ServiceContext,\n",
-    "    StorageContext,\n",
-    ")\n",
-    "from llama_index import VectorStoreIndex, ListIndex, SimpleKeywordTableIndex\n",
-    "from llama_index.llms import OpenAI\n",
-    "from llama_index.response.notebook_utils import display_response"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "f6dd9d5f-a601-4097-894e-fe98a0c35a5b",
-   "metadata": {},
-   "source": [
-    "#### Load Documents"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 4,
-   "id": "e7cdaf9d-cfbd-4ced-8d4e-6eef8508224d",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "reader = SimpleDirectoryReader(\"../paul_graham_essay/data\")\n",
-    "documents = reader.load_data()"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "bae82b55-5c9f-432a-9e06-1fccb6f9fc7f",
-   "metadata": {},
-   "source": [
-    "#### Parse into Nodes"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 5,
-   "id": "f97e558a-c29f-44ec-ab33-1f481da1a6ef",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "from llama_index.node_parser import SimpleNodeParser\n",
-    "\n",
-    "nodes = SimpleNodeParser().get_nodes_from_documents(documents)"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "aff4c8e1-b2ba-4ea6-a8df-978c2788fedc",
-   "metadata": {},
-   "source": [
-    "#### Add to Docstore"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "f9998976",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "TABLE_NAME = os.environ[\"DYNAMODB_TABLE_NAME\"]"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 6,
-   "id": "54b9bd36",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "from llama_index.storage.docstore.dynamodb_docstore import DynamoDBDocumentStore\n",
-    "from llama_index.storage.index_store.dynamodb_index_store import DynamoDBIndexStore\n",
-    "from llama_index.vector_stores.dynamodb import DynamoDBVectorStore"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 7,
-   "id": "1ba8b0da-67a8-4653-8cdb-09e39583a2d8",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "storage_context = StorageContext.from_defaults(\n",
-    "    docstore=DynamoDBDocumentStore.from_table_name(table_name=TABLE_NAME),\n",
-    "    index_store=DynamoDBIndexStore.from_table_name(table_name=TABLE_NAME),\n",
-    "    vector_store=DynamoDBVectorStore.from_table_name(table_name=TABLE_NAME),\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "e88378b2",
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [],
-   "source": [
-    "storage_context.docstore.add_documents(nodes)"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "528149c1-5bde-4eba-b75a-e8fa1da17d7c",
-   "metadata": {},
-   "source": [
-    "#### Define & Add Multiple Indexes\n",
-    "\n",
-    "Each index uses the same underlying Node."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "316fb6ac-2031-4d17-9999-ffdb827f46d1",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "# https://gpt-index.readthedocs.io/en/latest/api_reference/indices/list.html\n",
-    "list_index = ListIndex(nodes, storage_context=storage_context)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "5c6b2141-fc77-4dec-891b-d4dad0633b35",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "# https://gpt-index.readthedocs.io/en/latest/api_reference/indices/vector_store.html\n",
-    "vector_index = VectorStoreIndex(nodes, storage_context=storage_context)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "144bc7eb",
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [],
-   "source": [
-    "# https://gpt-index.readthedocs.io/en/latest/api_reference/indices/table.html\n",
-    "keyword_table_index = SimpleKeywordTableIndex(nodes, storage_context=storage_context)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "4ccbe86c",
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [],
-   "source": [
-    "# NOTE: the docstore still has the same nodes\n",
-    "len(storage_context.docstore.docs)"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "1059ec3c",
-   "metadata": {
-    "collapsed": false
-   },
-   "source": [
-    "#### Test out saving and loading"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "d0f258d6",
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [],
-   "source": [
-    "# NOTE: docstore, index_store, and vector_index is persisted in DynamoDB by default when they are created\n",
-    "# NOTE: You can also persist simple vector store to disk by using the command below\n",
-    "storage_context.persist()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "9155c1a9",
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [],
-   "source": [
-    "# note down index IDs\n",
-    "list_id = list_index.index_id\n",
-    "vector_id = vector_index.index_id\n",
-    "keyword_id = keyword_table_index.index_id"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "555de7fa",
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [],
-   "source": [
-    "from llama_index.indices.loading import load_index_from_storage\n",
-    "\n",
-    "# re-create storage context\n",
-    "storage_context = StorageContext.from_defaults(\n",
-    "    docstore=DynamoDBDocumentStore.from_table_name(table_name=TABLE_NAME),\n",
-    "    index_store=DynamoDBIndexStore.from_table_name(table_name=TABLE_NAME),\n",
-    "    vector_store=DynamoDBVectorStore.from_table_name(table_name=TABLE_NAME),\n",
-    ")\n",
-    "\n",
-    "list_index = load_index_from_storage(storage_context=storage_context, index_id=list_id)\n",
-    "keyword_table_index = load_index_from_storage(\n",
-    "    storage_context=storage_context, index_id=keyword_id\n",
-    ")\n",
-    "\n",
-    "# You need to add \"vector_store=DynamoDBVectorStore.from_table_name(table_name=TABLE_NAME)\" to StorageContext to load vector index from DynamoDB\n",
-    "vector_index = load_index_from_storage(\n",
-    "    storage_context=storage_context, index_id=vector_id\n",
-    ")"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "c5bc40a7",
-   "metadata": {
-    "collapsed": false
-   },
-   "source": [
-    "#### Test out some Queries"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "8db82de3",
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [],
-   "source": [
-    "chatgpt = OpenAI(temperature=0, model=\"gpt-3.5-turbo\")\n",
-    "service_context_chatgpt = ServiceContext.from_defaults(llm=chatgpt, chunk_size=1024)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "244bc6ae",
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [],
-   "source": [
-    "query_engine = list_index.as_query_engine()\n",
-    "list_response = query_engine.query(\"What is a summary of this document?\")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "6cbe77ef",
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [],
-   "source": [
-    "display_response(list_response)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "02b800ab",
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [],
-   "source": [
-    "query_engine = vector_index.as_query_engine()\n",
-    "vector_response = query_engine.query(\"What did the author do growing up?\")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "70b63767",
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [],
-   "source": [
-    "display_response(vector_response)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "b93478b6",
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [],
-   "source": [
-    "query_engine = keyword_table_index.as_query_engine()\n",
-    "keyword_response = query_engine.query(\"What did the author do after his time at YC?\")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "8044da9c",
-   "metadata": {
-    "collapsed": false
-   },
-   "outputs": [],
-   "source": [
-    "display_response(keyword_response)"
-   ]
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "strat",
-   "language": "python",
-   "name": "python3"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 3
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython3",
-   "version": "3.8.16"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 5
+    "cells": [
+        {
+            "cell_type": "code",
+            "execution_count": 1,
+            "id": "a54d1c43-4b7f-4917-939f-a964f6f3dafc",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "import nest_asyncio\n",
+                "\n",
+                "nest_asyncio.apply()"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 2,
+            "id": "fa67fa07-1395-4aab-a356-72bdb302f6b2",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "import logging\n",
+                "import sys\n",
+                "import os\n",
+                "\n",
+                "logging.basicConfig(stream=sys.stdout, level=logging.INFO)\n",
+                "logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "1d12d766-3ca8-4012-9da2-248be80bb6ab",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "from llama_index import (\n",
+                "    SimpleDirectoryReader,\n",
+                "    ServiceContext,\n",
+                "    StorageContext,\n",
+                ")\n",
+                "from llama_index import VectorStoreIndex, SummaryIndex, SimpleKeywordTableIndex\n",
+                "from llama_index.llms import OpenAI\n",
+                "from llama_index.response.notebook_utils import display_response"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "f6dd9d5f-a601-4097-894e-fe98a0c35a5b",
+            "metadata": {},
+            "source": [
+                "#### Load Documents"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 4,
+            "id": "e7cdaf9d-cfbd-4ced-8d4e-6eef8508224d",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "reader = SimpleDirectoryReader(\"../paul_graham_essay/data\")\n",
+                "documents = reader.load_data()"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "bae82b55-5c9f-432a-9e06-1fccb6f9fc7f",
+            "metadata": {},
+            "source": [
+                "#### Parse into Nodes"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 5,
+            "id": "f97e558a-c29f-44ec-ab33-1f481da1a6ef",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "from llama_index.node_parser import SimpleNodeParser\n",
+                "\n",
+                "nodes = SimpleNodeParser().get_nodes_from_documents(documents)"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "aff4c8e1-b2ba-4ea6-a8df-978c2788fedc",
+            "metadata": {},
+            "source": [
+                "#### Add to Docstore"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "f9998976",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "TABLE_NAME = os.environ[\"DYNAMODB_TABLE_NAME\"]"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 6,
+            "id": "54b9bd36",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "from llama_index.storage.docstore.dynamodb_docstore import DynamoDBDocumentStore\n",
+                "from llama_index.storage.index_store.dynamodb_index_store import DynamoDBIndexStore\n",
+                "from llama_index.vector_stores.dynamodb import DynamoDBVectorStore"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 7,
+            "id": "1ba8b0da-67a8-4653-8cdb-09e39583a2d8",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "storage_context = StorageContext.from_defaults(\n",
+                "    docstore=DynamoDBDocumentStore.from_table_name(table_name=TABLE_NAME),\n",
+                "    index_store=DynamoDBIndexStore.from_table_name(table_name=TABLE_NAME),\n",
+                "    vector_store=DynamoDBVectorStore.from_table_name(table_name=TABLE_NAME),\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "e88378b2",
+            "metadata": {
+                "collapsed": false
+            },
+            "outputs": [],
+            "source": [
+                "storage_context.docstore.add_documents(nodes)"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "528149c1-5bde-4eba-b75a-e8fa1da17d7c",
+            "metadata": {},
+            "source": [
+                "#### Define & Add Multiple Indexes\n",
+                "\n",
+                "Each index uses the same underlying Node."
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "316fb6ac-2031-4d17-9999-ffdb827f46d1",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "# https://gpt-index.readthedocs.io/en/latest/api_reference/indices/list.html\n",
+                "list_index = SummaryIndex(nodes, storage_context=storage_context)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "5c6b2141-fc77-4dec-891b-d4dad0633b35",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "# https://gpt-index.readthedocs.io/en/latest/api_reference/indices/vector_store.html\n",
+                "vector_index = VectorStoreIndex(nodes, storage_context=storage_context)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "144bc7eb",
+            "metadata": {
+                "collapsed": false
+            },
+            "outputs": [],
+            "source": [
+                "# https://gpt-index.readthedocs.io/en/latest/api_reference/indices/table.html\n",
+                "keyword_table_index = SimpleKeywordTableIndex(nodes, storage_context=storage_context)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "4ccbe86c",
+            "metadata": {
+                "collapsed": false
+            },
+            "outputs": [],
+            "source": [
+                "# NOTE: the docstore still has the same nodes\n",
+                "len(storage_context.docstore.docs)"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "1059ec3c",
+            "metadata": {
+                "collapsed": false
+            },
+            "source": [
+                "#### Test out saving and loading"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "d0f258d6",
+            "metadata": {
+                "collapsed": false
+            },
+            "outputs": [],
+            "source": [
+                "# NOTE: docstore, index_store, and vector_index is persisted in DynamoDB by default when they are created\n",
+                "# NOTE: You can also persist simple vector store to disk by using the command below\n",
+                "storage_context.persist()"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "9155c1a9",
+            "metadata": {
+                "collapsed": false
+            },
+            "outputs": [],
+            "source": [
+                "# note down index IDs\n",
+                "list_id = list_index.index_id\n",
+                "vector_id = vector_index.index_id\n",
+                "keyword_id = keyword_table_index.index_id"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "555de7fa",
+            "metadata": {
+                "collapsed": false
+            },
+            "outputs": [],
+            "source": [
+                "from llama_index.indices.loading import load_index_from_storage\n",
+                "\n",
+                "# re-create storage context\n",
+                "storage_context = StorageContext.from_defaults(\n",
+                "    docstore=DynamoDBDocumentStore.from_table_name(table_name=TABLE_NAME),\n",
+                "    index_store=DynamoDBIndexStore.from_table_name(table_name=TABLE_NAME),\n",
+                "    vector_store=DynamoDBVectorStore.from_table_name(table_name=TABLE_NAME),\n",
+                ")\n",
+                "\n",
+                "list_index = load_index_from_storage(storage_context=storage_context, index_id=list_id)\n",
+                "keyword_table_index = load_index_from_storage(\n",
+                "    storage_context=storage_context, index_id=keyword_id\n",
+                ")\n",
+                "\n",
+                "# You need to add \"vector_store=DynamoDBVectorStore.from_table_name(table_name=TABLE_NAME)\" to StorageContext to load vector index from DynamoDB\n",
+                "vector_index = load_index_from_storage(\n",
+                "    storage_context=storage_context, index_id=vector_id\n",
+                ")"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "c5bc40a7",
+            "metadata": {
+                "collapsed": false
+            },
+            "source": [
+                "#### Test out some Queries"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "8db82de3",
+            "metadata": {
+                "collapsed": false
+            },
+            "outputs": [],
+            "source": [
+                "chatgpt = OpenAI(temperature=0, model=\"gpt-3.5-turbo\")\n",
+                "service_context_chatgpt = ServiceContext.from_defaults(llm=chatgpt, chunk_size=1024)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "244bc6ae",
+            "metadata": {
+                "collapsed": false
+            },
+            "outputs": [],
+            "source": [
+                "query_engine = list_index.as_query_engine()\n",
+                "list_response = query_engine.query(\"What is a summary of this document?\")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "6cbe77ef",
+            "metadata": {
+                "collapsed": false
+            },
+            "outputs": [],
+            "source": [
+                "display_response(list_response)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "02b800ab",
+            "metadata": {
+                "collapsed": false
+            },
+            "outputs": [],
+            "source": [
+                "query_engine = vector_index.as_query_engine()\n",
+                "vector_response = query_engine.query(\"What did the author do growing up?\")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "70b63767",
+            "metadata": {
+                "collapsed": false
+            },
+            "outputs": [],
+            "source": [
+                "display_response(vector_response)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "b93478b6",
+            "metadata": {
+                "collapsed": false
+            },
+            "outputs": [],
+            "source": [
+                "query_engine = keyword_table_index.as_query_engine()\n",
+                "keyword_response = query_engine.query(\"What did the author do after his time at YC?\")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "8044da9c",
+            "metadata": {
+                "collapsed": false
+            },
+            "outputs": [],
+            "source": [
+                "display_response(keyword_response)"
+            ]
+        }
+    ],
+    "metadata": {
+        "kernelspec": {
+            "display_name": "strat",
+            "language": "python",
+            "name": "python3"
+        },
+        "language_info": {
+            "codemirror_mode": {
+                "name": "ipython",
+                "version": 3
+            },
+            "file_extension": ".py",
+            "mimetype": "text/x-python",
+            "name": "python",
+            "nbconvert_exporter": "python",
+            "pygments_lexer": "ipython3",
+            "version": "3.8.16"
+        }
+    },
+    "nbformat": 4,
+    "nbformat_minor": 5
 }
diff --git a/docs/examples/docstore/FirestoreDemo.ipynb b/docs/examples/docstore/FirestoreDemo.ipynb
index 26f7266158aec3bf2e7d6d016eaafaa859969ac2..ba336839eccde4d04543831ce98fefbff42e50f4 100644
--- a/docs/examples/docstore/FirestoreDemo.ipynb
+++ b/docs/examples/docstore/FirestoreDemo.ipynb
@@ -1,327 +1,327 @@
 {
- "cells": [
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "import nest_asyncio\n",
-    "\n",
-    "nest_asyncio.apply()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "import logging\n",
-    "import sys\n",
-    "\n",
-    "logging.basicConfig(stream=sys.stdout, level=logging.INFO)\n",
-    "logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "from llama_index import (\n",
-    "    SimpleDirectoryReader,\n",
-    "    ServiceContext,\n",
-    "    LLMPredictor,\n",
-    "    StorageContext,\n",
-    ")\n",
-    "from llama_index import VectorStoreIndex, ListIndex, SimpleKeywordTableIndex\n",
-    "from llama_index.composability import ComposableGraph\n",
-    "from llama_index.llms import OpenAI\n",
-    "from llama_index.response.notebook_utils import display_response"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "#### Load Documents"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "reader = SimpleDirectoryReader(\"../paul_graham_essay/data\")\n",
-    "documents = reader.load_data()"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "#### Parse into Nodes"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "from llama_index.node_parser import SimpleNodeParser\n",
-    "\n",
-    "nodes = SimpleNodeParser.from_defaults().get_nodes_from_documents(documents)"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "#### Add to Docstore"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "from llama_index.storage.kvstore.firestore_kvstore import FirestoreKVStore\n",
-    "from llama_index.storage.docstore.firestore_docstore import FirestoreDocumentStore\n",
-    "from llama_index.storage.index_store.firestore_indexstore import FirestoreIndexStore"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "kvstore = FirestoreKVStore()\n",
-    "\n",
-    "storage_context = StorageContext.from_defaults(\n",
-    "    docstore=FirestoreDocumentStore(kvstore),\n",
-    "    index_store=FirestoreIndexStore(kvstore),\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "storage_context.docstore.add_documents(nodes)"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "#### Define Multiple Indexes\n",
-    "\n",
-    "Each index uses the same underlying Node."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "list_index = ListIndex(nodes, storage_context=storage_context)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "vector_index = VectorStoreIndex(nodes, storage_context=storage_context)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "keyword_table_index = SimpleKeywordTableIndex(nodes, storage_context=storage_context)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# NOTE: the docstore still has the same nodes\n",
-    "len(storage_context.docstore.docs)"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "#### Test out saving and loading"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# NOTE: docstore and index_store is persisted in Firestore by default\n",
-    "# NOTE: here only need to persist simple vector store to disk\n",
-    "storage_context.persist()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# note down index IDs\n",
-    "list_id = list_index.index_id\n",
-    "vector_id = vector_index.index_id\n",
-    "keyword_id = keyword_table_index.index_id"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "from llama_index.indices.loading import load_index_from_storage\n",
-    "\n",
-    "kvstore = FirestoreKVStore()\n",
-    "\n",
-    "# re-create storage context\n",
-    "storage_context = StorageContext.from_defaults(\n",
-    "    docstore=FirestoreDocumentStore(kvstore),\n",
-    "    index_store=FirestoreIndexStore(kvstore),\n",
-    ")\n",
-    "\n",
-    "# load indices\n",
-    "list_index = load_index_from_storage(storage_context=storage_context, index_id=list_id)\n",
-    "vector_index = load_index_from_storage(\n",
-    "    storage_context=storage_context, vector_id=vector_id\n",
-    ")\n",
-    "keyword_table_index = load_index_from_storage(\n",
-    "    storage_context=storage_context, keyword_id=keyword_id\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "#### Test out some Queries"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "chatgpt = OpenAI(temperature=0, model=\"gpt-3.5-turbo\")\n",
-    "service_context_chatgpt = ServiceContext.from_defaults(llm=chatgpt, chunk_size=1024)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "query_engine = list_index.as_query_engine()\n",
-    "list_response = query_engine.query(\"What is a summary of this document?\")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "display_response(list_response)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "query_engine = vector_index.as_query_engine()\n",
-    "vector_response = query_engine.query(\"What did the author do growing up?\")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "display_response(vector_response)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "query_engine = keyword_table_index.as_query_engine()\n",
-    "keyword_response = query_engine.query(\"What did the author do after his time at YC?\")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "display_response(keyword_response)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": []
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "Python 3",
-   "language": "python",
-   "name": "python3"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 3
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython3",
-   "version": "3.10.12"
-  },
-  "orig_nbformat": 4
- },
- "nbformat": 4,
- "nbformat_minor": 2
+    "cells": [
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "import nest_asyncio\n",
+                "\n",
+                "nest_asyncio.apply()"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "import logging\n",
+                "import sys\n",
+                "\n",
+                "logging.basicConfig(stream=sys.stdout, level=logging.INFO)\n",
+                "logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "from llama_index import (\n",
+                "    SimpleDirectoryReader,\n",
+                "    ServiceContext,\n",
+                "    LLMPredictor,\n",
+                "    StorageContext,\n",
+                ")\n",
+                "from llama_index import VectorStoreIndex, SummaryIndex, SimpleKeywordTableIndex\n",
+                "from llama_index.composability import ComposableGraph\n",
+                "from llama_index.llms import OpenAI\n",
+                "from llama_index.response.notebook_utils import display_response"
+            ]
+        },
+        {
+            "cell_type": "markdown",
+            "metadata": {},
+            "source": [
+                "#### Load Documents"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "reader = SimpleDirectoryReader(\"../paul_graham_essay/data\")\n",
+                "documents = reader.load_data()"
+            ]
+        },
+        {
+            "cell_type": "markdown",
+            "metadata": {},
+            "source": [
+                "#### Parse into Nodes"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "from llama_index.node_parser import SimpleNodeParser\n",
+                "\n",
+                "nodes = SimpleNodeParser.from_defaults().get_nodes_from_documents(documents)"
+            ]
+        },
+        {
+            "cell_type": "markdown",
+            "metadata": {},
+            "source": [
+                "#### Add to Docstore"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "from llama_index.storage.kvstore.firestore_kvstore import FirestoreKVStore\n",
+                "from llama_index.storage.docstore.firestore_docstore import FirestoreDocumentStore\n",
+                "from llama_index.storage.index_store.firestore_indexstore import FirestoreIndexStore"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "kvstore = FirestoreKVStore()\n",
+                "\n",
+                "storage_context = StorageContext.from_defaults(\n",
+                "    docstore=FirestoreDocumentStore(kvstore),\n",
+                "    index_store=FirestoreIndexStore(kvstore),\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "storage_context.docstore.add_documents(nodes)"
+            ]
+        },
+        {
+            "cell_type": "markdown",
+            "metadata": {},
+            "source": [
+                "#### Define Multiple Indexes\n",
+                "\n",
+                "Each index uses the same underlying Node."
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "list_index = SummaryIndex(nodes, storage_context=storage_context)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "vector_index = VectorStoreIndex(nodes, storage_context=storage_context)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "keyword_table_index = SimpleKeywordTableIndex(nodes, storage_context=storage_context)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# NOTE: the docstore still has the same nodes\n",
+                "len(storage_context.docstore.docs)"
+            ]
+        },
+        {
+            "cell_type": "markdown",
+            "metadata": {},
+            "source": [
+                "#### Test out saving and loading"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# NOTE: docstore and index_store is persisted in Firestore by default\n",
+                "# NOTE: here only need to persist simple vector store to disk\n",
+                "storage_context.persist()"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# note down index IDs\n",
+                "list_id = list_index.index_id\n",
+                "vector_id = vector_index.index_id\n",
+                "keyword_id = keyword_table_index.index_id"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "from llama_index.indices.loading import load_index_from_storage\n",
+                "\n",
+                "kvstore = FirestoreKVStore()\n",
+                "\n",
+                "# re-create storage context\n",
+                "storage_context = StorageContext.from_defaults(\n",
+                "    docstore=FirestoreDocumentStore(kvstore),\n",
+                "    index_store=FirestoreIndexStore(kvstore),\n",
+                ")\n",
+                "\n",
+                "# load indices\n",
+                "list_index = load_index_from_storage(storage_context=storage_context, index_id=list_id)\n",
+                "vector_index = load_index_from_storage(\n",
+                "    storage_context=storage_context, index_id=vector_id\n",
+                ")\n",
+                "keyword_table_index = load_index_from_storage(\n",
+                "    storage_context=storage_context, index_id=keyword_id\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "markdown",
+            "metadata": {},
+            "source": [
+                "#### Test out some Queries"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "chatgpt = OpenAI(temperature=0, model=\"gpt-3.5-turbo\")\n",
+                "service_context_chatgpt = ServiceContext.from_defaults(llm=chatgpt, chunk_size=1024)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "query_engine = list_index.as_query_engine()\n",
+                "list_response = query_engine.query(\"What is a summary of this document?\")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "display_response(list_response)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "query_engine = vector_index.as_query_engine()\n",
+                "vector_response = query_engine.query(\"What did the author do growing up?\")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "display_response(vector_response)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "query_engine = keyword_table_index.as_query_engine()\n",
+                "keyword_response = query_engine.query(\"What did the author do after his time at YC?\")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "display_response(keyword_response)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "metadata": {},
+            "outputs": [],
+            "source": []
+        }
+    ],
+    "metadata": {
+        "kernelspec": {
+            "display_name": "Python 3",
+            "language": "python",
+            "name": "python3"
+        },
+        "language_info": {
+            "codemirror_mode": {
+                "name": "ipython",
+                "version": 3
+            },
+            "file_extension": ".py",
+            "mimetype": "text/x-python",
+            "name": "python",
+            "nbconvert_exporter": "python",
+            "pygments_lexer": "ipython3",
+            "version": "3.10.12"
+        },
+        "orig_nbformat": 4
+    },
+    "nbformat": 4,
+    "nbformat_minor": 2
 }
diff --git a/docs/examples/docstore/MongoDocstoreDemo.ipynb b/docs/examples/docstore/MongoDocstoreDemo.ipynb
index 0b7adc0b1d824e0a014081b00ecaddd4e49c1c4a..a14d9e521cad07137cd880048c9c25220debe715 100644
--- a/docs/examples/docstore/MongoDocstoreDemo.ipynb
+++ b/docs/examples/docstore/MongoDocstoreDemo.ipynb
@@ -1,414 +1,414 @@
 {
- "cells": [
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "a54d1c43-4b7f-4917-939f-a964f6f3dafc",
-   "metadata": {
-    "scrolled": true,
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "import nest_asyncio\n",
-    "\n",
-    "nest_asyncio.apply()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "fa67fa07-1395-4aab-a356-72bdb302f6b2",
-   "metadata": {
-    "scrolled": true,
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "import logging\n",
-    "import sys\n",
-    "import os\n",
-    "\n",
-    "logging.basicConfig(stream=sys.stdout, level=logging.INFO)\n",
-    "logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "1d12d766-3ca8-4012-9da2-248be80bb6ab",
-   "metadata": {
-    "scrolled": true,
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "from llama_index import (\n",
-    "    SimpleDirectoryReader,\n",
-    "    ServiceContext,\n",
-    "    LLMPredictor,\n",
-    "    StorageContext,\n",
-    ")\n",
-    "from llama_index import VectorStoreIndex, ListIndex, SimpleKeywordTableIndex\n",
-    "from llama_index.composability import ComposableGraph\n",
-    "from llama_index.llms import OpenAI\n",
-    "from llama_index.response.notebook_utils import display_response"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "f6dd9d5f-a601-4097-894e-fe98a0c35a5b",
-   "metadata": {},
-   "source": [
-    "#### Load Documents"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "e7cdaf9d-cfbd-4ced-8d4e-6eef8508224d",
-   "metadata": {
-    "scrolled": true,
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "reader = SimpleDirectoryReader(\"../paul_graham_essay/data\")\n",
-    "documents = reader.load_data()"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "bae82b55-5c9f-432a-9e06-1fccb6f9fc7f",
-   "metadata": {},
-   "source": [
-    "#### Parse into Nodes"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "f97e558a-c29f-44ec-ab33-1f481da1a6ef",
-   "metadata": {
-    "scrolled": true,
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "from llama_index.node_parser import SimpleNodeParser\n",
-    "\n",
-    "nodes = SimpleNodeParser.from_defaults().get_nodes_from_documents(documents)"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "aff4c8e1-b2ba-4ea6-a8df-978c2788fedc",
-   "metadata": {},
-   "source": [
-    "#### Add to Docstore"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "1514211c",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "MONGO_URI = os.environ[\"MONGO_URI\"]"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "1ba8b0da-67a8-4653-8cdb-09e39583a2d8",
-   "metadata": {
-    "scrolled": true,
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "from llama_index.storage.docstore import MongoDocumentStore\n",
-    "from llama_index.storage.index_store.mongo_index_store import MongoIndexStore"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "60e781d1",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "storage_context = StorageContext.from_defaults(\n",
-    "    docstore=MongoDocumentStore.from_uri(uri=MONGO_URI),\n",
-    "    index_store=MongoIndexStore.from_uri(uri=MONGO_URI),\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "e0b18789",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "storage_context.docstore.add_documents(nodes)"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "528149c1-5bde-4eba-b75a-e8fa1da17d7c",
-   "metadata": {},
-   "source": [
-    "#### Define Multiple Indexes\n",
-    "\n",
-    "Each index uses the same underlying Node."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "316fb6ac-2031-4d17-9999-ffdb827f46d1",
-   "metadata": {
-    "scrolled": true,
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "list_index = ListIndex(nodes, storage_context=storage_context)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "9440f405-fa75-4788-bc7c-11d021a0a17b",
-   "metadata": {
-    "scrolled": true,
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "vector_index = VectorStoreIndex(nodes, storage_context=storage_context)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "364ef89f-4ba2-4b1a-b5e5-619e0e8420ef",
-   "metadata": {
-    "scrolled": true,
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "keyword_table_index = SimpleKeywordTableIndex(nodes, storage_context=storage_context)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "5c6b2141-fc77-4dec-891b-d4dad0633b35",
-   "metadata": {
-    "scrolled": true,
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "# NOTE: the docstore still has the same nodes\n",
-    "len(storage_context.docstore.docs)"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "365a025b",
-   "metadata": {},
-   "source": [
-    "#### Test out saving and loading"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "1b359a08",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# NOTE: docstore and index_store is persisted in MongoDB by default\n",
-    "# NOTE: here only need to persist simple vector store to disk\n",
-    "storage_context.persist()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "84b3d2f4",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# note down index IDs\n",
-    "list_id = list_index.index_id\n",
-    "vector_id = vector_index.index_id\n",
-    "keyword_id = keyword_table_index.index_id"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "1593ca1d",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "from llama_index.indices.loading import load_index_from_storage\n",
-    "\n",
-    "# re-create storage context\n",
-    "storage_context = StorageContext.from_defaults(\n",
-    "    docstore=MongoDocumentStore.from_uri(uri=MONGO_URI),\n",
-    "    index_store=MongoIndexStore.from_uri(uri=MONGO_URI),\n",
-    ")\n",
-    "\n",
-    "# load indices\n",
-    "list_index = load_index_from_storage(storage_context=storage_context, index_id=list_id)\n",
-    "vector_index = load_index_from_storage(\n",
-    "    storage_context=storage_context, vector_id=vector_id\n",
-    ")\n",
-    "keyword_table_index = load_index_from_storage(\n",
-    "    storage_context=storage_context, keyword_id=keyword_id\n",
-    ")"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "d3bf6aaf-3375-4212-8323-777969a918f7",
-   "metadata": {},
-   "source": [
-    "#### Test out some Queries"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "9bba68f3-2743-437e-93b6-ce9ba92e40c3",
-   "metadata": {
-    "scrolled": true,
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "chatgpt = OpenAI(temperature=0, model=\"gpt-3.5-turbo\")\n",
-    "service_context_chatgpt = ServiceContext.from_defaults(llm=chatgpt, chunk_size=1024)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "544c0565-72a0-434b-98e5-83138ebdaa2b",
-   "metadata": {
-    "scrolled": true
-   },
-   "outputs": [],
-   "source": [
-    "query_engine = list_index.as_query_engine()\n",
-    "list_response = query_engine.query(\"What is a summary of this document?\")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "39d250be",
-   "metadata": {
-    "scrolled": true
-   },
-   "outputs": [],
-   "source": [
-    "display_response(list_response)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "036077b7-108e-4026-9628-44c694343460",
-   "metadata": {
-    "scrolled": true
-   },
-   "outputs": [],
-   "source": [
-    "query_engine = vector_index.as_query_engine()\n",
-    "vector_response = query_engine.query(\"What did the author do growing up?\")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "42229e09",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "display_response(vector_response)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "ecd7719c-f663-4edb-a239-d2a8f0a5c091",
-   "metadata": {
-    "scrolled": true,
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "query_engine = keyword_table_index.as_query_engine()\n",
-    "keyword_response = query_engine.query(\"What did the author do after his time at YC?\")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "37524641-2632-4a76-8ae6-00f1285256d9",
-   "metadata": {
-    "scrolled": true,
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "display_response(keyword_response)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "ff58018c-3117-4d50-abff-16a1873eda9c",
-   "metadata": {
-    "scrolled": true
-   },
-   "outputs": [],
-   "source": []
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "Python 3 (ipykernel)",
-   "language": "python",
-   "name": "python3"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 3
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython3",
-   "version": "3.9.16"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 5
+    "cells": [
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "a54d1c43-4b7f-4917-939f-a964f6f3dafc",
+            "metadata": {
+                "scrolled": true,
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "import nest_asyncio\n",
+                "\n",
+                "nest_asyncio.apply()"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "fa67fa07-1395-4aab-a356-72bdb302f6b2",
+            "metadata": {
+                "scrolled": true,
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "import logging\n",
+                "import sys\n",
+                "import os\n",
+                "\n",
+                "logging.basicConfig(stream=sys.stdout, level=logging.INFO)\n",
+                "logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "1d12d766-3ca8-4012-9da2-248be80bb6ab",
+            "metadata": {
+                "scrolled": true,
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "from llama_index import (\n",
+                "    SimpleDirectoryReader,\n",
+                "    ServiceContext,\n",
+                "    LLMPredictor,\n",
+                "    StorageContext,\n",
+                ")\n",
+                "from llama_index import VectorStoreIndex, SummaryIndex, SimpleKeywordTableIndex\n",
+                "from llama_index.composability import ComposableGraph\n",
+                "from llama_index.llms import OpenAI\n",
+                "from llama_index.response.notebook_utils import display_response"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "f6dd9d5f-a601-4097-894e-fe98a0c35a5b",
+            "metadata": {},
+            "source": [
+                "#### Load Documents"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "e7cdaf9d-cfbd-4ced-8d4e-6eef8508224d",
+            "metadata": {
+                "scrolled": true,
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "reader = SimpleDirectoryReader(\"../paul_graham_essay/data\")\n",
+                "documents = reader.load_data()"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "bae82b55-5c9f-432a-9e06-1fccb6f9fc7f",
+            "metadata": {},
+            "source": [
+                "#### Parse into Nodes"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "f97e558a-c29f-44ec-ab33-1f481da1a6ef",
+            "metadata": {
+                "scrolled": true,
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "from llama_index.node_parser import SimpleNodeParser\n",
+                "\n",
+                "nodes = SimpleNodeParser.from_defaults().get_nodes_from_documents(documents)"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "aff4c8e1-b2ba-4ea6-a8df-978c2788fedc",
+            "metadata": {},
+            "source": [
+                "#### Add to Docstore"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "1514211c",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "MONGO_URI = os.environ[\"MONGO_URI\"]"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "1ba8b0da-67a8-4653-8cdb-09e39583a2d8",
+            "metadata": {
+                "scrolled": true,
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "from llama_index.storage.docstore import MongoDocumentStore\n",
+                "from llama_index.storage.index_store.mongo_index_store import MongoIndexStore"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "60e781d1",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "storage_context = StorageContext.from_defaults(\n",
+                "    docstore=MongoDocumentStore.from_uri(uri=MONGO_URI),\n",
+                "    index_store=MongoIndexStore.from_uri(uri=MONGO_URI),\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "e0b18789",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "storage_context.docstore.add_documents(nodes)"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "528149c1-5bde-4eba-b75a-e8fa1da17d7c",
+            "metadata": {},
+            "source": [
+                "#### Define Multiple Indexes\n",
+                "\n",
+                "Each index uses the same underlying Node."
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "316fb6ac-2031-4d17-9999-ffdb827f46d1",
+            "metadata": {
+                "scrolled": true,
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "list_index = SummaryIndex(nodes, storage_context=storage_context)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "9440f405-fa75-4788-bc7c-11d021a0a17b",
+            "metadata": {
+                "scrolled": true,
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "vector_index = VectorStoreIndex(nodes, storage_context=storage_context)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "364ef89f-4ba2-4b1a-b5e5-619e0e8420ef",
+            "metadata": {
+                "scrolled": true,
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "keyword_table_index = SimpleKeywordTableIndex(nodes, storage_context=storage_context)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "5c6b2141-fc77-4dec-891b-d4dad0633b35",
+            "metadata": {
+                "scrolled": true,
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "# NOTE: the docstore still has the same nodes\n",
+                "len(storage_context.docstore.docs)"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "365a025b",
+            "metadata": {},
+            "source": [
+                "#### Test out saving and loading"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "1b359a08",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# NOTE: docstore and index_store is persisted in MongoDB by default\n",
+                "# NOTE: here only need to persist simple vector store to disk\n",
+                "storage_context.persist()"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "84b3d2f4",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# note down index IDs\n",
+                "list_id = list_index.index_id\n",
+                "vector_id = vector_index.index_id\n",
+                "keyword_id = keyword_table_index.index_id"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "1593ca1d",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "from llama_index.indices.loading import load_index_from_storage\n",
+                "\n",
+                "# re-create storage context\n",
+                "storage_context = StorageContext.from_defaults(\n",
+                "    docstore=MongoDocumentStore.from_uri(uri=MONGO_URI),\n",
+                "    index_store=MongoIndexStore.from_uri(uri=MONGO_URI),\n",
+                ")\n",
+                "\n",
+                "# load indices\n",
+                "list_index = load_index_from_storage(storage_context=storage_context, index_id=list_id)\n",
+                "vector_index = load_index_from_storage(\n",
+                "    storage_context=storage_context, index_id=vector_id\n",
+                ")\n",
+                "keyword_table_index = load_index_from_storage(\n",
+                "    storage_context=storage_context, index_id=keyword_id\n",
+                ")"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "d3bf6aaf-3375-4212-8323-777969a918f7",
+            "metadata": {},
+            "source": [
+                "#### Test out some Queries"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "9bba68f3-2743-437e-93b6-ce9ba92e40c3",
+            "metadata": {
+                "scrolled": true,
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "chatgpt = OpenAI(temperature=0, model=\"gpt-3.5-turbo\")\n",
+                "service_context_chatgpt = ServiceContext.from_defaults(llm=chatgpt, chunk_size=1024)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "544c0565-72a0-434b-98e5-83138ebdaa2b",
+            "metadata": {
+                "scrolled": true
+            },
+            "outputs": [],
+            "source": [
+                "query_engine = list_index.as_query_engine()\n",
+                "list_response = query_engine.query(\"What is a summary of this document?\")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "39d250be",
+            "metadata": {
+                "scrolled": true
+            },
+            "outputs": [],
+            "source": [
+                "display_response(list_response)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "036077b7-108e-4026-9628-44c694343460",
+            "metadata": {
+                "scrolled": true
+            },
+            "outputs": [],
+            "source": [
+                "query_engine = vector_index.as_query_engine()\n",
+                "vector_response = query_engine.query(\"What did the author do growing up?\")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "42229e09",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "display_response(vector_response)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "ecd7719c-f663-4edb-a239-d2a8f0a5c091",
+            "metadata": {
+                "scrolled": true,
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "query_engine = keyword_table_index.as_query_engine()\n",
+                "keyword_response = query_engine.query(\"What did the author do after his time at YC?\")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "37524641-2632-4a76-8ae6-00f1285256d9",
+            "metadata": {
+                "scrolled": true,
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "display_response(keyword_response)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "ff58018c-3117-4d50-abff-16a1873eda9c",
+            "metadata": {
+                "scrolled": true
+            },
+            "outputs": [],
+            "source": []
+        }
+    ],
+    "metadata": {
+        "kernelspec": {
+            "display_name": "Python 3 (ipykernel)",
+            "language": "python",
+            "name": "python3"
+        },
+        "language_info": {
+            "codemirror_mode": {
+                "name": "ipython",
+                "version": 3
+            },
+            "file_extension": ".py",
+            "mimetype": "text/x-python",
+            "name": "python",
+            "nbconvert_exporter": "python",
+            "pygments_lexer": "ipython3",
+            "version": "3.9.16"
+        }
+    },
+    "nbformat": 4,
+    "nbformat_minor": 5
 }
diff --git a/docs/examples/docstore/RedisDocstoreIndexStoreDemo.ipynb b/docs/examples/docstore/RedisDocstoreIndexStoreDemo.ipynb
index 79201dff76f694793d5207c49a8819a5a9d45a3f..cba858cad826cf0e9fdae33cea5e62ce0674df5c 100644
--- a/docs/examples/docstore/RedisDocstoreIndexStoreDemo.ipynb
+++ b/docs/examples/docstore/RedisDocstoreIndexStoreDemo.ipynb
@@ -1,647 +1,647 @@
 {
- "cells": [
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "f35495ae",
-   "metadata": {},
-   "source": [
-    "# Redis Docstore+Index Store Demo"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 1,
-   "id": "a54d1c43-4b7f-4917-939f-a964f6f3dafc",
-   "metadata": {
-    "scrolled": true,
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "import nest_asyncio\n",
-    "\n",
-    "nest_asyncio.apply()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 2,
-   "id": "fa67fa07-1395-4aab-a356-72bdb302f6b2",
-   "metadata": {
-    "scrolled": true,
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "import logging\n",
-    "import sys\n",
-    "import os\n",
-    "\n",
-    "logging.basicConfig(stream=sys.stdout, level=logging.INFO)\n",
-    "logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 3,
-   "id": "1d12d766-3ca8-4012-9da2-248be80bb6ab",
-   "metadata": {
-    "scrolled": true,
-    "tags": []
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "INFO:numexpr.utils:Note: NumExpr detected 16 cores but \"NUMEXPR_MAX_THREADS\" not set, so enforcing safe limit of 8.\n",
-      "Note: NumExpr detected 16 cores but \"NUMEXPR_MAX_THREADS\" not set, so enforcing safe limit of 8.\n",
-      "INFO:numexpr.utils:NumExpr defaulting to 8 threads.\n",
-      "NumExpr defaulting to 8 threads.\n"
-     ]
+    "cells": [
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "f35495ae",
+            "metadata": {},
+            "source": [
+                "# Redis Docstore+Index Store Demo"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 1,
+            "id": "a54d1c43-4b7f-4917-939f-a964f6f3dafc",
+            "metadata": {
+                "scrolled": true,
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "import nest_asyncio\n",
+                "\n",
+                "nest_asyncio.apply()"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 2,
+            "id": "fa67fa07-1395-4aab-a356-72bdb302f6b2",
+            "metadata": {
+                "scrolled": true,
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "import logging\n",
+                "import sys\n",
+                "import os\n",
+                "\n",
+                "logging.basicConfig(stream=sys.stdout, level=logging.INFO)\n",
+                "logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 3,
+            "id": "1d12d766-3ca8-4012-9da2-248be80bb6ab",
+            "metadata": {
+                "scrolled": true,
+                "tags": []
+            },
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "INFO:numexpr.utils:Note: NumExpr detected 16 cores but \"NUMEXPR_MAX_THREADS\" not set, so enforcing safe limit of 8.\n",
+                        "Note: NumExpr detected 16 cores but \"NUMEXPR_MAX_THREADS\" not set, so enforcing safe limit of 8.\n",
+                        "INFO:numexpr.utils:NumExpr defaulting to 8 threads.\n",
+                        "NumExpr defaulting to 8 threads.\n"
+                    ]
+                },
+                {
+                    "name": "stderr",
+                    "output_type": "stream",
+                    "text": [
+                        "/home/loganm/miniconda3/envs/llama-index/lib/python3.11/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n",
+                        "  from .autonotebook import tqdm as notebook_tqdm\n"
+                    ]
+                }
+            ],
+            "source": [
+                "from llama_index import (\n",
+                "    SimpleDirectoryReader,\n",
+                "    ServiceContext,\n",
+                "    LLMPredictor,\n",
+                "    StorageContext,\n",
+                ")\n",
+                "from llama_index import VectorStoreIndex, SummaryIndex, SimpleKeywordTableIndex\n",
+                "from llama_index.composability import ComposableGraph\n",
+                "from llama_index.llms import OpenAI\n",
+                "from llama_index.response.notebook_utils import display_response"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "f6dd9d5f-a601-4097-894e-fe98a0c35a5b",
+            "metadata": {},
+            "source": [
+                "#### Load Documents"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 4,
+            "id": "e7cdaf9d-cfbd-4ced-8d4e-6eef8508224d",
+            "metadata": {
+                "scrolled": true,
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "reader = SimpleDirectoryReader(\"../paul_graham_essay/data\")\n",
+                "documents = reader.load_data()"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "bae82b55-5c9f-432a-9e06-1fccb6f9fc7f",
+            "metadata": {},
+            "source": [
+                "#### Parse into Nodes"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 5,
+            "id": "f97e558a-c29f-44ec-ab33-1f481da1a6ef",
+            "metadata": {
+                "scrolled": true,
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "from llama_index.node_parser import SimpleNodeParser\n",
+                "\n",
+                "nodes = SimpleNodeParser.from_defaults().get_nodes_from_documents(documents)"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "aff4c8e1-b2ba-4ea6-a8df-978c2788fedc",
+            "metadata": {},
+            "source": [
+                "#### Add to Docstore"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 6,
+            "id": "1514211c",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "REDIS_HOST = os.getenv(\"REDIS_HOST\", \"127.0.0.1\")\n",
+                "REDIS_PORT = os.getenv(\"REDIS_PORT\", 6379)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 1,
+            "id": "1ba8b0da-67a8-4653-8cdb-09e39583a2d8",
+            "metadata": {
+                "scrolled": true,
+                "tags": []
+            },
+            "outputs": [
+                {
+                    "name": "stderr",
+                    "output_type": "stream",
+                    "text": [
+                        "/home/loganm/miniconda3/envs/llama-index/lib/python3.11/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n",
+                        "  from .autonotebook import tqdm as notebook_tqdm\n"
+                    ]
+                }
+            ],
+            "source": [
+                "from llama_index.storage.docstore import RedisDocumentStore\n",
+                "from llama_index.storage.index_store import RedisIndexStore"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 8,
+            "id": "60e781d1",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "storage_context = StorageContext.from_defaults(\n",
+                "    docstore=RedisDocumentStore.from_host_and_port(\n",
+                "        host=REDIS_HOST, port=REDIS_PORT, namespace=\"llama_index\"\n",
+                "    ),\n",
+                "    index_store=RedisIndexStore.from_host_and_port(\n",
+                "        host=REDIS_HOST, port=REDIS_PORT, namespace=\"llama_index\"\n",
+                "    ),\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 9,
+            "id": "e0b18789",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "storage_context.docstore.add_documents(nodes)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 10,
+            "id": "6c7a6877",
+            "metadata": {},
+            "outputs": [
+                {
+                    "data": {
+                        "text/plain": [
+                            "20"
+                        ]
+                    },
+                    "execution_count": 10,
+                    "metadata": {},
+                    "output_type": "execute_result"
+                }
+            ],
+            "source": [
+                "len(storage_context.docstore.docs)"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "528149c1-5bde-4eba-b75a-e8fa1da17d7c",
+            "metadata": {},
+            "source": [
+                "#### Define Multiple Indexes\n",
+                "\n",
+                "Each index uses the same underlying Node."
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 12,
+            "id": "316fb6ac-2031-4d17-9999-ffdb827f46d1",
+            "metadata": {
+                "scrolled": true,
+                "tags": []
+            },
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "INFO:llama_index.token_counter.token_counter:> [build_index_from_nodes] Total LLM token usage: 0 tokens\n",
+                        "> [build_index_from_nodes] Total LLM token usage: 0 tokens\n",
+                        "INFO:llama_index.token_counter.token_counter:> [build_index_from_nodes] Total embedding token usage: 0 tokens\n",
+                        "> [build_index_from_nodes] Total embedding token usage: 0 tokens\n"
+                    ]
+                }
+            ],
+            "source": [
+                "list_index = SummaryIndex(nodes, storage_context=storage_context)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 13,
+            "id": "9440f405-fa75-4788-bc7c-11d021a0a17b",
+            "metadata": {
+                "scrolled": true,
+                "tags": []
+            },
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "INFO:llama_index.token_counter.token_counter:> [build_index_from_nodes] Total LLM token usage: 0 tokens\n",
+                        "> [build_index_from_nodes] Total LLM token usage: 0 tokens\n",
+                        "INFO:llama_index.token_counter.token_counter:> [build_index_from_nodes] Total embedding token usage: 17050 tokens\n",
+                        "> [build_index_from_nodes] Total embedding token usage: 17050 tokens\n"
+                    ]
+                }
+            ],
+            "source": [
+                "vector_index = VectorStoreIndex(nodes, storage_context=storage_context)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 14,
+            "id": "364ef89f-4ba2-4b1a-b5e5-619e0e8420ef",
+            "metadata": {
+                "scrolled": true,
+                "tags": []
+            },
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "INFO:llama_index.token_counter.token_counter:> [build_index_from_nodes] Total LLM token usage: 0 tokens\n",
+                        "> [build_index_from_nodes] Total LLM token usage: 0 tokens\n",
+                        "INFO:llama_index.token_counter.token_counter:> [build_index_from_nodes] Total embedding token usage: 0 tokens\n",
+                        "> [build_index_from_nodes] Total embedding token usage: 0 tokens\n"
+                    ]
+                }
+            ],
+            "source": [
+                "keyword_table_index = SimpleKeywordTableIndex(nodes, storage_context=storage_context)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 15,
+            "id": "5c6b2141-fc77-4dec-891b-d4dad0633b35",
+            "metadata": {
+                "ExecuteTime": {
+                    "end_time": "2023-06-26T15:02:41.461474Z",
+                    "start_time": "2023-06-26T15:02:41.337117Z"
+                },
+                "scrolled": true,
+                "tags": []
+            },
+            "outputs": [
+                {
+                    "data": {
+                        "text/plain": [
+                            "20"
+                        ]
+                    },
+                    "execution_count": 15,
+                    "metadata": {},
+                    "output_type": "execute_result"
+                }
+            ],
+            "source": [
+                "# NOTE: the docstore still has the same nodes\n",
+                "len(storage_context.docstore.docs)"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "365a025b",
+            "metadata": {},
+            "source": [
+                "#### Test out saving and loading"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 16,
+            "id": "1b359a08",
+            "metadata": {
+                "ExecuteTime": {
+                    "end_time": "2023-06-26T15:02:43.659294Z",
+                    "start_time": "2023-06-26T15:02:43.606383Z"
+                }
+            },
+            "outputs": [],
+            "source": [
+                "# NOTE: docstore and index_store is persisted in Redis by default\n",
+                "# NOTE: here only need to persist simple vector store to disk\n",
+                "storage_context.persist(persist_dir=\"./storage\")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 17,
+            "id": "84b3d2f4",
+            "metadata": {
+                "ExecuteTime": {
+                    "end_time": "2023-06-26T15:02:48.342282Z",
+                    "start_time": "2023-06-26T15:02:48.180258Z"
+                }
+            },
+            "outputs": [],
+            "source": [
+                "# note down index IDs\n",
+                "list_id = list_index.index_id\n",
+                "vector_id = vector_index.index_id\n",
+                "keyword_id = keyword_table_index.index_id"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 20,
+            "id": "1593ca1d",
+            "metadata": {
+                "ExecuteTime": {
+                    "end_time": "2023-06-26T15:02:49.712732Z",
+                    "start_time": "2023-06-26T15:02:49.476875Z"
+                }
+            },
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "INFO:llama_index.indices.loading:Loading indices with ids: ['24e98f9b-9586-4fc6-8341-8dce895e5bcc']\n",
+                        "Loading indices with ids: ['24e98f9b-9586-4fc6-8341-8dce895e5bcc']\n",
+                        "INFO:llama_index.indices.loading:Loading indices with ids: ['f7b2aeb3-4dad-4750-8177-78d5ae706284']\n",
+                        "Loading indices with ids: ['f7b2aeb3-4dad-4750-8177-78d5ae706284']\n",
+                        "INFO:llama_index.indices.loading:Loading indices with ids: ['9a9198b4-7cb9-4c96-97a7-5f404f43b9cd']\n",
+                        "Loading indices with ids: ['9a9198b4-7cb9-4c96-97a7-5f404f43b9cd']\n"
+                    ]
+                }
+            ],
+            "source": [
+                "from llama_index.indices.loading import load_index_from_storage\n",
+                "\n",
+                "# re-create storage context\n",
+                "storage_context = StorageContext.from_defaults(\n",
+                "    docstore=RedisDocumentStore.from_host_and_port(\n",
+                "        host=REDIS_HOST, port=REDIS_PORT, namespace=\"llama_index\"\n",
+                "    ),\n",
+                "    index_store=RedisIndexStore.from_host_and_port(\n",
+                "        host=REDIS_HOST, port=REDIS_PORT, namespace=\"llama_index\"\n",
+                "    ),\n",
+                ")\n",
+                "\n",
+                "# load indices\n",
+                "list_index = load_index_from_storage(storage_context=storage_context, index_id=list_id)\n",
+                "vector_index = load_index_from_storage(\n",
+                "    storage_context=storage_context, index_id=vector_id\n",
+                ")\n",
+                "keyword_table_index = load_index_from_storage(\n",
+                "    storage_context=storage_context, index_id=keyword_id\n",
+                ")"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "d3bf6aaf-3375-4212-8323-777969a918f7",
+            "metadata": {},
+            "source": [
+                "#### Test out some Queries"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 21,
+            "id": "9bba68f3-2743-437e-93b6-ce9ba92e40c3",
+            "metadata": {
+                "ExecuteTime": {
+                    "end_time": "2023-06-26T15:02:55.782981Z",
+                    "start_time": "2023-06-26T15:02:55.699883Z"
+                },
+                "scrolled": true,
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "chatgpt = OpenAI(temperature=0, model=\"gpt-3.5-turbo\")\n",
+                "service_context_chatgpt = ServiceContext.from_defaults(llm=chatgpt, chunk_size=1024)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 22,
+            "id": "544c0565-72a0-434b-98e5-83138ebdaa2b",
+            "metadata": {
+                "ExecuteTime": {
+                    "end_time": "2023-06-26T15:03:15.143653Z",
+                    "start_time": "2023-06-26T15:02:56.183558Z"
+                },
+                "scrolled": true
+            },
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "INFO:llama_index.token_counter.token_counter:> [get_response] Total LLM token usage: 26111 tokens\n",
+                        "> [get_response] Total LLM token usage: 26111 tokens\n",
+                        "INFO:llama_index.token_counter.token_counter:> [get_response] Total embedding token usage: 0 tokens\n",
+                        "> [get_response] Total embedding token usage: 0 tokens\n"
+                    ]
+                }
+            ],
+            "source": [
+                "query_engine = list_index.as_query_engine()\n",
+                "list_response = query_engine.query(\"What is a summary of this document?\")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 23,
+            "id": "39d250be",
+            "metadata": {
+                "scrolled": true
+            },
+            "outputs": [
+                {
+                    "data": {
+                        "text/markdown": [
+                            "**`Final Response:`** This document is a narrative of the author's journey from writing and programming as a young person to pursuing a career in art. It describes his experiences in high school, college, and graduate school, and how he eventually decided to pursue art as a career. He applied to art schools and eventually was accepted to RISD and the Accademia di Belli Arti in Florence. He passed the entrance exam for the Accademia and began studying art there. He then moved to New York and worked freelance while writing a book on Lisp. He eventually started a company to put art galleries online, but it was unsuccessful. He then pivoted to creating software to build online stores, which eventually became successful. He had the idea to run the software on the server and let users control it by clicking on links, which meant users wouldn't need anything more than a browser. This kind of software, known as \"internet storefronts,\" was eventually successful. He and his team worked hard to make the software user-friendly and inexpensive, and eventually the company was bought by Yahoo. After the sale, he left to pursue his dream of painting, and eventually found success in New York. He was able to afford luxuries such as taxis and restaurants, and he experimented with a new kind of still life painting. He also had the idea to create a web app for making web apps, which he eventually pursued and was successful with. He then started Y Combinator, an investment firm that focused on helping startups, with his own money and the help of his friends Robert and Trevor. He wrote essays and books, invited undergrads to apply to the Summer Founders Program, and eventually married Jessica Livingston. After his mother's death, he decided to quit Y Combinator and pursue painting, but eventually ran out of steam and started writing essays and working on Lisp again. He wrote a new Lisp, called Bel, in itself in Arc, and it took him four years to complete. \nDuring this time, he worked hard to make the language user-friendly and precise, and he also took time to enjoy life with his family. He encountered various obstacles along the way, such as customs that constrained him even after the restrictions that caused them had disappeared, and he also had to deal with misinterpretations of his essays on forums. In the end, he was successful in creating Bel and was able to pursue his dream of painting."
+                        ],
+                        "text/plain": [
+                            "<IPython.core.display.Markdown object>"
+                        ]
+                    },
+                    "metadata": {},
+                    "output_type": "display_data"
+                }
+            ],
+            "source": [
+                "display_response(list_response)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 24,
+            "id": "036077b7-108e-4026-9628-44c694343460",
+            "metadata": {
+                "scrolled": true
+            },
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "INFO:llama_index.token_counter.token_counter:> [retrieve] Total LLM token usage: 0 tokens\n",
+                        "> [retrieve] Total LLM token usage: 0 tokens\n",
+                        "INFO:llama_index.token_counter.token_counter:> [retrieve] Total embedding token usage: 8 tokens\n",
+                        "> [retrieve] Total embedding token usage: 8 tokens\n",
+                        "INFO:llama_index.token_counter.token_counter:> [get_response] Total LLM token usage: 0 tokens\n",
+                        "> [get_response] Total LLM token usage: 0 tokens\n",
+                        "INFO:llama_index.token_counter.token_counter:> [get_response] Total embedding token usage: 0 tokens\n",
+                        "> [get_response] Total embedding token usage: 0 tokens\n"
+                    ]
+                }
+            ],
+            "source": [
+                "query_engine = vector_index.as_query_engine()\n",
+                "vector_response = query_engine.query(\"What did the author do growing up?\")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 25,
+            "id": "42229e09",
+            "metadata": {},
+            "outputs": [
+                {
+                    "data": {
+                        "text/markdown": [
+                            "**`Final Response:`** None"
+                        ],
+                        "text/plain": [
+                            "<IPython.core.display.Markdown object>"
+                        ]
+                    },
+                    "metadata": {},
+                    "output_type": "display_data"
+                }
+            ],
+            "source": [
+                "display_response(vector_response)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 26,
+            "id": "ecd7719c-f663-4edb-a239-d2a8f0a5c091",
+            "metadata": {
+                "scrolled": true,
+                "tags": []
+            },
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "INFO:llama_index.indices.keyword_table.retrievers:> Starting query: What did the author do after his time at YC?\n",
+                        "> Starting query: What did the author do after his time at YC?\n",
+                        "INFO:llama_index.indices.keyword_table.retrievers:query keywords: ['action', 'yc', 'after', 'time', 'author']\n",
+                        "query keywords: ['action', 'yc', 'after', 'time', 'author']\n",
+                        "INFO:llama_index.indices.keyword_table.retrievers:> Extracted keywords: ['yc', 'time']\n",
+                        "> Extracted keywords: ['yc', 'time']\n",
+                        "INFO:llama_index.token_counter.token_counter:> [get_response] Total LLM token usage: 10216 tokens\n",
+                        "> [get_response] Total LLM token usage: 10216 tokens\n",
+                        "INFO:llama_index.token_counter.token_counter:> [get_response] Total embedding token usage: 0 tokens\n",
+                        "> [get_response] Total embedding token usage: 0 tokens\n"
+                    ]
+                }
+            ],
+            "source": [
+                "query_engine = keyword_table_index.as_query_engine()\n",
+                "keyword_response = query_engine.query(\"What did the author do after his time at YC?\")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 27,
+            "id": "37524641-2632-4a76-8ae6-00f1285256d9",
+            "metadata": {
+                "scrolled": true,
+                "tags": []
+            },
+            "outputs": [
+                {
+                    "data": {
+                        "text/markdown": [
+                            "**`Final Response:`** After his time at YC, the author decided to pursue painting and writing. He wanted to see how good he could get if he really focused on it, so he started painting the day after he stopped working on YC. He spent most of the rest of 2014 painting and was able to become better than he had been before. He also wrote essays and started working on Lisp again in March 2015. He then spent 4 years working on a new Lisp, called Bel, which he wrote in itself in Arc. He had to ban himself from writing essays during most of this time, and he moved to England in the summer of 2016. He also wrote a book about Lisp hacking, called On Lisp, which was published in 1993. In the fall of 2019, Bel was finally finished. He also experimented with a new kind of still life painting, and tried to build a web app for making web apps, which he named Aspra. He eventually decided to build a subset of this app as an open source project, which was the new Lisp dialect he called Arc."
+                        ],
+                        "text/plain": [
+                            "<IPython.core.display.Markdown object>"
+                        ]
+                    },
+                    "metadata": {},
+                    "output_type": "display_data"
+                }
+            ],
+            "source": [
+                "display_response(keyword_response)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "ff58018c-3117-4d50-abff-16a1873eda9c",
+            "metadata": {
+                "scrolled": true
+            },
+            "outputs": [],
+            "source": []
+        }
+    ],
+    "metadata": {
+        "kernelspec": {
+            "display_name": "llama-index",
+            "language": "python",
+            "name": "llama-index"
+        },
+        "language_info": {
+            "codemirror_mode": {
+                "name": "ipython",
+                "version": 3
+            },
+            "file_extension": ".py",
+            "mimetype": "text/x-python",
+            "name": "python",
+            "nbconvert_exporter": "python",
+            "pygments_lexer": "ipython3",
+            "version": "3.11.0"
+        }
     },
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "/home/loganm/miniconda3/envs/llama-index/lib/python3.11/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n",
-      "  from .autonotebook import tqdm as notebook_tqdm\n"
-     ]
-    }
-   ],
-   "source": [
-    "from llama_index import (\n",
-    "    SimpleDirectoryReader,\n",
-    "    ServiceContext,\n",
-    "    LLMPredictor,\n",
-    "    StorageContext,\n",
-    ")\n",
-    "from llama_index import VectorStoreIndex, ListIndex, SimpleKeywordTableIndex\n",
-    "from llama_index.composability import ComposableGraph\n",
-    "from llama_index.llms import OpenAI\n",
-    "from llama_index.response.notebook_utils import display_response"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "f6dd9d5f-a601-4097-894e-fe98a0c35a5b",
-   "metadata": {},
-   "source": [
-    "#### Load Documents"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 4,
-   "id": "e7cdaf9d-cfbd-4ced-8d4e-6eef8508224d",
-   "metadata": {
-    "scrolled": true,
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "reader = SimpleDirectoryReader(\"../paul_graham_essay/data\")\n",
-    "documents = reader.load_data()"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "bae82b55-5c9f-432a-9e06-1fccb6f9fc7f",
-   "metadata": {},
-   "source": [
-    "#### Parse into Nodes"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 5,
-   "id": "f97e558a-c29f-44ec-ab33-1f481da1a6ef",
-   "metadata": {
-    "scrolled": true,
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "from llama_index.node_parser import SimpleNodeParser\n",
-    "\n",
-    "nodes = SimpleNodeParser.from_defaults().get_nodes_from_documents(documents)"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "aff4c8e1-b2ba-4ea6-a8df-978c2788fedc",
-   "metadata": {},
-   "source": [
-    "#### Add to Docstore"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 6,
-   "id": "1514211c",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "REDIS_HOST = os.getenv(\"REDIS_HOST\", \"127.0.0.1\")\n",
-    "REDIS_PORT = os.getenv(\"REDIS_PORT\", 6379)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 1,
-   "id": "1ba8b0da-67a8-4653-8cdb-09e39583a2d8",
-   "metadata": {
-    "scrolled": true,
-    "tags": []
-   },
-   "outputs": [
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "/home/loganm/miniconda3/envs/llama-index/lib/python3.11/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n",
-      "  from .autonotebook import tqdm as notebook_tqdm\n"
-     ]
-    }
-   ],
-   "source": [
-    "from llama_index.storage.docstore import RedisDocumentStore\n",
-    "from llama_index.storage.index_store import RedisIndexStore"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 8,
-   "id": "60e781d1",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "storage_context = StorageContext.from_defaults(\n",
-    "    docstore=RedisDocumentStore.from_host_and_port(\n",
-    "        host=REDIS_HOST, port=REDIS_PORT, namespace=\"llama_index\"\n",
-    "    ),\n",
-    "    index_store=RedisIndexStore.from_host_and_port(\n",
-    "        host=REDIS_HOST, port=REDIS_PORT, namespace=\"llama_index\"\n",
-    "    ),\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 9,
-   "id": "e0b18789",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "storage_context.docstore.add_documents(nodes)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 10,
-   "id": "6c7a6877",
-   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "20"
-      ]
-     },
-     "execution_count": 10,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "len(storage_context.docstore.docs)"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "528149c1-5bde-4eba-b75a-e8fa1da17d7c",
-   "metadata": {},
-   "source": [
-    "#### Define Multiple Indexes\n",
-    "\n",
-    "Each index uses the same underlying Node."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 12,
-   "id": "316fb6ac-2031-4d17-9999-ffdb827f46d1",
-   "metadata": {
-    "scrolled": true,
-    "tags": []
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "INFO:llama_index.token_counter.token_counter:> [build_index_from_nodes] Total LLM token usage: 0 tokens\n",
-      "> [build_index_from_nodes] Total LLM token usage: 0 tokens\n",
-      "INFO:llama_index.token_counter.token_counter:> [build_index_from_nodes] Total embedding token usage: 0 tokens\n",
-      "> [build_index_from_nodes] Total embedding token usage: 0 tokens\n"
-     ]
-    }
-   ],
-   "source": [
-    "list_index = ListIndex(nodes, storage_context=storage_context)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 13,
-   "id": "9440f405-fa75-4788-bc7c-11d021a0a17b",
-   "metadata": {
-    "scrolled": true,
-    "tags": []
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "INFO:llama_index.token_counter.token_counter:> [build_index_from_nodes] Total LLM token usage: 0 tokens\n",
-      "> [build_index_from_nodes] Total LLM token usage: 0 tokens\n",
-      "INFO:llama_index.token_counter.token_counter:> [build_index_from_nodes] Total embedding token usage: 17050 tokens\n",
-      "> [build_index_from_nodes] Total embedding token usage: 17050 tokens\n"
-     ]
-    }
-   ],
-   "source": [
-    "vector_index = VectorStoreIndex(nodes, storage_context=storage_context)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 14,
-   "id": "364ef89f-4ba2-4b1a-b5e5-619e0e8420ef",
-   "metadata": {
-    "scrolled": true,
-    "tags": []
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "INFO:llama_index.token_counter.token_counter:> [build_index_from_nodes] Total LLM token usage: 0 tokens\n",
-      "> [build_index_from_nodes] Total LLM token usage: 0 tokens\n",
-      "INFO:llama_index.token_counter.token_counter:> [build_index_from_nodes] Total embedding token usage: 0 tokens\n",
-      "> [build_index_from_nodes] Total embedding token usage: 0 tokens\n"
-     ]
-    }
-   ],
-   "source": [
-    "keyword_table_index = SimpleKeywordTableIndex(nodes, storage_context=storage_context)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 15,
-   "id": "5c6b2141-fc77-4dec-891b-d4dad0633b35",
-   "metadata": {
-    "ExecuteTime": {
-     "end_time": "2023-06-26T15:02:41.461474Z",
-     "start_time": "2023-06-26T15:02:41.337117Z"
-    },
-    "scrolled": true,
-    "tags": []
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "20"
-      ]
-     },
-     "execution_count": 15,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "# NOTE: the docstore still has the same nodes\n",
-    "len(storage_context.docstore.docs)"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "365a025b",
-   "metadata": {},
-   "source": [
-    "#### Test out saving and loading"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 16,
-   "id": "1b359a08",
-   "metadata": {
-    "ExecuteTime": {
-     "end_time": "2023-06-26T15:02:43.659294Z",
-     "start_time": "2023-06-26T15:02:43.606383Z"
-    }
-   },
-   "outputs": [],
-   "source": [
-    "# NOTE: docstore and index_store is persisted in Redis by default\n",
-    "# NOTE: here only need to persist simple vector store to disk\n",
-    "storage_context.persist(persist_dir=\"./storage\")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 17,
-   "id": "84b3d2f4",
-   "metadata": {
-    "ExecuteTime": {
-     "end_time": "2023-06-26T15:02:48.342282Z",
-     "start_time": "2023-06-26T15:02:48.180258Z"
-    }
-   },
-   "outputs": [],
-   "source": [
-    "# note down index IDs\n",
-    "list_id = list_index.index_id\n",
-    "vector_id = vector_index.index_id\n",
-    "keyword_id = keyword_table_index.index_id"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 20,
-   "id": "1593ca1d",
-   "metadata": {
-    "ExecuteTime": {
-     "end_time": "2023-06-26T15:02:49.712732Z",
-     "start_time": "2023-06-26T15:02:49.476875Z"
-    }
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "INFO:llama_index.indices.loading:Loading indices with ids: ['24e98f9b-9586-4fc6-8341-8dce895e5bcc']\n",
-      "Loading indices with ids: ['24e98f9b-9586-4fc6-8341-8dce895e5bcc']\n",
-      "INFO:llama_index.indices.loading:Loading indices with ids: ['f7b2aeb3-4dad-4750-8177-78d5ae706284']\n",
-      "Loading indices with ids: ['f7b2aeb3-4dad-4750-8177-78d5ae706284']\n",
-      "INFO:llama_index.indices.loading:Loading indices with ids: ['9a9198b4-7cb9-4c96-97a7-5f404f43b9cd']\n",
-      "Loading indices with ids: ['9a9198b4-7cb9-4c96-97a7-5f404f43b9cd']\n"
-     ]
-    }
-   ],
-   "source": [
-    "from llama_index.indices.loading import load_index_from_storage\n",
-    "\n",
-    "# re-create storage context\n",
-    "storage_context = StorageContext.from_defaults(\n",
-    "    docstore=RedisDocumentStore.from_host_and_port(\n",
-    "        host=REDIS_HOST, port=REDIS_PORT, namespace=\"llama_index\"\n",
-    "    ),\n",
-    "    index_store=RedisIndexStore.from_host_and_port(\n",
-    "        host=REDIS_HOST, port=REDIS_PORT, namespace=\"llama_index\"\n",
-    "    ),\n",
-    ")\n",
-    "\n",
-    "# load indices\n",
-    "list_index = load_index_from_storage(storage_context=storage_context, index_id=list_id)\n",
-    "vector_index = load_index_from_storage(\n",
-    "    storage_context=storage_context, index_id=vector_id\n",
-    ")\n",
-    "keyword_table_index = load_index_from_storage(\n",
-    "    storage_context=storage_context, index_id=keyword_id\n",
-    ")"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "d3bf6aaf-3375-4212-8323-777969a918f7",
-   "metadata": {},
-   "source": [
-    "#### Test out some Queries"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 21,
-   "id": "9bba68f3-2743-437e-93b6-ce9ba92e40c3",
-   "metadata": {
-    "ExecuteTime": {
-     "end_time": "2023-06-26T15:02:55.782981Z",
-     "start_time": "2023-06-26T15:02:55.699883Z"
-    },
-    "scrolled": true,
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "chatgpt = OpenAI(temperature=0, model=\"gpt-3.5-turbo\")\n",
-    "service_context_chatgpt = ServiceContext.from_defaults(llm=chatgpt, chunk_size=1024)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 22,
-   "id": "544c0565-72a0-434b-98e5-83138ebdaa2b",
-   "metadata": {
-    "ExecuteTime": {
-     "end_time": "2023-06-26T15:03:15.143653Z",
-     "start_time": "2023-06-26T15:02:56.183558Z"
-    },
-    "scrolled": true
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "INFO:llama_index.token_counter.token_counter:> [get_response] Total LLM token usage: 26111 tokens\n",
-      "> [get_response] Total LLM token usage: 26111 tokens\n",
-      "INFO:llama_index.token_counter.token_counter:> [get_response] Total embedding token usage: 0 tokens\n",
-      "> [get_response] Total embedding token usage: 0 tokens\n"
-     ]
-    }
-   ],
-   "source": [
-    "query_engine = list_index.as_query_engine()\n",
-    "list_response = query_engine.query(\"What is a summary of this document?\")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 23,
-   "id": "39d250be",
-   "metadata": {
-    "scrolled": true
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/markdown": [
-       "**`Final Response:`** This document is a narrative of the author's journey from writing and programming as a young person to pursuing a career in art. It describes his experiences in high school, college, and graduate school, and how he eventually decided to pursue art as a career. He applied to art schools and eventually was accepted to RISD and the Accademia di Belli Arti in Florence. He passed the entrance exam for the Accademia and began studying art there. He then moved to New York and worked freelance while writing a book on Lisp. He eventually started a company to put art galleries online, but it was unsuccessful. He then pivoted to creating software to build online stores, which eventually became successful. He had the idea to run the software on the server and let users control it by clicking on links, which meant users wouldn't need anything more than a browser. This kind of software, known as \"internet storefronts,\" was eventually successful. He and his team worked hard to make the software user-friendly and inexpensive, and eventually the company was bought by Yahoo. After the sale, he left to pursue his dream of painting, and eventually found success in New York. He was able to afford luxuries such as taxis and restaurants, and he experimented with a new kind of still life painting. He also had the idea to create a web app for making web apps, which he eventually pursued and was successful with. He then started Y Combinator, an investment firm that focused on helping startups, with his own money and the help of his friends Robert and Trevor. He wrote essays and books, invited undergrads to apply to the Summer Founders Program, and eventually married Jessica Livingston. After his mother's death, he decided to quit Y Combinator and pursue painting, but eventually ran out of steam and started writing essays and working on Lisp again. He wrote a new Lisp, called Bel, in itself in Arc, and it took him four years to complete. 
During this time, he worked hard to make the language user-friendly and precise, and he also took time to enjoy life with his family. He encountered various obstacles along the way, such as customs that constrained him even after the restrictions that caused them had disappeared, and he also had to deal with misinterpretations of his essays on forums. In the end, he was successful in creating Bel and was able to pursue his dream of painting."
-      ],
-      "text/plain": [
-       "<IPython.core.display.Markdown object>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    }
-   ],
-   "source": [
-    "display_response(list_response)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 24,
-   "id": "036077b7-108e-4026-9628-44c694343460",
-   "metadata": {
-    "scrolled": true
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "INFO:llama_index.token_counter.token_counter:> [retrieve] Total LLM token usage: 0 tokens\n",
-      "> [retrieve] Total LLM token usage: 0 tokens\n",
-      "INFO:llama_index.token_counter.token_counter:> [retrieve] Total embedding token usage: 8 tokens\n",
-      "> [retrieve] Total embedding token usage: 8 tokens\n",
-      "INFO:llama_index.token_counter.token_counter:> [get_response] Total LLM token usage: 0 tokens\n",
-      "> [get_response] Total LLM token usage: 0 tokens\n",
-      "INFO:llama_index.token_counter.token_counter:> [get_response] Total embedding token usage: 0 tokens\n",
-      "> [get_response] Total embedding token usage: 0 tokens\n"
-     ]
-    }
-   ],
-   "source": [
-    "query_engine = vector_index.as_query_engine()\n",
-    "vector_response = query_engine.query(\"What did the author do growing up?\")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 25,
-   "id": "42229e09",
-   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/markdown": [
-       "**`Final Response:`** None"
-      ],
-      "text/plain": [
-       "<IPython.core.display.Markdown object>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    }
-   ],
-   "source": [
-    "display_response(vector_response)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 26,
-   "id": "ecd7719c-f663-4edb-a239-d2a8f0a5c091",
-   "metadata": {
-    "scrolled": true,
-    "tags": []
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "INFO:llama_index.indices.keyword_table.retrievers:> Starting query: What did the author do after his time at YC?\n",
-      "> Starting query: What did the author do after his time at YC?\n",
-      "INFO:llama_index.indices.keyword_table.retrievers:query keywords: ['action', 'yc', 'after', 'time', 'author']\n",
-      "query keywords: ['action', 'yc', 'after', 'time', 'author']\n",
-      "INFO:llama_index.indices.keyword_table.retrievers:> Extracted keywords: ['yc', 'time']\n",
-      "> Extracted keywords: ['yc', 'time']\n",
-      "INFO:llama_index.token_counter.token_counter:> [get_response] Total LLM token usage: 10216 tokens\n",
-      "> [get_response] Total LLM token usage: 10216 tokens\n",
-      "INFO:llama_index.token_counter.token_counter:> [get_response] Total embedding token usage: 0 tokens\n",
-      "> [get_response] Total embedding token usage: 0 tokens\n"
-     ]
-    }
-   ],
-   "source": [
-    "query_engine = keyword_table_index.as_query_engine()\n",
-    "keyword_response = query_engine.query(\"What did the author do after his time at YC?\")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 27,
-   "id": "37524641-2632-4a76-8ae6-00f1285256d9",
-   "metadata": {
-    "scrolled": true,
-    "tags": []
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/markdown": [
-       "**`Final Response:`** After his time at YC, the author decided to pursue painting and writing. He wanted to see how good he could get if he really focused on it, so he started painting the day after he stopped working on YC. He spent most of the rest of 2014 painting and was able to become better than he had been before. He also wrote essays and started working on Lisp again in March 2015. He then spent 4 years working on a new Lisp, called Bel, which he wrote in itself in Arc. He had to ban himself from writing essays during most of this time, and he moved to England in the summer of 2016. He also wrote a book about Lisp hacking, called On Lisp, which was published in 1993. In the fall of 2019, Bel was finally finished. He also experimented with a new kind of still life painting, and tried to build a web app for making web apps, which he named Aspra. He eventually decided to build a subset of this app as an open source project, which was the new Lisp dialect he called Arc."
-      ],
-      "text/plain": [
-       "<IPython.core.display.Markdown object>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    }
-   ],
-   "source": [
-    "display_response(keyword_response)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "ff58018c-3117-4d50-abff-16a1873eda9c",
-   "metadata": {
-    "scrolled": true
-   },
-   "outputs": [],
-   "source": []
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "llama-index",
-   "language": "python",
-   "name": "llama-index"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 3
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython3",
-   "version": "3.11.0"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 5
+    "nbformat": 4,
+    "nbformat_minor": 5
 }
diff --git a/docs/examples/llm/XinferenceLocalDeployment.ipynb b/docs/examples/llm/XinferenceLocalDeployment.ipynb
index 472b004960ec722a654aedcff2e28309cb8c49fd..f6802e48b95c2eb4201fa8e5fa2c8e80cd56e7de 100644
--- a/docs/examples/llm/XinferenceLocalDeployment.ipynb
+++ b/docs/examples/llm/XinferenceLocalDeployment.ipynb
@@ -1,241 +1,241 @@
 {
- "cells": [
-  {
-   "cell_type": "markdown",
-   "id": "7096589b-daaf-440a-b89d-b4956f2db4b2",
-   "metadata": {
-    "tags": []
-   },
-   "source": [
-    "# Xorbits Inference\n",
-    "\n",
-    "In this demo notebook, we show how to use Xorbits Inference (Xinference for short) to deploy local LLMs in three steps.\n",
-    "\n",
-    "We will be using the Llama 2 chat model in GGML format in the example, but the code should be easily transfrerable to all LLM chat models supported by Xinference. Below are a few examples:\n",
-    "\n",
-    "| Name          | Type             | Language | Format  | Size (in billions) | Quantization                            |\n",
-    "|---------------|------------------|----------|---------|--------------------|-----------------------------------------|\n",
-    "| llama-2-chat  | RLHF Model       | en       | ggmlv3  | 7, 13, 70          | 'q2_K', 'q3_K_L', ... , 'q6_K', 'q8_0'  |\n",
-    "| chatglm       | SFT Model        | en, zh   | ggmlv3  | 6                  | 'q4_0', 'q4_1', 'q5_0', 'q5_1', 'q8_0'  |\n",
-    "| chatglm2      | SFT Model        | en, zh   | ggmlv3  | 6                  | 'q4_0', 'q4_1', 'q5_0', 'q5_1', 'q8_0'  |\n",
-    "| wizardlm-v1.0 | SFT Model        | en       | ggmlv3  | 7, 13, 33          | 'q2_K', 'q3_K_L', ... , 'q6_K', 'q8_0'  |\n",
-    "| wizardlm-v1.1 | SFT Model        | en       | ggmlv3  | 13                 | 'q2_K', 'q3_K_L', ... , 'q6_K', 'q8_0'  |\n",
-    "| vicuna-v1.3   | SFT Model        | en       | ggmlv3  | 7, 13              | 'q2_K', 'q3_K_L', ... , 'q6_K', 'q8_0'  |\n",
-    "\n",
-    "The latest complete list of supported models can be found in Xorbits Inference's [official GitHub page](https://github.com/xorbitsai/inference/blob/main/README.md)."
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "id": "d8cfbe6f-4c50-4c4f-90f9-03bb91201ef5",
-   "metadata": {},
-   "source": [
-    "## <span style=\"font-size: xx-large;;\">🤖  </span> Install Xinference\n",
-    "\n",
-    "i. Run `pip install \"xinference[all]\"` in a terminal window.\n",
-    "\n",
-    "ii. After installation is complete, restart this jupyter notebook.\n",
-    "\n",
-    "iii. Run `xinference` in a new terminal window.\n",
-    "\n",
-    "iv. You should see something similar to the following output:\n",
-    "\n",
-    "```\n",
-    "INFO:xinference:Xinference successfully started. Endpoint: http://127.0.0.1:9997\n",
-    "INFO:xinference.core.service:Worker 127.0.0.1:21561 has been added successfully\n",
-    "INFO:xinference.deploy.worker:Xinference worker successfully started.\n",
-    "```\n",
-    "\n",
-    "v. In the endpoint description, locate the endpoint port number after the colon. In the above case it is `9997`.\n",
-    "\n",
-    "vi. Set the port number with the following cell:"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "5d520d56",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "port = 9997  # replace with your endpoint port number"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "id": "93139076",
-   "metadata": {},
-   "source": [
-    "## <span style=\"font-size: xx-large;;\">🚀  </span> Launch Local Models\n",
-    "\n",
-    "In this step, we begin with importing the relevant libraries from `llama_index`"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "fd1d259c",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# If Xinference can not be imported, you may need to restart jupyter notebook\n",
-    "from llama_index import (\n",
-    "    ListIndex,\n",
-    "    TreeIndex,\n",
-    "    VectorStoreIndex,\n",
-    "    KeywordTableIndex,\n",
-    "    KnowledgeGraphIndex,\n",
-    "    SimpleDirectoryReader,\n",
-    "    ServiceContext,\n",
-    ")\n",
-    "from llama_index.llms import Xinference\n",
-    "from xinference.client import RESTfulClient\n",
-    "from IPython.display import Markdown, display"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "id": "7a2dce47",
-   "metadata": {},
-   "source": [
-    "Then, we launch a model and use it to create a service context. This allows us to connect the model to documents and queries in later steps.\n",
-    "\n",
-    "Feel free to change the parameters for better performance! In order to achieve optimal results, it is recommended to use models above 13B in size. That being said, 7B models is more than enough for this short demo.\n",
-    "\n",
-    "Here are some more parameter options for the Llama 2 chat model in GGML format, listed from the least space-consuming to the most resource-intensive but high-performing. \n",
-    "\n",
-    "\n",
-    "<span style=\"font-weight: bold; ;\">model_size_in_billions:</span> \n",
-    "\n",
-    "`7`, `13`, `70`\n",
-    "\n",
-    "<span style=\"font-weight: bold; ;\">quantization for 7B and 13B models:</span> \n",
-    "\n",
-    "`q2_K`, `q3_K_L`, `q3_K_M`, `q3_K_S`, `q4_0`, `q4_1`, `q4_K_M`, `q4_K_S`, `q5_0`, `q5_1`, `q5_K_M`, `q5_K_S`, `q6_K`, `q8_0`\n",
-    "\n",
-    "<span style=\"font-weight: bold; ;\">quantizations for 70B models:</span>\n",
-    "\n",
-    "`q4_0`\n"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "b48c6d7a-7a38-440b-8ecb-f43f9050ee54",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# Define a client to send commands to xinference\n",
-    "client = RESTfulClient(f\"http://localhost:{port}\")\n",
-    "\n",
-    "# Download and Launch a model, this may take a while the first time\n",
-    "model_uid = client.launch_model(\n",
-    "    model_name=\"llama-2-chat\",\n",
-    "    model_size_in_billions=7,\n",
-    "    model_format=\"ggmlv3\",\n",
-    "    quantization=\"q2_K\",\n",
-    ")\n",
-    "\n",
-    "# Initiate Xinference object to use the LLM\n",
-    "llm = Xinference(\n",
-    "    endpoint=f\"http://localhost:{port}\",\n",
-    "    model_uid=model_uid,\n",
-    "    temperature=0.0,\n",
-    "    max_tokens=512,\n",
-    ")\n",
-    "\n",
-    "service_context = ServiceContext.from_defaults(llm=llm)"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "id": "094a02b7",
-   "metadata": {},
-   "source": [
-    "## <span style=\"font-size: xx-large;;\">🕺  </span> Index the Data... and Chat!\n",
-    "\n",
-    "In this step, we combine the model and the data to create a query engine. The query engine can then be used as a chat bot, answering our queries based on the given data.\n",
-    "\n",
-    "We will be using `VetorStoreIndex` since it is relatively fast. That being said, feel free to change the index for different experiences. Here are some available indexes already imported from the previous step:\n",
-    "\n",
-    "`ListIndex`, `TreeIndex`, `VetorStoreIndex`, `KeywordTableIndex`, `KnowledgeGraphIndex`\n",
-    "\n",
-    "To change index, simply replace `VetorStoreIndex` with another index in the following code. \n",
-    "\n",
-    "The latest complete list of all available indexes can be found in Llama Index's [official Docs](https://gpt-index.readthedocs.io/en/latest/core_modules/data_modules/index/modules.html)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "708b323e-d314-4b83-864b-22a1ead60de9",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# create index from the data\n",
-    "documents = SimpleDirectoryReader(\"../data/paul_graham\").load_data()\n",
-    "\n",
-    "# change index name in the following line\n",
-    "index = VectorStoreIndex.from_documents(\n",
-    "    documents=documents, service_context=service_context\n",
-    ")\n",
-    "\n",
-    "# create the query engine\n",
-    "query_engine = index.as_query_engine()"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "id": "548174a2",
-   "metadata": {},
-   "source": [
-    "We can optionally set the temperature and the max answer length (in tokens) directly through the `Xinference` object before asking a question. This allows us to change parameters for different questions without rebuilding the query engine every time.\n",
-    "\n",
-    "`temperature` is a number between 0 and 1 that controls the randomness of responses. Higher values increase creativity but may lead to off-topic replies. Setting to zero guarentees the same response every time.\n",
-    "\n",
-    "`max_tokens` is an integer that sets an upper bound for the response length. Increase it if answers seem cut off, but be aware that too long a response may exceed the context window and cause errors.\n"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "e0b32ddb",
-   "metadata": {
-    "scrolled": false
-   },
-   "outputs": [],
-   "source": [
-    "# optionally, update the temperature and max answer length (in tokens)\n",
-    "llm.__dict__.update({\"temperature\": 0.0})\n",
-    "llm.__dict__.update({\"max_tokens\": 2048})\n",
-    "\n",
-    "# ask a question and display the answer\n",
-    "question = \"What did the author do after his time at Y Combinator?\"\n",
-    "\n",
-    "response = query_engine.query(question)\n",
-    "display(Markdown(f\"<b>{response}</b>\"))"
-   ]
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "Python 3 (ipykernel)",
-   "language": "python",
-   "name": "python3"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 3
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython3",
-   "version": "3.11.3"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 5
-}
+    "cells": [
+        {
+            "cell_type": "markdown",
+            "id": "7096589b-daaf-440a-b89d-b4956f2db4b2",
+            "metadata": {
+                "tags": []
+            },
+            "source": [
+                "# Xorbits Inference\n",
+                "\n",
+                "In this demo notebook, we show how to use Xorbits Inference (Xinference for short) to deploy local LLMs in three steps.\n",
+                "\n",
+                "We will be using the Llama 2 chat model in GGML format in the example, but the code should be easily transferable to all LLM chat models supported by Xinference. Below are a few examples:\n",
+                "\n",
+                "| Name          | Type             | Language | Format  | Size (in billions) | Quantization                            |\n",
+                "|---------------|------------------|----------|---------|--------------------|-----------------------------------------|\n",
+                "| llama-2-chat  | RLHF Model       | en       | ggmlv3  | 7, 13, 70          | 'q2_K', 'q3_K_L', ... , 'q6_K', 'q8_0'  |\n",
+                "| chatglm       | SFT Model        | en, zh   | ggmlv3  | 6                  | 'q4_0', 'q4_1', 'q5_0', 'q5_1', 'q8_0'  |\n",
+                "| chatglm2      | SFT Model        | en, zh   | ggmlv3  | 6                  | 'q4_0', 'q4_1', 'q5_0', 'q5_1', 'q8_0'  |\n",
+                "| wizardlm-v1.0 | SFT Model        | en       | ggmlv3  | 7, 13, 33          | 'q2_K', 'q3_K_L', ... , 'q6_K', 'q8_0'  |\n",
+                "| wizardlm-v1.1 | SFT Model        | en       | ggmlv3  | 13                 | 'q2_K', 'q3_K_L', ... , 'q6_K', 'q8_0'  |\n",
+                "| vicuna-v1.3   | SFT Model        | en       | ggmlv3  | 7, 13              | 'q2_K', 'q3_K_L', ... , 'q6_K', 'q8_0'  |\n",
+                "\n",
+                "The latest complete list of supported models can be found in Xorbits Inference's [official GitHub page](https://github.com/xorbitsai/inference/blob/main/README.md)."
+            ]
+        },
+        {
+            "cell_type": "markdown",
+            "id": "d8cfbe6f-4c50-4c4f-90f9-03bb91201ef5",
+            "metadata": {},
+            "source": [
+                "## <span style=\"font-size: xx-large;;\">🤖  </span> Install Xinference\n",
+                "\n",
+                "i. Run `pip install \"xinference[all]\"` in a terminal window.\n",
+                "\n",
+                "ii. After installation is complete, restart this jupyter notebook.\n",
+                "\n",
+                "iii. Run `xinference` in a new terminal window.\n",
+                "\n",
+                "iv. You should see something similar to the following output:\n",
+                "\n",
+                "```\n",
+                "INFO:xinference:Xinference successfully started. Endpoint: http://127.0.0.1:9997\n",
+                "INFO:xinference.core.service:Worker 127.0.0.1:21561 has been added successfully\n",
+                "INFO:xinference.deploy.worker:Xinference worker successfully started.\n",
+                "```\n",
+                "\n",
+                "v. In the endpoint description, locate the endpoint port number after the colon. In the above case it is `9997`.\n",
+                "\n",
+                "vi. Set the port number with the following cell:"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "5d520d56",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "port = 9997  # replace with your endpoint port number"
+            ]
+        },
+        {
+            "cell_type": "markdown",
+            "id": "93139076",
+            "metadata": {},
+            "source": [
+                "## <span style=\"font-size: xx-large;;\">🚀  </span> Launch Local Models\n",
+                "\n",
+                "In this step, we begin with importing the relevant libraries from `llama_index`"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "fd1d259c",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# If Xinference can not be imported, you may need to restart jupyter notebook\n",
+                "from llama_index import (\n",
+                "    SummaryIndex,\n",
+                "    TreeIndex,\n",
+                "    VectorStoreIndex,\n",
+                "    KeywordTableIndex,\n",
+                "    KnowledgeGraphIndex,\n",
+                "    SimpleDirectoryReader,\n",
+                "    ServiceContext,\n",
+                ")\n",
+                "from llama_index.llms import Xinference\n",
+                "from xinference.client import RESTfulClient\n",
+                "from IPython.display import Markdown, display"
+            ]
+        },
+        {
+            "cell_type": "markdown",
+            "id": "7a2dce47",
+            "metadata": {},
+            "source": [
+                "Then, we launch a model and use it to create a service context. This allows us to connect the model to documents and queries in later steps.\n",
+                "\n",
+                "Feel free to change the parameters for better performance! In order to achieve optimal results, it is recommended to use models above 13B in size. That being said, 7B models are more than enough for this short demo.\n",
+                "\n",
+                "Here are some more parameter options for the Llama 2 chat model in GGML format, listed from the least space-consuming to the most resource-intensive but high-performing. \n",
+                "\n",
+                "\n",
+                "<span style=\"font-weight: bold; ;\">model_size_in_billions:</span> \n",
+                "\n",
+                "`7`, `13`, `70`\n",
+                "\n",
+                "<span style=\"font-weight: bold; ;\">quantization for 7B and 13B models:</span> \n",
+                "\n",
+                "`q2_K`, `q3_K_L`, `q3_K_M`, `q3_K_S`, `q4_0`, `q4_1`, `q4_K_M`, `q4_K_S`, `q5_0`, `q5_1`, `q5_K_M`, `q5_K_S`, `q6_K`, `q8_0`\n",
+                "\n",
+                "<span style=\"font-weight: bold; ;\">quantizations for 70B models:</span>\n",
+                "\n",
+                "`q4_0`\n"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "b48c6d7a-7a38-440b-8ecb-f43f9050ee54",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# Define a client to send commands to xinference\n",
+                "client = RESTfulClient(f\"http://localhost:{port}\")\n",
+                "\n",
+                "# Download and Launch a model, this may take a while the first time\n",
+                "model_uid = client.launch_model(\n",
+                "    model_name=\"llama-2-chat\",\n",
+                "    model_size_in_billions=7,\n",
+                "    model_format=\"ggmlv3\",\n",
+                "    quantization=\"q2_K\",\n",
+                ")\n",
+                "\n",
+                "# Initiate Xinference object to use the LLM\n",
+                "llm = Xinference(\n",
+                "    endpoint=f\"http://localhost:{port}\",\n",
+                "    model_uid=model_uid,\n",
+                "    temperature=0.0,\n",
+                "    max_tokens=512,\n",
+                ")\n",
+                "\n",
+                "service_context = ServiceContext.from_defaults(llm=llm)"
+            ]
+        },
+        {
+            "cell_type": "markdown",
+            "id": "094a02b7",
+            "metadata": {},
+            "source": [
+                "## <span style=\"font-size: xx-large;;\">🕺  </span> Index the Data... and Chat!\n",
+                "\n",
+                "In this step, we combine the model and the data to create a query engine. The query engine can then be used as a chat bot, answering our queries based on the given data.\n",
+                "\n",
+                "We will be using `VectorStoreIndex` since it is relatively fast. That being said, feel free to change the index for different experiences. Here are some available indexes already imported from the previous step:\n",
+                "\n",
+                "`SummaryIndex`, `TreeIndex`, `VectorStoreIndex`, `KeywordTableIndex`, `KnowledgeGraphIndex`\n",
+                "\n",
+                "To change the index, simply replace `VectorStoreIndex` with another index in the following code. \n",
+                "\n",
+                "The latest complete list of all available indexes can be found in Llama Index's [official Docs](https://gpt-index.readthedocs.io/en/latest/core_modules/data_modules/index/modules.html)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "708b323e-d314-4b83-864b-22a1ead60de9",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# create index from the data\n",
+                "documents = SimpleDirectoryReader(\"../data/paul_graham\").load_data()\n",
+                "\n",
+                "# change index name in the following line\n",
+                "index = VectorStoreIndex.from_documents(\n",
+                "    documents=documents, service_context=service_context\n",
+                ")\n",
+                "\n",
+                "# create the query engine\n",
+                "query_engine = index.as_query_engine()"
+            ]
+        },
+        {
+            "cell_type": "markdown",
+            "id": "548174a2",
+            "metadata": {},
+            "source": [
+                "We can optionally set the temperature and the max answer length (in tokens) directly through the `Xinference` object before asking a question. This allows us to change parameters for different questions without rebuilding the query engine every time.\n",
+                "\n",
+                "`temperature` is a number between 0 and 1 that controls the randomness of responses. Higher values increase creativity but may lead to off-topic replies. Setting it to zero guarantees the same response every time.\n",
+                "\n",
+                "`max_tokens` is an integer that sets an upper bound for the response length. Increase it if answers seem cut off, but be aware that too long a response may exceed the context window and cause errors.\n"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "e0b32ddb",
+            "metadata": {
+                "scrolled": false
+            },
+            "outputs": [],
+            "source": [
+                "# optionally, update the temperature and max answer length (in tokens)\n",
+                "llm.__dict__.update({\"temperature\": 0.0})\n",
+                "llm.__dict__.update({\"max_tokens\": 2048})\n",
+                "\n",
+                "# ask a question and display the answer\n",
+                "question = \"What did the author do after his time at Y Combinator?\"\n",
+                "\n",
+                "response = query_engine.query(question)\n",
+                "display(Markdown(f\"<b>{response}</b>\"))"
+            ]
+        }
+    ],
+    "metadata": {
+        "kernelspec": {
+            "display_name": "Python 3 (ipykernel)",
+            "language": "python",
+            "name": "python3"
+        },
+        "language_info": {
+            "codemirror_mode": {
+                "name": "ipython",
+                "version": 3
+            },
+            "file_extension": ".py",
+            "mimetype": "text/x-python",
+            "name": "python",
+            "nbconvert_exporter": "python",
+            "pygments_lexer": "ipython3",
+            "version": "3.11.3"
+        }
+    },
+    "nbformat": 4,
+    "nbformat_minor": 5
+}
\ No newline at end of file
diff --git a/docs/examples/node_postprocessor/RecencyPostprocessorDemo.ipynb b/docs/examples/node_postprocessor/RecencyPostprocessorDemo.ipynb
index 71a5f8a67fb45121a2112eb19ee8bb27c75e39b3..173ef1329662c9b117f92e5536f6bc542e80c41f 100644
--- a/docs/examples/node_postprocessor/RecencyPostprocessorDemo.ipynb
+++ b/docs/examples/node_postprocessor/RecencyPostprocessorDemo.ipynb
@@ -278,7 +278,7 @@
    },
    "outputs": [],
    "source": [
-    "from llama_index import ListIndex"
+    "from llama_index import SummaryIndex"
    ]
   },
   {
@@ -338,7 +338,7 @@
     }
    ],
    "source": [
-    "list_index = ListIndex(resp_nodes)\n",
+    "list_index = SummaryIndex(resp_nodes)\n",
     "query_engine = list_index.as_query_engine(node_postprocessors=[node_postprocessor])\n",
     "response = query_engine.query(query_str)"
    ]
diff --git a/docs/examples/node_postprocessor/TimeWeightedPostprocessorDemo.ipynb b/docs/examples/node_postprocessor/TimeWeightedPostprocessorDemo.ipynb
index 3890400b5b4478312218ae30a79c560c2bfe9634..1d8ec990ea0eedae0fd41765a5f7a9903d650e2d 100644
--- a/docs/examples/node_postprocessor/TimeWeightedPostprocessorDemo.ipynb
+++ b/docs/examples/node_postprocessor/TimeWeightedPostprocessorDemo.ipynb
@@ -272,7 +272,7 @@
    },
    "outputs": [],
    "source": [
-    "from llama_index import ListIndex"
+    "from llama_index import SummaryIndex"
    ]
   },
   {
@@ -315,7 +315,7 @@
     "# get the post-processed nodes -- which should be the top-1 sorted by date\n",
     "new_resp_nodes = node_postprocessor.postprocess_nodes(resp_nodes)\n",
     "\n",
-    "list_index = ListIndex([n.node for n in new_resp_nodes])\n",
+    "list_index = SummaryIndex([n.node for n in new_resp_nodes])\n",
     "query_engine = list_index.as_query_engine()\n",
     "response = query_engine.query(query_str)"
    ]
diff --git a/docs/examples/query_engine/RetrieverRouterQueryEngine.ipynb b/docs/examples/query_engine/RetrieverRouterQueryEngine.ipynb
index 8dc849e7d9b1019cfaf64cb08373dfd1e86b9b3a..8824b867cdec1a2efc7d87a41999f6d59e0755dd 100644
--- a/docs/examples/query_engine/RetrieverRouterQueryEngine.ipynb
+++ b/docs/examples/query_engine/RetrieverRouterQueryEngine.ipynb
@@ -1,442 +1,442 @@
 {
- "cells": [
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "# Retriever Router Query Engine\n",
-    "In this tutorial, we define a router query engine based on a retriever. The retriever will select a set of nodes, and we will in turn select the right QueryEngine.\n",
-    "\n",
-    "We use our new `ToolRetrieverRouterQueryEngine` class for this! "
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "### Setup"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 1,
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "# NOTE: This is ONLY necessary in jupyter notebook.\n",
-    "# Details: Jupyter runs an event-loop behind the scenes.\n",
-    "#          This results in nested event-loops when we start an event-loop to make async queries.\n",
-    "#          This is normally not allowed, we use nest_asyncio to allow it for convenience.\n",
-    "import nest_asyncio\n",
-    "\n",
-    "nest_asyncio.apply()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 2,
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "INFO:numexpr.utils:Note: NumExpr detected 12 cores but \"NUMEXPR_MAX_THREADS\" not set, so enforcing safe limit of 8.\n",
-      "Note: NumExpr detected 12 cores but \"NUMEXPR_MAX_THREADS\" not set, so enforcing safe limit of 8.\n",
-      "INFO:numexpr.utils:NumExpr defaulting to 8 threads.\n",
-      "NumExpr defaulting to 8 threads.\n"
-     ]
+    "cells": [
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "metadata": {},
+            "source": [
+                "# Retriever Router Query Engine\n",
+                "In this tutorial, we define a router query engine based on a retriever. The retriever will select a set of nodes, and we will in turn select the right QueryEngine.\n",
+                "\n",
+                "We use our new `ToolRetrieverRouterQueryEngine` class for this! "
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "metadata": {},
+            "source": [
+                "### Setup"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 1,
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "# NOTE: This is ONLY necessary in jupyter notebook.\n",
+                "# Details: Jupyter runs an event-loop behind the scenes.\n",
+                "#          This results in nested event-loops when we start an event-loop to make async queries.\n",
+                "#          This is normally not allowed, we use nest_asyncio to allow it for convenience.\n",
+                "import nest_asyncio\n",
+                "\n",
+                "nest_asyncio.apply()"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 2,
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "INFO:numexpr.utils:Note: NumExpr detected 12 cores but \"NUMEXPR_MAX_THREADS\" not set, so enforcing safe limit of 8.\n",
+                        "Note: NumExpr detected 12 cores but \"NUMEXPR_MAX_THREADS\" not set, so enforcing safe limit of 8.\n",
+                        "INFO:numexpr.utils:NumExpr defaulting to 8 threads.\n",
+                        "NumExpr defaulting to 8 threads.\n"
+                    ]
+                },
+                {
+                    "name": "stderr",
+                    "output_type": "stream",
+                    "text": [
+                        "/Users/jerryliu/Programming/gpt_index/.venv/lib/python3.10/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n",
+                        "  from .autonotebook import tqdm as notebook_tqdm\n"
+                    ]
+                }
+            ],
+            "source": [
+                "import logging\n",
+                "import sys\n",
+                "\n",
+                "logging.basicConfig(stream=sys.stdout, level=logging.INFO)\n",
+                "logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))\n",
+                "\n",
+                "from llama_index import (\n",
+                "    VectorStoreIndex,\n",
+                "    SummaryIndex,\n",
+                "    SimpleDirectoryReader,\n",
+                "    ServiceContext,\n",
+                "    StorageContext,\n",
+                ")"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "metadata": {},
+            "source": [
+                "### Load Data\n",
+                "\n",
+                "We first show how to convert a Document into a set of Nodes, and insert into a DocumentStore."
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 3,
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "# load documents\n",
+                "documents = SimpleDirectoryReader(\"../data/paul_graham\").load_data()"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 4,
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "# initialize service context (set chunk size)\n",
+                "service_context = ServiceContext.from_defaults(chunk_size=1024)\n",
+                "nodes = service_context.node_parser.get_nodes_from_documents(documents)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 5,
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "# initialize storage context (by default it's in-memory)\n",
+                "storage_context = StorageContext.from_defaults()\n",
+                "storage_context.docstore.add_documents(nodes)"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "metadata": {},
+            "source": [
+                "### Define List Index and Vector Index over Same Data "
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 6,
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "INFO:llama_index.token_counter.token_counter:> [build_index_from_nodes] Total LLM token usage: 0 tokens\n",
+                        "> [build_index_from_nodes] Total LLM token usage: 0 tokens\n",
+                        "INFO:llama_index.token_counter.token_counter:> [build_index_from_nodes] Total embedding token usage: 0 tokens\n",
+                        "> [build_index_from_nodes] Total embedding token usage: 0 tokens\n",
+                        "INFO:llama_index.token_counter.token_counter:> [build_index_from_nodes] Total LLM token usage: 0 tokens\n",
+                        "> [build_index_from_nodes] Total LLM token usage: 0 tokens\n",
+                        "INFO:llama_index.token_counter.token_counter:> [build_index_from_nodes] Total embedding token usage: 17038 tokens\n",
+                        "> [build_index_from_nodes] Total embedding token usage: 17038 tokens\n"
+                    ]
+                }
+            ],
+            "source": [
+                "list_index = SummaryIndex(nodes, storage_context=storage_context)\n",
+                "vector_index = VectorStoreIndex(nodes, storage_context=storage_context)"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "metadata": {},
+            "source": [
+                "### Define Query Engine and Tool for these Indices\n",
+                "\n",
+                "We define a Query Engine for each Index. We then wrap these with our `QueryEngineTool`."
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 7,
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "from llama_index.tools.query_engine import QueryEngineTool\n",
+                "\n",
+                "list_query_engine = list_index.as_query_engine(\n",
+                "    response_mode=\"tree_summarize\", use_async=True\n",
+                ")\n",
+                "vector_query_engine = vector_index.as_query_engine(\n",
+                "    response_mode=\"tree_summarize\", use_async=True\n",
+                ")\n",
+                "\n",
+                "list_tool = QueryEngineTool.from_defaults(\n",
+                "    query_engine=list_query_engine,\n",
+                "    description=\"Useful for questions asking for a biography of the author.\",\n",
+                ")\n",
+                "vector_tool = QueryEngineTool.from_defaults(\n",
+                "    query_engine=vector_query_engine,\n",
+                "    description=\"Useful for retrieving specific snippets from the author's life, like his time in college, his time in YC, or more.\",\n",
+                ")"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "metadata": {},
+            "source": [
+                "### Define Retrieval-Augmented Router Query Engine\n",
+                "\n",
+                "We define a router query engine that's augmented with a retrieval mechanism, to help deal with the case when the set of choices is too large. \n",
+                "\n",
+                "To do this, we first define an `ObjectIndex` over the set of query engine tools. The `ObjectIndex` is defined an underlying index data structure (e.g. a vector index, keyword index), and can serialize QueryEngineTool objects to/from our indices.\n",
+                "\n",
+                "We then use our `ToolRetrieverRouterQueryEngine` class, and pass in an `ObjectRetriever` over `QueryEngineTool` objects.\n",
+                "The `ObjectRetriever` corresponds to our `ObjectIndex`. \n",
+                "\n",
+                "This retriever can then dynamically retrieve the relevant query engines during query-time. This allows us to pass in an arbitrary number of query engine tools without worrying about prompt limitations. "
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 8,
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "INFO:llama_index.token_counter.token_counter:> [build_index_from_nodes] Total LLM token usage: 0 tokens\n",
+                        "> [build_index_from_nodes] Total LLM token usage: 0 tokens\n",
+                        "INFO:llama_index.token_counter.token_counter:> [build_index_from_nodes] Total embedding token usage: 59 tokens\n",
+                        "> [build_index_from_nodes] Total embedding token usage: 59 tokens\n"
+                    ]
+                }
+            ],
+            "source": [
+                "from llama_index import VectorStoreIndex\n",
+                "from llama_index.objects import ObjectIndex, SimpleToolNodeMapping\n",
+                "\n",
+                "tool_mapping = SimpleToolNodeMapping.from_objects([list_tool, vector_tool])\n",
+                "obj_index = ObjectIndex.from_objects(\n",
+                "    [list_tool, vector_tool],\n",
+                "    tool_mapping,\n",
+                "    VectorStoreIndex,\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 9,
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "from llama_index.query_engine import ToolRetrieverRouterQueryEngine\n",
+                "\n",
+                "query_engine = ToolRetrieverRouterQueryEngine(obj_index.as_retriever())"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 10,
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "INFO:llama_index.token_counter.token_counter:> [retrieve] Total LLM token usage: 0 tokens\n",
+                        "> [retrieve] Total LLM token usage: 0 tokens\n",
+                        "INFO:llama_index.token_counter.token_counter:> [retrieve] Total embedding token usage: 10 tokens\n",
+                        "> [retrieve] Total embedding token usage: 10 tokens\n",
+                        "INFO:llama_index.token_counter.token_counter:> [retrieve] Total LLM token usage: 0 tokens\n",
+                        "> [retrieve] Total LLM token usage: 0 tokens\n",
+                        "INFO:llama_index.token_counter.token_counter:> [retrieve] Total embedding token usage: 0 tokens\n",
+                        "> [retrieve] Total embedding token usage: 0 tokens\n",
+                        "INFO:llama_index.token_counter.token_counter:> [get_response] Total LLM token usage: 2111 tokens\n",
+                        "> [get_response] Total LLM token usage: 2111 tokens\n",
+                        "INFO:llama_index.token_counter.token_counter:> [get_response] Total embedding token usage: 0 tokens\n",
+                        "> [get_response] Total embedding token usage: 0 tokens\n",
+                        "INFO:llama_index.token_counter.token_counter:> [retrieve] Total LLM token usage: 0 tokens\n",
+                        "> [retrieve] Total LLM token usage: 0 tokens\n",
+                        "INFO:llama_index.token_counter.token_counter:> [retrieve] Total embedding token usage: 0 tokens\n",
+                        "> [retrieve] Total embedding token usage: 0 tokens\n",
+                        "INFO:llama_index.token_counter.token_counter:> [get_response] Total LLM token usage: 2148 tokens\n",
+                        "> [get_response] Total LLM token usage: 2148 tokens\n",
+                        "INFO:llama_index.token_counter.token_counter:> [get_response] Total embedding token usage: 0 tokens\n",
+                        "> [get_response] Total embedding token usage: 0 tokens\n",
+                        "INFO:llama_index.query_engine.router_query_engine:Combining responses from multiple query engines.\n",
+                        "Combining responses from multiple query engines.\n",
+                        "INFO:llama_index.token_counter.token_counter:> [get_response] Total LLM token usage: 1063 tokens\n",
+                        "> [get_response] Total LLM token usage: 1063 tokens\n",
+                        "INFO:llama_index.token_counter.token_counter:> [get_response] Total embedding token usage: 0 tokens\n",
+                        "> [get_response] Total embedding token usage: 0 tokens\n"
+                    ]
+                }
+            ],
+            "source": [
+                "response = query_engine.query(\"What is a biography of the author's life?\")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 11,
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "\n",
+                        "The author is a creative person who has had a varied and interesting life. They grew up in the US and went to college, but then decided to take a break and pursue their passion for art. They applied to two art schools, RISD in the US and the Accademia di Belli Arti in Florence, and were accepted to both. They chose to go to Florence, where they took the entrance exam and passed. They then spent a year living in Florence, studying art at the Accademia and painting still lives in their bedroom. After their year in Florence, the author returned to the US and completed their BFA program at RISD. They then went on to pursue a PhD in computer science at MIT, where they wrote a dissertation on the evolution of computers. During their time at MIT, they also did consulting work and wrote essays on topics they had been thinking about. After completing their PhD, the author started a software company, Viaweb, which was eventually acquired by Yahoo. They then went on to write essays and articles about their experiences in the tech industry. They also wrote an essay about how to choose what to work on, which was based on their own experience. The author then moved back to Florence, where they found a rent-stabilized apartment and continued to pursue their interest in art. They wrote about their experiences in the art world, and experienced the reactions of readers to their essays. The author is now a successful writer and continues to write essays and articles about topics they are passionate about. \n",
+                        "\n",
+                        "In summary, the author's life has been a journey of exploration and creativity. They have experienced a wide range of different things in their life, from art school to computer science to the tech industry, and have used their experiences to inform their writing. They have pursued their passion for art, and have used their knowledge and experience to create meaningful work.\n"
+                    ]
+                }
+            ],
+            "source": [
+                "print(str(response))"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 12,
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [
+                {
+                    "data": {
+                        "text/plain": [
+                            "\"\\nThe author is a creative person who has had a varied and interesting life. They grew up in the US and went to college, but then decided to take a break and pursue their passion for art. They applied to two art schools, RISD in the US and the Accademia di Belli Arti in Florence, and were accepted to both. They chose to go to Florence, where they took the entrance exam and passed. They then spent a year living in Florence, studying art at the Accademia and painting still lives in their bedroom. After their year in Florence, the author returned to the US and completed their BFA program at RISD. They then went on to pursue a PhD in computer science at MIT, where they wrote a dissertation on the evolution of computers. During their time at MIT, they also did consulting work and wrote essays on topics they had been thinking about. After completing their PhD, the author started a software company, Viaweb, which was eventually acquired by Yahoo. They then went on to write essays and articles about their experiences in the tech industry. They also wrote an essay about how to choose what to work on, which was based on their own experience. The author then moved back to Florence, where they found a rent-stabilized apartment and continued to pursue their interest in art. They wrote about their experiences in the art world, and experienced the reactions of readers to their essays. The author is now a successful writer and continues to write essays and articles about topics they are passionate about. \\n\\nIn summary, the author's life has been a journey of exploration and creativity. They have experienced a wide range of different things in their life, from art school to computer science to the tech industry, and have used their experiences to inform their writing. They have pursued their passion for art, and have used their knowledge and experience to create meaningful work.\""
+                        ]
+                    },
+                    "execution_count": 12,
+                    "metadata": {},
+                    "output_type": "execute_result"
+                }
+            ],
+            "source": [
+                "response"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 13,
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "INFO:llama_index.token_counter.token_counter:> [retrieve] Total LLM token usage: 0 tokens\n",
+                        "> [retrieve] Total LLM token usage: 0 tokens\n",
+                        "INFO:llama_index.token_counter.token_counter:> [retrieve] Total embedding token usage: 11 tokens\n",
+                        "> [retrieve] Total embedding token usage: 11 tokens\n",
+                        "INFO:llama_index.token_counter.token_counter:> [retrieve] Total LLM token usage: 0 tokens\n",
+                        "> [retrieve] Total LLM token usage: 0 tokens\n",
+                        "INFO:llama_index.token_counter.token_counter:> [retrieve] Total embedding token usage: 0 tokens\n",
+                        "> [retrieve] Total embedding token usage: 0 tokens\n",
+                        "INFO:llama_index.token_counter.token_counter:> [get_response] Total LLM token usage: 1947 tokens\n",
+                        "> [get_response] Total LLM token usage: 1947 tokens\n",
+                        "INFO:llama_index.token_counter.token_counter:> [get_response] Total embedding token usage: 0 tokens\n",
+                        "> [get_response] Total embedding token usage: 0 tokens\n",
+                        "INFO:llama_index.token_counter.token_counter:> [retrieve] Total LLM token usage: 0 tokens\n",
+                        "> [retrieve] Total LLM token usage: 0 tokens\n",
+                        "INFO:llama_index.token_counter.token_counter:> [retrieve] Total embedding token usage: 0 tokens\n",
+                        "> [retrieve] Total embedding token usage: 0 tokens\n",
+                        "INFO:llama_index.token_counter.token_counter:> [get_response] Total LLM token usage: 1947 tokens\n",
+                        "> [get_response] Total LLM token usage: 1947 tokens\n",
+                        "INFO:llama_index.token_counter.token_counter:> [get_response] Total embedding token usage: 0 tokens\n",
+                        "> [get_response] Total embedding token usage: 0 tokens\n",
+                        "INFO:llama_index.query_engine.router_query_engine:Combining responses from multiple query engines.\n",
+                        "Combining responses from multiple query engines.\n",
+                        "INFO:llama_index.token_counter.token_counter:> [get_response] Total LLM token usage: 316 tokens\n",
+                        "> [get_response] Total LLM token usage: 316 tokens\n",
+                        "INFO:llama_index.token_counter.token_counter:> [get_response] Total embedding token usage: 0 tokens\n",
+                        "> [get_response] Total embedding token usage: 0 tokens\n"
+                    ]
+                }
+            ],
+            "source": [
+                "response = query_engine.query(\"What did Paul Graham do during his time in college?\")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 14,
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "\n",
+                        "Paul Graham studied philosophy in college, but he did not pursue AI. He continued to work on programming outside of school, writing simple games, a program to predict how high his model rockets would fly, and a word processor. He eventually convinced his father to buy him a TRS-80 computer, which he used to further his programming skills.\n"
+                    ]
+                }
+            ],
+            "source": [
+                "print(str(response))"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "metadata": {},
+            "outputs": [],
+            "source": []
+        }
+    ],
+    "metadata": {
+        "kernelspec": {
+            "display_name": "llama_index_v2",
+            "language": "python",
+            "name": "llama_index_v2"
+        },
+        "language_info": {
+            "codemirror_mode": {
+                "name": "ipython",
+                "version": 3
+            },
+            "file_extension": ".py",
+            "mimetype": "text/x-python",
+            "name": "python",
+            "nbconvert_exporter": "python",
+            "pygments_lexer": "ipython3",
+            "version": "3.10.10"
+        }
     },
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "/Users/jerryliu/Programming/gpt_index/.venv/lib/python3.10/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n",
-      "  from .autonotebook import tqdm as notebook_tqdm\n"
-     ]
-    }
-   ],
-   "source": [
-    "import logging\n",
-    "import sys\n",
-    "\n",
-    "logging.basicConfig(stream=sys.stdout, level=logging.INFO)\n",
-    "logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))\n",
-    "\n",
-    "from llama_index import (\n",
-    "    VectorStoreIndex,\n",
-    "    ListIndex,\n",
-    "    SimpleDirectoryReader,\n",
-    "    ServiceContext,\n",
-    "    StorageContext,\n",
-    ")"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "### Load Data\n",
-    "\n",
-    "We first show how to convert a Document into a set of Nodes, and insert into a DocumentStore."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 3,
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "# load documents\n",
-    "documents = SimpleDirectoryReader(\"../data/paul_graham\").load_data()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 4,
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "# initialize service context (set chunk size)\n",
-    "service_context = ServiceContext.from_defaults(chunk_size=1024)\n",
-    "nodes = service_context.node_parser.get_nodes_from_documents(documents)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 5,
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "# initialize storage context (by default it's in-memory)\n",
-    "storage_context = StorageContext.from_defaults()\n",
-    "storage_context.docstore.add_documents(nodes)"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "### Define List Index and Vector Index over Same Data "
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 6,
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "INFO:llama_index.token_counter.token_counter:> [build_index_from_nodes] Total LLM token usage: 0 tokens\n",
-      "> [build_index_from_nodes] Total LLM token usage: 0 tokens\n",
-      "INFO:llama_index.token_counter.token_counter:> [build_index_from_nodes] Total embedding token usage: 0 tokens\n",
-      "> [build_index_from_nodes] Total embedding token usage: 0 tokens\n",
-      "INFO:llama_index.token_counter.token_counter:> [build_index_from_nodes] Total LLM token usage: 0 tokens\n",
-      "> [build_index_from_nodes] Total LLM token usage: 0 tokens\n",
-      "INFO:llama_index.token_counter.token_counter:> [build_index_from_nodes] Total embedding token usage: 17038 tokens\n",
-      "> [build_index_from_nodes] Total embedding token usage: 17038 tokens\n"
-     ]
-    }
-   ],
-   "source": [
-    "list_index = ListIndex(nodes, storage_context=storage_context)\n",
-    "vector_index = VectorStoreIndex(nodes, storage_context=storage_context)"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "### Define Query Engine and Tool for these Indices\n",
-    "\n",
-    "We define a Query Engine for each Index. We then wrap these with our `QueryEngineTool`."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 7,
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "from llama_index.tools.query_engine import QueryEngineTool\n",
-    "\n",
-    "list_query_engine = list_index.as_query_engine(\n",
-    "    response_mode=\"tree_summarize\", use_async=True\n",
-    ")\n",
-    "vector_query_engine = vector_index.as_query_engine(\n",
-    "    response_mode=\"tree_summarize\", use_async=True\n",
-    ")\n",
-    "\n",
-    "list_tool = QueryEngineTool.from_defaults(\n",
-    "    query_engine=list_query_engine,\n",
-    "    description=\"Useful for questions asking for a biography of the author.\",\n",
-    ")\n",
-    "vector_tool = QueryEngineTool.from_defaults(\n",
-    "    query_engine=vector_query_engine,\n",
-    "    description=\"Useful for retrieving specific snippets from the author's life, like his time in college, his time in YC, or more.\",\n",
-    ")"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "### Define Retrieval-Augmented Router Query Engine\n",
-    "\n",
-    "We define a router query engine that's augmented with a retrieval mechanism, to help deal with the case when the set of choices is too large. \n",
-    "\n",
-    "To do this, we first define an `ObjectIndex` over the set of query engine tools. The `ObjectIndex` is defined an underlying index data structure (e.g. a vector index, keyword index), and can serialize QueryEngineTool objects to/from our indices.\n",
-    "\n",
-    "We then use our `ToolRetrieverRouterQueryEngine` class, and pass in an `ObjectRetriever` over `QueryEngineTool` objects.\n",
-    "The `ObjectRetriever` corresponds to our `ObjectIndex`. \n",
-    "\n",
-    "This retriever can then dyamically retrieve the relevant query engines during query-time. This allows us to pass in an arbitrary number of query engine tools without worrying about prompt limitations. "
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 8,
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "INFO:llama_index.token_counter.token_counter:> [build_index_from_nodes] Total LLM token usage: 0 tokens\n",
-      "> [build_index_from_nodes] Total LLM token usage: 0 tokens\n",
-      "INFO:llama_index.token_counter.token_counter:> [build_index_from_nodes] Total embedding token usage: 59 tokens\n",
-      "> [build_index_from_nodes] Total embedding token usage: 59 tokens\n"
-     ]
-    }
-   ],
-   "source": [
-    "from llama_index import VectorStoreIndex\n",
-    "from llama_index.objects import ObjectIndex, SimpleToolNodeMapping\n",
-    "\n",
-    "tool_mapping = SimpleToolNodeMapping.from_objects([list_tool, vector_tool])\n",
-    "obj_index = ObjectIndex.from_objects(\n",
-    "    [list_tool, vector_tool],\n",
-    "    tool_mapping,\n",
-    "    VectorStoreIndex,\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 9,
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "from llama_index.query_engine import ToolRetrieverRouterQueryEngine\n",
-    "\n",
-    "query_engine = ToolRetrieverRouterQueryEngine(obj_index.as_retriever())"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 10,
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "INFO:llama_index.token_counter.token_counter:> [retrieve] Total LLM token usage: 0 tokens\n",
-      "> [retrieve] Total LLM token usage: 0 tokens\n",
-      "INFO:llama_index.token_counter.token_counter:> [retrieve] Total embedding token usage: 10 tokens\n",
-      "> [retrieve] Total embedding token usage: 10 tokens\n",
-      "INFO:llama_index.token_counter.token_counter:> [retrieve] Total LLM token usage: 0 tokens\n",
-      "> [retrieve] Total LLM token usage: 0 tokens\n",
-      "INFO:llama_index.token_counter.token_counter:> [retrieve] Total embedding token usage: 0 tokens\n",
-      "> [retrieve] Total embedding token usage: 0 tokens\n",
-      "INFO:llama_index.token_counter.token_counter:> [get_response] Total LLM token usage: 2111 tokens\n",
-      "> [get_response] Total LLM token usage: 2111 tokens\n",
-      "INFO:llama_index.token_counter.token_counter:> [get_response] Total embedding token usage: 0 tokens\n",
-      "> [get_response] Total embedding token usage: 0 tokens\n",
-      "INFO:llama_index.token_counter.token_counter:> [retrieve] Total LLM token usage: 0 tokens\n",
-      "> [retrieve] Total LLM token usage: 0 tokens\n",
-      "INFO:llama_index.token_counter.token_counter:> [retrieve] Total embedding token usage: 0 tokens\n",
-      "> [retrieve] Total embedding token usage: 0 tokens\n",
-      "INFO:llama_index.token_counter.token_counter:> [get_response] Total LLM token usage: 2148 tokens\n",
-      "> [get_response] Total LLM token usage: 2148 tokens\n",
-      "INFO:llama_index.token_counter.token_counter:> [get_response] Total embedding token usage: 0 tokens\n",
-      "> [get_response] Total embedding token usage: 0 tokens\n",
-      "INFO:llama_index.query_engine.router_query_engine:Combining responses from multiple query engines.\n",
-      "Combining responses from multiple query engines.\n",
-      "INFO:llama_index.token_counter.token_counter:> [get_response] Total LLM token usage: 1063 tokens\n",
-      "> [get_response] Total LLM token usage: 1063 tokens\n",
-      "INFO:llama_index.token_counter.token_counter:> [get_response] Total embedding token usage: 0 tokens\n",
-      "> [get_response] Total embedding token usage: 0 tokens\n"
-     ]
-    }
-   ],
-   "source": [
-    "response = query_engine.query(\"What is a biography of the author's life?\")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 11,
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "\n",
-      "The author is a creative person who has had a varied and interesting life. They grew up in the US and went to college, but then decided to take a break and pursue their passion for art. They applied to two art schools, RISD in the US and the Accademia di Belli Arti in Florence, and were accepted to both. They chose to go to Florence, where they took the entrance exam and passed. They then spent a year living in Florence, studying art at the Accademia and painting still lives in their bedroom. After their year in Florence, the author returned to the US and completed their BFA program at RISD. They then went on to pursue a PhD in computer science at MIT, where they wrote a dissertation on the evolution of computers. During their time at MIT, they also did consulting work and wrote essays on topics they had been thinking about. After completing their PhD, the author started a software company, Viaweb, which was eventually acquired by Yahoo. They then went on to write essays and articles about their experiences in the tech industry. They also wrote an essay about how to choose what to work on, which was based on their own experience. The author then moved back to Florence, where they found a rent-stabilized apartment and continued to pursue their interest in art. They wrote about their experiences in the art world, and experienced the reactions of readers to their essays. The author is now a successful writer and continues to write essays and articles about topics they are passionate about. \n",
-      "\n",
-      "In summary, the author's life has been a journey of exploration and creativity. They have experienced a wide range of different things in their life, from art school to computer science to the tech industry, and have used their experiences to inform their writing. They have pursued their passion for art, and have used their knowledge and experience to create meaningful work.\n"
-     ]
-    }
-   ],
-   "source": [
-    "print(str(response))"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 12,
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "\"\\nThe author is a creative person who has had a varied and interesting life. They grew up in the US and went to college, but then decided to take a break and pursue their passion for art. They applied to two art schools, RISD in the US and the Accademia di Belli Arti in Florence, and were accepted to both. They chose to go to Florence, where they took the entrance exam and passed. They then spent a year living in Florence, studying art at the Accademia and painting still lives in their bedroom. After their year in Florence, the author returned to the US and completed their BFA program at RISD. They then went on to pursue a PhD in computer science at MIT, where they wrote a dissertation on the evolution of computers. During their time at MIT, they also did consulting work and wrote essays on topics they had been thinking about. After completing their PhD, the author started a software company, Viaweb, which was eventually acquired by Yahoo. They then went on to write essays and articles about their experiences in the tech industry. They also wrote an essay about how to choose what to work on, which was based on their own experience. The author then moved back to Florence, where they found a rent-stabilized apartment and continued to pursue their interest in art. They wrote about their experiences in the art world, and experienced the reactions of readers to their essays. The author is now a successful writer and continues to write essays and articles about topics they are passionate about. \\n\\nIn summary, the author's life has been a journey of exploration and creativity. They have experienced a wide range of different things in their life, from art school to computer science to the tech industry, and have used their experiences to inform their writing. They have pursued their passion for art, and have used their knowledge and experience to create meaningful work.\""
-      ]
-     },
-     "execution_count": 12,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "response"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 13,
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "INFO:llama_index.token_counter.token_counter:> [retrieve] Total LLM token usage: 0 tokens\n",
-      "> [retrieve] Total LLM token usage: 0 tokens\n",
-      "INFO:llama_index.token_counter.token_counter:> [retrieve] Total embedding token usage: 11 tokens\n",
-      "> [retrieve] Total embedding token usage: 11 tokens\n",
-      "INFO:llama_index.token_counter.token_counter:> [retrieve] Total LLM token usage: 0 tokens\n",
-      "> [retrieve] Total LLM token usage: 0 tokens\n",
-      "INFO:llama_index.token_counter.token_counter:> [retrieve] Total embedding token usage: 0 tokens\n",
-      "> [retrieve] Total embedding token usage: 0 tokens\n",
-      "INFO:llama_index.token_counter.token_counter:> [get_response] Total LLM token usage: 1947 tokens\n",
-      "> [get_response] Total LLM token usage: 1947 tokens\n",
-      "INFO:llama_index.token_counter.token_counter:> [get_response] Total embedding token usage: 0 tokens\n",
-      "> [get_response] Total embedding token usage: 0 tokens\n",
-      "INFO:llama_index.token_counter.token_counter:> [retrieve] Total LLM token usage: 0 tokens\n",
-      "> [retrieve] Total LLM token usage: 0 tokens\n",
-      "INFO:llama_index.token_counter.token_counter:> [retrieve] Total embedding token usage: 0 tokens\n",
-      "> [retrieve] Total embedding token usage: 0 tokens\n",
-      "INFO:llama_index.token_counter.token_counter:> [get_response] Total LLM token usage: 1947 tokens\n",
-      "> [get_response] Total LLM token usage: 1947 tokens\n",
-      "INFO:llama_index.token_counter.token_counter:> [get_response] Total embedding token usage: 0 tokens\n",
-      "> [get_response] Total embedding token usage: 0 tokens\n",
-      "INFO:llama_index.query_engine.router_query_engine:Combining responses from multiple query engines.\n",
-      "Combining responses from multiple query engines.\n",
-      "INFO:llama_index.token_counter.token_counter:> [get_response] Total LLM token usage: 316 tokens\n",
-      "> [get_response] Total LLM token usage: 316 tokens\n",
-      "INFO:llama_index.token_counter.token_counter:> [get_response] Total embedding token usage: 0 tokens\n",
-      "> [get_response] Total embedding token usage: 0 tokens\n"
-     ]
-    }
-   ],
-   "source": [
-    "response = query_engine.query(\"What did Paul Graham do during his time in college?\")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 14,
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "\n",
-      "Paul Graham studied philosophy in college, but he did not pursue AI. He continued to work on programming outside of school, writing simple games, a program to predict how high his model rockets would fly, and a word processor. He eventually convinced his father to buy him a TRS-80 computer, which he used to further his programming skills.\n"
-     ]
-    }
-   ],
-   "source": [
-    "print(str(response))"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": []
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "llama_index_v2",
-   "language": "python",
-   "name": "llama_index_v2"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 3
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython3",
-   "version": "3.10.10"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 4
+    "nbformat": 4,
+    "nbformat_minor": 4
 }
diff --git a/docs/examples/query_engine/RouterQueryEngine.ipynb b/docs/examples/query_engine/RouterQueryEngine.ipynb
index 3f8ee181130689b18046c10bb43c5ac845f6ac45..a104f7ed940b10726e2f66055a2cb220c13e7ee7 100644
--- a/docs/examples/query_engine/RouterQueryEngine.ipynb
+++ b/docs/examples/query_engine/RouterQueryEngine.ipynb
@@ -1,468 +1,468 @@
 {
- "cells": [
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "# Router Query Engine\n",
-    "In this tutorial, we define a custom router query engine that selects one out of several candidate query engines to execute a query."
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "### Setup"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 2,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# NOTE: This is ONLY necessary in jupyter notebook.\n",
-    "# Details: Jupyter runs an event-loop behind the scenes.\n",
-    "#          This results in nested event-loops when we start an event-loop to make async queries.\n",
-    "#          This is normally not allowed, we use nest_asyncio to allow it for convenience.\n",
-    "import nest_asyncio\n",
-    "\n",
-    "nest_asyncio.apply()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 3,
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Note: NumExpr detected 12 cores but \"NUMEXPR_MAX_THREADS\" not set, so enforcing safe limit of 8.\n",
-      "NumExpr defaulting to 8 threads.\n"
-     ]
-    }
-   ],
-   "source": [
-    "import logging\n",
-    "import sys\n",
-    "\n",
-    "logging.basicConfig(stream=sys.stdout, level=logging.INFO)\n",
-    "logging.getLogger().handlers = []\n",
-    "logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))\n",
-    "\n",
-    "from llama_index import (\n",
-    "    VectorStoreIndex,\n",
-    "    ListIndex,\n",
-    "    SimpleDirectoryReader,\n",
-    "    ServiceContext,\n",
-    "    StorageContext,\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "### Load Data\n",
-    "\n",
-    "We first show how to convert a Document into a set of Nodes, and insert into a DocumentStore."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 4,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# load documents\n",
-    "documents = SimpleDirectoryReader(\"../data/paul_graham\").load_data()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 5,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# initialize service context (set chunk size)\n",
-    "service_context = ServiceContext.from_defaults(chunk_size=1024)\n",
-    "nodes = service_context.node_parser.get_nodes_from_documents(documents)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 6,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# initialize storage context (by default it's in-memory)\n",
-    "storage_context = StorageContext.from_defaults()\n",
-    "storage_context.docstore.add_documents(nodes)"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "### Define List Index and Vector Index over Same Data "
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 7,
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "> [build_index_from_nodes] Total LLM token usage: 0 tokens\n",
-      "> [build_index_from_nodes] Total embedding token usage: 0 tokens\n",
-      "> [build_index_from_nodes] Total LLM token usage: 0 tokens\n",
-      "> [build_index_from_nodes] Total embedding token usage: 17038 tokens\n"
-     ]
-    }
-   ],
-   "source": [
-    "list_index = ListIndex(nodes, storage_context=storage_context)\n",
-    "vector_index = VectorStoreIndex(nodes, storage_context=storage_context)"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "### Define Query Engines and Set Metadata"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 8,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "list_query_engine = list_index.as_query_engine(\n",
-    "    response_mode=\"tree_summarize\",\n",
-    "    use_async=True,\n",
-    ")\n",
-    "vector_query_engine = vector_index.as_query_engine()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 9,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "from llama_index.tools.query_engine import QueryEngineTool\n",
-    "\n",
-    "\n",
-    "list_tool = QueryEngineTool.from_defaults(\n",
-    "    query_engine=list_query_engine,\n",
-    "    description=\"Useful for summarization questions related to Paul Graham eassy on What I Worked On.\",\n",
-    ")\n",
-    "\n",
-    "vector_tool = QueryEngineTool.from_defaults(\n",
-    "    query_engine=vector_query_engine,\n",
-    "    description=\"Useful for retrieving specific context from Paul Graham essay on What I Worked On.\",\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "### Define Router Query Engine\n",
-    "\n",
-    "There are several selectors available, each with some distinct attributes.\n",
-    "\n",
-    "The LLM selectors use the LLM to output a JSON that is parsed, and the corresponding indexes are queried.\n",
-    "\n",
-    "The Pydantic selectors (currently only supported by `gpt-4-0613` and `gpt-3.5-turbo-0613` (the default)) use the OpenAI Function Call API to produce pydantic selection objects, rather than parsing raw JSON.\n",
-    "\n",
-    "For each type of selector, there is also the option to select 1 index to route to, or multiple.\n",
-    "\n",
-    "#### PydanticSingleSelector\n",
-    "\n",
-    "Use the OpenAI Function API to generate/parse pydantic objects under the hood for the router selector."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 10,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "from llama_index.query_engine.router_query_engine import RouterQueryEngine\n",
-    "from llama_index.selectors.llm_selectors import LLMSingleSelector, LLMMultiSelector\n",
-    "from llama_index.selectors.pydantic_selectors import (\n",
-    "    PydanticMultiSelector,\n",
-    "    PydanticSingleSelector,\n",
-    ")\n",
-    "\n",
-    "\n",
-    "query_engine = RouterQueryEngine(\n",
-    "    selector=PydanticSingleSelector.from_defaults(),\n",
-    "    query_engine_tools=[\n",
-    "        list_tool,\n",
-    "        vector_tool,\n",
-    "    ],\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 11,
-   "metadata": {
-    "scrolled": true,
-    "tags": []
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Selecting query engine 0: The first choice is specifically related to summarization questions about Paul Graham's essay on What I Worked On..\n",
-      "> [get_response] Total LLM token usage: 3411 tokens\n",
-      "> [get_response] Total embedding token usage: 0 tokens\n",
-      "> [get_response] Total LLM token usage: 3411 tokens\n",
-      "> [get_response] Total embedding token usage: 0 tokens\n"
-     ]
+    "cells": [
+        {
+            "cell_type": "markdown",
+            "metadata": {},
+            "source": [
+                "# Router Query Engine\n",
+                "In this tutorial, we define a custom router query engine that selects one out of several candidate query engines to execute a query."
+            ]
+        },
+        {
+            "cell_type": "markdown",
+            "metadata": {},
+            "source": [
+                "### Setup"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 2,
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# NOTE: This is ONLY necessary in jupyter notebook.\n",
+                "# Details: Jupyter runs an event-loop behind the scenes.\n",
+                "#          This results in nested event-loops when we start an event-loop to make async queries.\n",
+                "#          This is normally not allowed, we use nest_asyncio to allow it for convenience.\n",
+                "import nest_asyncio\n",
+                "\n",
+                "nest_asyncio.apply()"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 3,
+            "metadata": {},
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "Note: NumExpr detected 12 cores but \"NUMEXPR_MAX_THREADS\" not set, so enforcing safe limit of 8.\n",
+                        "NumExpr defaulting to 8 threads.\n"
+                    ]
+                }
+            ],
+            "source": [
+                "import logging\n",
+                "import sys\n",
+                "\n",
+                "logging.basicConfig(stream=sys.stdout, level=logging.INFO)\n",
+                "logging.getLogger().handlers = []\n",
+                "logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))\n",
+                "\n",
+                "from llama_index import (\n",
+                "    VectorStoreIndex,\n",
+                "    SummaryIndex,\n",
+                "    SimpleDirectoryReader,\n",
+                "    ServiceContext,\n",
+                "    StorageContext,\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "markdown",
+            "metadata": {},
+            "source": [
+                "### Load Data\n",
+                "\n",
+                "We first show how to convert a Document into a set of Nodes, and insert into a DocumentStore."
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 4,
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# load documents\n",
+                "documents = SimpleDirectoryReader(\"../data/paul_graham\").load_data()"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 5,
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# initialize service context (set chunk size)\n",
+                "service_context = ServiceContext.from_defaults(chunk_size=1024)\n",
+                "nodes = service_context.node_parser.get_nodes_from_documents(documents)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 6,
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# initialize storage context (by default it's in-memory)\n",
+                "storage_context = StorageContext.from_defaults()\n",
+                "storage_context.docstore.add_documents(nodes)"
+            ]
+        },
+        {
+            "cell_type": "markdown",
+            "metadata": {},
+            "source": [
+                "### Define List Index and Vector Index over Same Data "
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 7,
+            "metadata": {},
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "> [build_index_from_nodes] Total LLM token usage: 0 tokens\n",
+                        "> [build_index_from_nodes] Total embedding token usage: 0 tokens\n",
+                        "> [build_index_from_nodes] Total LLM token usage: 0 tokens\n",
+                        "> [build_index_from_nodes] Total embedding token usage: 17038 tokens\n"
+                    ]
+                }
+            ],
+            "source": [
+                "list_index = SummaryIndex(nodes, storage_context=storage_context)\n",
+                "vector_index = VectorStoreIndex(nodes, storage_context=storage_context)"
+            ]
+        },
+        {
+            "cell_type": "markdown",
+            "metadata": {},
+            "source": [
+                "### Define Query Engines and Set Metadata"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 8,
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "list_query_engine = list_index.as_query_engine(\n",
+                "    response_mode=\"tree_summarize\",\n",
+                "    use_async=True,\n",
+                ")\n",
+                "vector_query_engine = vector_index.as_query_engine()"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 9,
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "from llama_index.tools.query_engine import QueryEngineTool\n",
+                "\n",
+                "\n",
+                "list_tool = QueryEngineTool.from_defaults(\n",
+                "    query_engine=list_query_engine,\n",
+                "    description=\"Useful for summarization questions related to Paul Graham eassy on What I Worked On.\",\n",
+                ")\n",
+                "\n",
+                "vector_tool = QueryEngineTool.from_defaults(\n",
+                "    query_engine=vector_query_engine,\n",
+                "    description=\"Useful for retrieving specific context from Paul Graham essay on What I Worked On.\",\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "markdown",
+            "metadata": {},
+            "source": [
+                "### Define Router Query Engine\n",
+                "\n",
+                "There are several selectors available, each with some distinct attributes.\n",
+                "\n",
+                "The LLM selectors use the LLM to output a JSON that is parsed, and the corresponding indexes are queried.\n",
+                "\n",
+                "The Pydantic selectors (currently only supported by `gpt-4-0613` and `gpt-3.5-turbo-0613` (the default)) use the OpenAI Function Call API to produce pydantic selection objects, rather than parsing raw JSON.\n",
+                "\n",
+                "For each type of selector, there is also the option to select 1 index to route to, or multiple.\n",
+                "\n",
+                "#### PydanticSingleSelector\n",
+                "\n",
+                "Use the OpenAI Function API to generate/parse pydantic objects under the hood for the router selector."
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 10,
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "from llama_index.query_engine.router_query_engine import RouterQueryEngine\n",
+                "from llama_index.selectors.llm_selectors import LLMSingleSelector, LLMMultiSelector\n",
+                "from llama_index.selectors.pydantic_selectors import (\n",
+                "    PydanticMultiSelector,\n",
+                "    PydanticSingleSelector,\n",
+                ")\n",
+                "\n",
+                "\n",
+                "query_engine = RouterQueryEngine(\n",
+                "    selector=PydanticSingleSelector.from_defaults(),\n",
+                "    query_engine_tools=[\n",
+                "        list_tool,\n",
+                "        vector_tool,\n",
+                "    ],\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 11,
+            "metadata": {
+                "scrolled": true,
+                "tags": []
+            },
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "Selecting query engine 0: The first choice is specifically related to summarization questions about Paul Graham's essay on What I Worked On..\n",
+                        "> [get_response] Total LLM token usage: 3411 tokens\n",
+                        "> [get_response] Total embedding token usage: 0 tokens\n",
+                        "> [get_response] Total LLM token usage: 3411 tokens\n",
+                        "> [get_response] Total embedding token usage: 0 tokens\n"
+                    ]
+                },
+                {
+                    "data": {
+                        "text/plain": [
+                            "Response(response=\"\\nThis document is a reflection on the author's experiences with computers and writing, from his early days of programming on an IBM 1401 to his more recent work on a web application builder. He recounts his experiences with programming, painting, and starting companies, and how he eventually came to write essays about his life and the choices he made.\", source_nodes=[NodeWithScore(node=Node(text='\\t\\t\\n\\nWhat I Worked On\\n\\nFebruary 2021\\n\\nBefore college the two main things I worked on, outside of school, were writing and programming. I didn\\'t write essays. I wrote what beginning writers were supposed to write then, and probably still are: short stories. My stories were awful. They had hardly any plot, just characters with strong feelings, which I imagined made them deep.\\n\\nThe first programs I tried writing were on the IBM 1401 that our school district used for what was then called \"data processing.\" This was in 9th grade, so I was 13 or 14. The school district\\'s 1401 happened to be in the basement of our junior high school, and my friend Rich Draves and I got permission to use it. It was like a mini Bond villain\\'s lair down there, with all these alien-looking machines — CPU, disk drives, printer, card reader — sitting up on a raised floor under bright fluorescent lights.\\n\\nThe language we used was an early version of Fortran. You had to type programs on punch cards, then stack them in the card reader and press a button to load the program into memory and run it. The result would ordinarily be to print something on the spectacularly loud printer.\\n\\nI was puzzled by the 1401. I couldn\\'t figure out what to do with it. And in retrospect there\\'s not much I could have done with it. The only form of input to programs was data stored on punched cards, and I didn\\'t have any data stored on punched cards. 
The only other option was to do things that didn\\'t rely on any input, like calculate approximations of pi, but I didn\\'t know enough math to do anything interesting of that type. So I\\'m not surprised I can\\'t remember any programs I wrote, because they can\\'t have done much. My clearest memory is of the moment I learned it was possible for programs not to terminate, when one of mine didn\\'t. On a machine without time-sharing, this was a social as well as a technical error, as the data center manager\\'s expression made clear.\\n\\nWith microcomputers, everything changed. Now you could have a computer sitting right in front of you, on a desk, that could respond to your keystrokes as it was running instead of just churning through a stack of punch cards and then stopping. [1]\\n\\nThe first of my friends to get a microcomputer built it himself. It was sold as a kit by Heathkit. I remember vividly how impressed and envious I felt watching him sitting in front of it, typing programs right into the computer.\\n\\nComputers were expensive in those days and it took me years of nagging before I convinced my father to buy one, a TRS-80, in about 1980. The gold standard then was the Apple II, but a TRS-80 was good enough. This was when I really started programming. I wrote simple games, a program to predict how high my model rockets would fly, and a word processor that my father used to write at least one book. There was only room in memory for about 2 pages of text, so he\\'d write 2 pages at a time and then print them out, but it was a lot better than a typewriter.\\n\\nThough I liked programming, I didn\\'t plan to study it in college. In college I was going to study philosophy, which sounded much more powerful. It seemed, to my naive high school self, to be the study of the ultimate truths, compared to which the things studied in other fields would be mere domain knowledge. 
What I discovered when I got to college was that the other fields took up so much of the space of ideas that there wasn\\'t much left for these supposed ultimate truths. All that seemed left for philosophy were edge cases that people in other fields felt could safely be ignored.\\n\\nI couldn\\'t have put this into words when I was 18. All I knew at the time was that I kept taking philosophy courses and they kept being boring. So I decided to switch to AI.\\n\\nAI was in the air in the mid 1980s, but there were two things especially that made me want to work on it: a novel by Heinlein called The Moon is a Harsh Mistress, which featured an intelligent computer called Mike, and a PBS documentary that showed Terry Winograd using SHRDLU. I haven\\'t tried', doc_id='a0e4f5a8-f852-4807-96f2-76721dc1e57d', embedding=None, doc_hash='694feb4edd2c0029159c7f9faf46df308177a12658e50be42d3a8cb434bef168', extra_info=None, node_info={'start': 0, 'end': 4040, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.NEXT: '3'>: '1992e045-1032-432c-ac01-42947c0727e9'}), score=None), NodeWithScore(node=Node(text='documentary that showed Terry Winograd using SHRDLU. I haven\\'t tried rereading The Moon is a Harsh Mistress, so I don\\'t know how well it has aged, but when I read it I was drawn entirely into its world. It seemed only a matter of time before we\\'d have Mike, and when I saw Winograd using SHRDLU, it seemed like that time would be a few years at most. All you had to do was teach SHRDLU more words.\\n\\nThere weren\\'t any classes in AI at Cornell then, not even graduate classes, so I started trying to teach myself. Which meant learning Lisp, since in those days Lisp was regarded as the language of AI. The commonly used programming languages then were pretty primitive, and programmers\\' ideas correspondingly so. 
The default language at Cornell was a Pascal-like language called PL/I, and the situation was similar elsewhere. Learning Lisp expanded my concept of a program so fast that it was years before I started to have a sense of where the new limits were. This was more like it; this was what I had expected college to do. It wasn\\'t happening in a class, like it was supposed to, but that was ok. For the next couple years I was on a roll. I knew what I was going to do.\\n\\nFor my undergraduate thesis, I reverse-engineered SHRDLU. My God did I love working on that program. It was a pleasing bit of code, but what made it even more exciting was my belief — hard to imagine now, but not unique in 1985 — that it was already climbing the lower slopes of intelligence.\\n\\nI had gotten into a program at Cornell that didn\\'t make you choose a major. You could take whatever classes you liked, and choose whatever you liked to put on your degree. I of course chose \"Artificial Intelligence.\" When I got the actual physical diploma, I was dismayed to find that the quotes had been included, which made them read as scare-quotes. At the time this bothered me, but now it seems amusingly accurate, for reasons I was about to discover.\\n\\nI applied to 3 grad schools: MIT and Yale, which were renowned for AI at the time, and Harvard, which I\\'d visited because Rich Draves went there, and was also home to Bill Woods, who\\'d invented the type of parser I used in my SHRDLU clone. Only Harvard accepted me, so that was where I went.\\n\\nI don\\'t remember the moment it happened, or if there even was a specific moment, but during the first year of grad school I realized that AI, as practiced at the time, was a hoax. 
By which I mean the sort of AI in which a program that\\'s told \"the dog is sitting on the chair\" translates this into some formal representation and adds it to the list of things it knows.\\n\\nWhat these programs really showed was that there\\'s a subset of natural language that\\'s a formal language. But a very proper subset. It was clear that there was an unbridgeable gap between what they could do and actually understanding natural language. It was not, in fact, simply a matter of teaching SHRDLU more words. That whole way of doing AI, with explicit data structures representing concepts, was not going to work. Its brokenness did, as so often happens, generate a lot of opportunities to write papers about various band-aids that could be applied to it, but it was never going to get us Mike.\\n\\nSo I looked around to see what I could salvage from the wreckage of my plans, and there was Lisp. I knew from experience that Lisp was interesting for its own sake and not just for its association with AI, even though that was the main reason people cared about it at the time. So I decided to focus on Lisp. In fact, I decided to write a book about Lisp hacking. It\\'s scary to think how little I knew about Lisp hacking when I started writing that book. But there\\'s nothing like writing a book about something to help you learn it. The book, On Lisp, wasn\\'t published till 1993, but I wrote much of it in grad school.\\n\\nComputer Science is an uneasy alliance between two halves, theory and systems. 
The', doc_id='1992e045-1032-432c-ac01-42947c0727e9', embedding=None, doc_hash='a8b756a8ec7638539582ead732467c48ac3d7bdbfc15e068dd041d4c9582e497', extra_info=None, node_info={'start': 3982, 'end': 7880, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: 'a0e4f5a8-f852-4807-96f2-76721dc1e57d', <DocumentRelationship.NEXT: '3'>: 'e06dfa02-faa5-4617-9d58-00ae88433c58'}), score=None), NodeWithScore(node=Node(text='Science is an uneasy alliance between two halves, theory and systems. The theory people prove things, and the systems people build things. I wanted to build things. I had plenty of respect for theory — indeed, a sneaking suspicion that it was the more admirable of the two halves — but building things seemed so much more exciting.\\n\\nThe problem with systems work, though, was that it didn\\'t last. Any program you wrote today, no matter how good, would be obsolete in a couple decades at best. People might mention your software in footnotes, but no one would actually use it. And indeed, it would seem very feeble work. Only people with a sense of the history of the field would even realize that, in its time, it had been good.\\n\\nThere were some surplus Xerox Dandelions floating around the computer lab at one point. Anyone who wanted one to play around with could have one. I was briefly tempted, but they were so slow by present standards; what was the point? No one else wanted one either, so off they went. That was what happened to systems work.\\n\\nI wanted not just to build things, but to build things that would last.\\n\\nIn this dissatisfied state I went in 1988 to visit Rich Draves at CMU, where he was in grad school. One day I went to visit the Carnegie Institute, where I\\'d spent a lot of time as a kid. While looking at a painting there I realized something that might seem obvious, but was a big surprise to me. 
There, right on the wall, was something you could make that would last. Paintings didn\\'t become obsolete. Some of the best ones were hundreds of years old.\\n\\nAnd moreover this was something you could make a living doing. Not as easily as you could by writing software, of course, but I thought if you were really industrious and lived really cheaply, it had to be possible to make enough to survive. And as an artist you could be truly independent. You wouldn\\'t have a boss, or even need to get research funding.\\n\\nI had always liked looking at paintings. Could I make them? I had no idea. I\\'d never imagined it was even possible. I knew intellectually that people made art — that it didn\\'t just appear spontaneously — but it was as if the people who made it were a different species. They either lived long ago or were mysterious geniuses doing strange things in profiles in Life magazine. The idea of actually being able to make art, to put that verb before that noun, seemed almost miraculous.\\n\\nThat fall I started taking art classes at Harvard. Grad students could take classes in any department, and my advisor, Tom Cheatham, was very easy going. If he even knew about the strange classes I was taking, he never said anything.\\n\\nSo now I was in a PhD program in computer science, yet planning to be an artist, yet also genuinely in love with Lisp hacking and working away at On Lisp. In other words, like many a grad student, I was working energetically on multiple projects that were not my thesis.\\n\\nI didn\\'t see a way out of this situation. I didn\\'t want to drop out of grad school, but how else was I going to get out? I remember when my friend Robert Morris got kicked out of Cornell for writing the internet worm of 1988, I was envious that he\\'d found such a spectacular way to get out of grad school.\\n\\nThen one day in April 1990 a crack appeared in the wall. I ran into professor Cheatham and he asked if I was far enough along to graduate that June. 
I didn\\'t have a word of my dissertation written, but in what must have been the quickest bit of thinking in my life, I decided to take a shot at writing one in the 5 weeks or so that remained before the deadline, reusing parts of On Lisp where I could, and I was able to respond, with no perceptible delay \"Yes, I think so. I\\'ll give you something to read in a few days.\"\\n\\nI picked applications of continuations as the topic. In retrospect I should have written about macros and embedded languages. There\\'s a whole world there that\\'s barely been explored. But all I wanted was to get out of grad school, and my rapidly written', doc_id='e06dfa02-faa5-4617-9d58-00ae88433c58', embedding=None, doc_hash='eb0b51b99800feed137ff8ae00239c9f87ae534d686eaa5684651d0a0f641742', extra_info=None, node_info={'start': 7878, 'end': 11887, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: '1992e045-1032-432c-ac01-42947c0727e9', <DocumentRelationship.NEXT: '3'>: '0fc849a1-2421-414e-9f83-f39e3ac47e41'}), score=None), NodeWithScore(node=Node(text=\"But all I wanted was to get out of grad school, and my rapidly written dissertation sufficed, just barely.\\n\\nMeanwhile I was applying to art schools. I applied to two: RISD in the US, and the Accademia di Belli Arti in Florence, which, because it was the oldest art school, I imagined would be good. RISD accepted me, and I never heard back from the Accademia, so off to Providence I went.\\n\\nI'd applied for the BFA program at RISD, which meant in effect that I had to go to college again. This was not as strange as it sounds, because I was only 25, and art schools are full of people of different ages. RISD counted me as a transfer sophomore and said I had to do the foundation that summer. 
The foundation means the classes that everyone has to take in fundamental subjects like drawing, color, and design.\\n\\nToward the end of the summer I got a big surprise: a letter from the Accademia, which had been delayed because they'd sent it to Cambridge England instead of Cambridge Massachusetts, inviting me to take the entrance exam in Florence that fall. This was now only weeks away. My nice landlady let me leave my stuff in her attic. I had some money saved from consulting work I'd done in grad school; there was probably enough to last a year if I lived cheaply. Now all I had to do was learn Italian.\\n\\nOnly stranieri (foreigners) had to take this entrance exam. In retrospect it may well have been a way of excluding them, because there were so many stranieri attracted by the idea of studying art in Florence that the Italian students would otherwise have been outnumbered. I was in decent shape at painting and drawing from the RISD foundation that summer, but I still don't know how I managed to pass the written exam. I remember that I answered the essay question by writing about Cezanne, and that I cranked up the intellectual level as high as I could to make the most of my limited vocabulary. [2]\\n\\nI'm only up to age 25 and already there are such conspicuous patterns. Here I was, yet again about to attend some august institution in the hopes of learning about some prestigious subject, and yet again about to be disappointed. The students and faculty in the painting department at the Accademia were the nicest people you could imagine, but they had long since arrived at an arrangement whereby the students wouldn't require the faculty to teach anything, and in return the faculty wouldn't require the students to learn anything. And at the same time all involved would adhere outwardly to the conventions of a 19th century atelier. 
We actually had one of those little stoves, fed with kindling, that you see in 19th century studio paintings, and a nude model sitting as close to it as possible without getting burned. Except hardly anyone else painted her besides me. The rest of the students spent their time chatting or occasionally trying to imitate things they'd seen in American art magazines.\\n\\nOur model turned out to live just down the street from me. She made a living from a combination of modelling and making fakes for a local antique dealer. She'd copy an obscure old painting out of a book, and then he'd take the copy and maltreat it to make it look old. [3]\\n\\nWhile I was a student at the Accademia I started painting still lives in my bedroom at night. These paintings were tiny, because the room was, and because I painted them on leftover scraps of canvas, which was all I could afford at the time. Painting still lives is different from painting people, because the subject, as its name suggests, can't move. People can't sit for more than about 15 minutes at a time, and when they do they don't sit very still. So the traditional m.o. for painting people is to know how to paint a generic person, which you then modify to match the specific person you're painting. Whereas a still life you can, if you want, copy pixel by pixel from what you're seeing. 
You don't want to stop there, of course, or you get merely photographic accuracy, and what makes a still life interesting is that\", doc_id='0fc849a1-2421-414e-9f83-f39e3ac47e41', embedding=None, doc_hash='55796dd9e91b31dd897144f980f8536700eb3febd0f6bc5e732db0b2b754cf42', extra_info=None, node_info={'start': 11894, 'end': 15911, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: 'e06dfa02-faa5-4617-9d58-00ae88433c58', <DocumentRelationship.NEXT: '3'>: '8e0bca16-b087-489b-983a-5beaaf393f64'}), score=None), NodeWithScore(node=Node(text='or you get merely photographic accuracy, and what makes a still life interesting is that it\\'s been through a head. You want to emphasize the visual cues that tell you, for example, that the reason the color changes suddenly at a certain point is that it\\'s the edge of an object. By subtly emphasizing such things you can make paintings that are more realistic than photographs not just in some metaphorical sense, but in the strict information-theoretic sense. [4]\\n\\nI liked painting still lives because I was curious about what I was seeing. In everyday life, we aren\\'t consciously aware of much we\\'re seeing. Most visual perception is handled by low-level processes that merely tell your brain \"that\\'s a water droplet\" without telling you details like where the lightest and darkest points are, or \"that\\'s a bush\" without telling you the shape and position of every leaf. This is a feature of brains, not a bug. In everyday life it would be distracting to notice every leaf on every bush. But when you have to paint something, you have to look more closely, and when you do there\\'s a lot to see. 
You can still be noticing new things after days of trying to paint something people usually take for granted, just as you can after days of trying to write an essay about something people usually take for granted.\\n\\nThis is not the only way to paint. I\\'m not 100% sure it\\'s even a good way to paint. But it seemed a good enough bet to be worth trying.\\n\\nOur teacher, professor Ulivi, was a nice guy. He could see I worked hard, and gave me a good grade, which he wrote down in a sort of passport each student had. But the Accademia wasn\\'t teaching me anything except Italian, and my money was running out, so at the end of the first year I went back to the US.\\n\\nI wanted to go back to RISD, but I was now broke and RISD was very expensive, so I decided to get a job for a year and then return to RISD the next fall. I got one at a company called Interleaf, which made software for creating documents. You mean like Microsoft Word? Exactly. That was how I learned that low end software tends to eat high end software. But Interleaf still had a few years to live yet. [5]\\n\\nInterleaf had done something pretty bold. Inspired by Emacs, they\\'d added a scripting language, and even made the scripting language a dialect of Lisp. Now they wanted a Lisp hacker to write things in it. This was the closest thing I\\'ve had to a normal job, and I hereby apologize to my boss and coworkers, because I was a bad employee. Their Lisp was the thinnest icing on a giant C cake, and since I didn\\'t know C and didn\\'t want to learn it, I never understood most of the software. Plus I was terribly irresponsible. This was back when a programming job meant showing up every day during certain working hours. That seemed unnatural to me, and on this point the rest of the world is coming around to my way of thinking, but at the time it caused a lot of friction. 
Toward the end of the year I spent much of my time surreptitiously working on On Lisp, which I had by this time gotten a contract to publish.\\n\\nThe good part was that I got paid huge amounts of money, especially by art student standards. In Florence, after paying my part of the rent, my budget for everything else had been $7 a day. Now I was getting paid more than 4 times that every hour, even when I was just sitting in a meeting. By living cheaply I not only managed to save enough to go back to RISD, but also paid off my college loans.\\n\\nI learned some useful things at Interleaf, though they were mostly about what not to do. I learned that it\\'s better for technology companies to be run by product people than sales people (though sales is a real skill and people who are good at it are really good at it), that it leads to bugs when code is edited by too many people, that cheap office space is no bargain if it\\'s depressing, that planned meetings are inferior to corridor conversations, that big, bureaucratic customers are a dangerous source of money, and that there\\'s not much overlap between conventional', doc_id='8e0bca16-b087-489b-983a-5beaaf393f64', embedding=None, doc_hash='381c03ecd6edff8260cd955e3762b2c94431d15ae62b58591ab3f9fc9342ed58', extra_info=None, node_info={'start': 15894, 'end': 19945, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: '0fc849a1-2421-414e-9f83-f39e3ac47e41', <DocumentRelationship.NEXT: '3'>: '855d5459-1cfe-465e-8c94-f9a1d047f821'}), score=None), NodeWithScore(node=Node(text='are a dangerous source of money, and that there\\'s not much overlap between conventional office hours and the optimal time for hacking, or conventional offices and the optimal place for it.\\n\\nBut the most important thing I learned, and which I used in both Viaweb and Y Combinator, is that the low end eats the high end: that it\\'s good 
to be the \"entry level\" option, even though that will be less prestigious, because if you\\'re not, someone else will be, and will squash you against the ceiling. Which in turn means that prestige is a danger sign.\\n\\nWhen I left to go back to RISD the next fall, I arranged to do freelance work for the group that did projects for customers, and this was how I survived for the next several years. When I came back to visit for a project later on, someone told me about a new thing called HTML, which was, as he described it, a derivative of SGML. Markup language enthusiasts were an occupational hazard at Interleaf and I ignored him, but this HTML thing later became a big part of my life.\\n\\nIn the fall of 1992 I moved back to Providence to continue at RISD. The foundation had merely been intro stuff, and the Accademia had been a (very civilized) joke. Now I was going to see what real art school was like. But alas it was more like the Accademia than not. Better organized, certainly, and a lot more expensive, but it was now becoming clear that art school did not bear the same relationship to art that medical school bore to medicine. At least not the painting department. The textile department, which my next door neighbor belonged to, seemed to be pretty rigorous. No doubt illustration and architecture were too. But painting was post-rigorous. Painting students were supposed to express themselves, which to the more worldly ones meant to try to cook up some sort of distinctive signature style.\\n\\nA signature style is the visual equivalent of what in show business is known as a \"schtick\": something that immediately identifies the work as yours and no one else\\'s. For example, when you see a painting that looks like a certain kind of cartoon, you know it\\'s by Roy Lichtenstein. So if you see a big painting of this type hanging in the apartment of a hedge fund manager, you know he paid millions of dollars for it. 
That\\'s not always why artists have a signature style, but it\\'s usually why buyers pay a lot for such work. [6]\\n\\nThere were plenty of earnest students too: kids who \"could draw\" in high school, and now had come to what was supposed to be the best art school in the country, to learn to draw even better. They tended to be confused and demoralized by what they found at RISD, but they kept going, because painting was what they did. I was not one of the kids who could draw in high school, but at RISD I was definitely closer to their tribe than the tribe of signature style seekers.\\n\\nI learned a lot in the color class I took at RISD, but otherwise I was basically teaching myself to paint, and I could do that for free. So in 1993 I dropped out. I hung around Providence for a bit, and then my college friend Nancy Parmet did me a big favor. A rent-controlled apartment in a building her mother owned in New York was becoming vacant. Did I want it? It wasn\\'t much more than my current place, and New York was supposed to be where the artists were. So yes, I wanted it! [7]\\n\\nAsterix comics begin by zooming in on a tiny corner of Roman Gaul that turns out not to be controlled by the Romans. You can do something similar on a map of New York City: if you zoom in on the Upper East Side, there\\'s a tiny corner that\\'s not rich, or at least wasn\\'t in 1993. It\\'s called Yorkville, and that was my new home. Now I was a New York artist — in the strictly technical sense of making paintings and living in New York.\\n\\nI was nervous about money, because I could sense that Interleaf was on the way down. 
Freelance Lisp hacking work was very rare, and I', doc_id='855d5459-1cfe-465e-8c94-f9a1d047f821', embedding=None, doc_hash='3a298b8e6f42c9af6356b611a9cbe37172455b8047ba2009f49b910488e96f8d', extra_info=None, node_info={'start': 19946, 'end': 23848, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: '8e0bca16-b087-489b-983a-5beaaf393f64', <DocumentRelationship.NEXT: '3'>: '3b199ba9-d04b-473a-8c73-39c293638957'}), score=None), NodeWithScore(node=Node(text='on the way down. Freelance Lisp hacking work was very rare, and I didn\\'t want to have to program in another language, which in those days would have meant C++ if I was lucky. So with my unerring nose for financial opportunity, I decided to write another book on Lisp. This would be a popular book, the sort of book that could be used as a textbook. I imagined myself living frugally off the royalties and spending all my time painting. (The painting on the cover of this book, ANSI Common Lisp, is one that I painted around this time.)\\n\\nThe best thing about New York for me was the presence of Idelle and Julian Weber. Idelle Weber was a painter, one of the early photorealists, and I\\'d taken her painting class at Harvard. I\\'ve never known a teacher more beloved by her students. Large numbers of former students kept in touch with her, including me. After I moved to New York I became her de facto studio assistant.\\n\\nShe liked to paint on big, square canvases, 4 to 5 feet on a side. One day in late 1994 as I was stretching one of these monsters there was something on the radio about a famous fund manager. He wasn\\'t that much older than me, and was super rich. The thought suddenly occurred to me: why don\\'t I become rich? Then I\\'ll be able to work on whatever I want.\\n\\nMeanwhile I\\'d been hearing more and more about this new thing called the World Wide Web. 
Robert Morris showed it to me when I visited him in Cambridge, where he was now in grad school at Harvard. It seemed to me that the web would be a big deal. I\\'d seen what graphical user interfaces had done for the popularity of microcomputers. It seemed like the web would do the same for the internet.\\n\\nIf I wanted to get rich, here was the next train leaving the station. I was right about that part. What I got wrong was the idea. I decided we should start a company to put art galleries online. I can\\'t honestly say, after reading so many Y Combinator applications, that this was the worst startup idea ever, but it was up there. Art galleries didn\\'t want to be online, and still don\\'t, not the fancy ones. That\\'s not how they sell. I wrote some software to generate web sites for galleries, and Robert wrote some to resize images and set up an http server to serve the pages. Then we tried to sign up galleries. To call this a difficult sale would be an understatement. It was difficult to give away. A few galleries let us make sites for them for free, but none paid us.\\n\\nThen some online stores started to appear, and I realized that except for the order buttons they were identical to the sites we\\'d been generating for galleries. This impressive-sounding thing called an \"internet storefront\" was something we already knew how to build.\\n\\nSo in the summer of 1995, after I submitted the camera-ready copy of ANSI Common Lisp to the publishers, we started trying to write software to build online stores. At first this was going to be normal desktop software, which in those days meant Windows software. That was an alarming prospect, because neither of us knew how to write Windows software or wanted to learn. We lived in the Unix world. But we decided we\\'d at least try writing a prototype store builder on Unix. 
Robert wrote a shopping cart, and I wrote a new site generator for stores — in Lisp, of course.\\n\\nWe were working out of Robert\\'s apartment in Cambridge. His roommate was away for big chunks of time, during which I got to sleep in his room. For some reason there was no bed frame or sheets, just a mattress on the floor. One morning as I was lying on this mattress I had an idea that made me sit up like a capital L. What if we ran the software on the server, and let users control it by clicking on links? Then we\\'d never have to write anything to run on users\\' computers. We could generate the sites on the same server we\\'d serve them from. Users wouldn\\'t need anything more than a browser.\\n\\nThis kind of software, known as a web app, is common now, but at the time it wasn\\'t clear that it was even possible. To find out, we decided to try', doc_id='3b199ba9-d04b-473a-8c73-39c293638957', embedding=None, doc_hash='193c210e6a2567bac7400ec1d44e0bcc00777378664c0e3d08495baf7db4d9ac', extra_info=None, node_info={'start': 23870, 'end': 27897, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: '855d5459-1cfe-465e-8c94-f9a1d047f821', <DocumentRelationship.NEXT: '3'>: '669c0218-b2c1-428b-808c-f5408e52dcdf'}), score=None), NodeWithScore(node=Node(text='wasn\\'t clear that it was even possible. To find out, we decided to try making a version of our store builder that you could control through the browser. A couple days later, on August 12, we had one that worked. The UI was horrible, but it proved you could build a whole store through the browser, without any client software or typing anything into the command line on the server.\\n\\nNow we felt like we were really onto something. I had visions of a whole new generation of software working this way. You wouldn\\'t need versions, or ports, or any of that crap. 
At Interleaf there had been a whole group called Release Engineering that seemed to be at least as big as the group that actually wrote the software. Now you could just update the software right on the server.\\n\\nWe started a new company we called Viaweb, after the fact that our software worked via the web, and we got $10,000 in seed funding from Idelle\\'s husband Julian. In return for that and doing the initial legal work and giving us business advice, we gave him 10% of the company. Ten years later this deal became the model for Y Combinator\\'s. We knew founders needed something like this, because we\\'d needed it ourselves.\\n\\nAt this stage I had a negative net worth, because the thousand dollars or so I had in the bank was more than counterbalanced by what I owed the government in taxes. (Had I diligently set aside the proper proportion of the money I\\'d made consulting for Interleaf? No, I had not.) So although Robert had his graduate student stipend, I needed that seed funding to live on.\\n\\nWe originally hoped to launch in September, but we got more ambitious about the software as we worked on it. Eventually we managed to build a WYSIWYG site builder, in the sense that as you were creating pages, they looked exactly like the static ones that would be generated later, except that instead of leading to static pages, the links all referred to closures stored in a hash table on the server.\\n\\nIt helped to have studied art, because the main goal of an online store builder is to make users look legit, and the key to looking legit is high production values. If you get page layouts and fonts and colors right, you can make a guy running a store out of his bedroom look more legit than a big company.\\n\\n(If you\\'re curious why my site looks so old-fashioned, it\\'s because it\\'s still made with this software. It may look clunky today, but in 1996 it was the last word in slick.)\\n\\nIn September, Robert rebelled. 
\"We\\'ve been working on this for a month,\" he said, \"and it\\'s still not done.\" This is funny in retrospect, because he would still be working on it almost 3 years later. But I decided it might be prudent to recruit more programmers, and I asked Robert who else in grad school with him was really good. He recommended Trevor Blackwell, which surprised me at first, because at that point I knew Trevor mainly for his plan to reduce everything in his life to a stack of notecards, which he carried around with him. But Rtm was right, as usual. Trevor turned out to be a frighteningly effective hacker.\\n\\nIt was a lot of fun working with Robert and Trevor. They\\'re the two most independent-minded people I know, and in completely different ways. If you could see inside Rtm\\'s brain it would look like a colonial New England church, and if you could see inside Trevor\\'s it would look like the worst excesses of Austrian Rococo.\\n\\nWe opened for business, with 6 stores, in January 1996. It was just as well we waited a few months, because although we worried we were late, we were actually almost fatally early. There was a lot of talk in the press then about ecommerce, but not many people actually wanted online stores. [8]\\n\\nThere were three main parts to the software: the editor, which people used to build sites and which I wrote, the shopping cart, which Robert wrote, and the manager, which kept track of orders and statistics, and which Trevor wrote. In its time, the editor was one of the best general-purpose site builders. I kept the code tight and didn\\'t have to integrate with any other software except Robert\\'s and Trevor\\'s, so it was quite fun to work on. 
If all I\\'d had to do was', doc_id='669c0218-b2c1-428b-808c-f5408e52dcdf', embedding=None, doc_hash='fd9eb9ceabf16d661afa8f19d64b256664bbc61c886292817fd92dc2456b8eaa', extra_info=None, node_info={'start': 27894, 'end': 32060, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: '3b199ba9-d04b-473a-8c73-39c293638957', <DocumentRelationship.NEXT: '3'>: 'eb14dc48-f3a1-461d-9a49-0d78331dd083'}), score=None), NodeWithScore(node=Node(text='so it was quite fun to work on. If all I\\'d had to do was work on this software, the next 3 years would have been the easiest of my life. Unfortunately I had to do a lot more, all of it stuff I was worse at than programming, and the next 3 years were instead the most stressful.\\n\\nThere were a lot of startups making ecommerce software in the second half of the 90s. We were determined to be the Microsoft Word, not the Interleaf. Which meant being easy to use and inexpensive. It was lucky for us that we were poor, because that caused us to make Viaweb even more inexpensive than we realized. We charged $100 a month for a small store and $300 a month for a big one. This low price was a big attraction, and a constant thorn in the sides of competitors, but it wasn\\'t because of some clever insight that we set the price low. We had no idea what businesses paid for things. $300 a month seemed like a lot of money to us.\\n\\nWe did a lot of things right by accident like that. For example, we did what\\'s now called \"doing things that don\\'t scale,\" although at the time we would have described it as \"being so lame that we\\'re driven to the most desperate measures to get users.\" The most common of which was building stores for them. This seemed particularly humiliating, since the whole raison d\\'etre of our software was that people could use it to make their own stores. 
But anything to get users.\\n\\nWe learned a lot more about retail than we wanted to know. For example, that if you could only have a small image of a man\\'s shirt (and all images were small then by present standards), it was better to have a closeup of the collar than a picture of the whole shirt. The reason I remember learning this was that it meant I had to rescan about 30 images of men\\'s shirts. My first set of scans were so beautiful too.\\n\\nThough this felt wrong, it was exactly the right thing to be doing. Building stores for users taught us about retail, and about how it felt to use our software. I was initially both mystified and repelled by \"business\" and thought we needed a \"business person\" to be in charge of it, but once we started to get users, I was converted, in much the same way I was converted to fatherhood once I had kids. Whatever users wanted, I was all theirs. Maybe one day we\\'d have so many users that I couldn\\'t scan their images for them, but in the meantime there was nothing more important to do.\\n\\nAnother thing I didn\\'t get at the time is that growth rate is the ultimate test of a startup. Our growth rate was fine. We had about 70 stores at the end of 1996 and about 500 at the end of 1997. I mistakenly thought the thing that mattered was the absolute number of users. And that is the thing that matters in the sense that that\\'s how much money you\\'re making, and if you\\'re not making enough, you might go out of business. But in the long term the growth rate takes care of the absolute number. If we\\'d been a startup I was advising at Y Combinator, I would have said: Stop being so stressed out, because you\\'re doing fine. You\\'re growing 7x a year. Just don\\'t hire too many more people and you\\'ll soon be profitable, and then you\\'ll control your own destiny.\\n\\nAlas I hired lots more people, partly because our investors wanted me to, and partly because that\\'s what startups did during the Internet Bubble. 
A company with just a handful of employees would have seemed amateurish. So we didn\\'t reach breakeven until about when Yahoo bought us in the summer of 1998. Which in turn meant we were at the mercy of investors for the entire life of the company. And since both we and our investors were noobs at startups, the result was a mess even by startup standards.\\n\\nIt was a huge relief when Yahoo bought us. In principle our Viaweb stock was valuable. It was a share in a business that was profitable and growing rapidly. But it didn\\'t feel very valuable to me; I had no idea how to value a business, but I was all too keenly aware of the near-death experiences we seemed to have every few months. Nor had I changed my grad student lifestyle significantly since we started. So when', doc_id='eb14dc48-f3a1-461d-9a49-0d78331dd083', embedding=None, doc_hash='9185047072ffc2d0a80db719ed6fa93a69773224a62bbe5c7a8f59eaed4e80e1', extra_info=None, node_info={'start': 32076, 'end': 36162, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: '669c0218-b2c1-428b-808c-f5408e52dcdf', <DocumentRelationship.NEXT: '3'>: '99b3c4c2-aeab-4f5a-9474-916f6c35c9fb'}), score=None), NodeWithScore(node=Node(text=\"few months. Nor had I changed my grad student lifestyle significantly since we started. So when Yahoo bought us it felt like going from rags to riches. Since we were going to California, I bought a car, a yellow 1998 VW GTI. I remember thinking that its leather seats alone were by far the most luxurious thing I owned.\\n\\nThe next year, from the summer of 1998 to the summer of 1999, must have been the least productive of my life. I didn't realize it at the time, but I was worn out from the effort and stress of running Viaweb. For a while after I got to California I tried to continue my usual m.o. 
of programming till 3 in the morning, but fatigue combined with Yahoo's prematurely aged culture and grim cube farm in Santa Clara gradually dragged me down. After a few months it felt disconcertingly like working at Interleaf.\\n\\nYahoo had given us a lot of options when they bought us. At the time I thought Yahoo was so overvalued that they'd never be worth anything, but to my astonishment the stock went up 5x in the next year. I hung on till the first chunk of options vested, then in the summer of 1999 I left. It had been so long since I'd painted anything that I'd half forgotten why I was doing this. My brain had been entirely full of software and men's shirts for 4 years. But I had done this to get rich so I could paint, I reminded myself, and now I was rich, so I should go paint.\\n\\nWhen I said I was leaving, my boss at Yahoo had a long conversation with me about my plans. I told him all about the kinds of pictures I wanted to paint. At the time I was touched that he took such an interest in me. Now I realize it was because he thought I was lying. My options at that point were worth about $2 million a month. If I was leaving that kind of money on the table, it could only be to go and start some new startup, and if I did, I might take people with me. This was the height of the Internet Bubble, and Yahoo was ground zero of it. My boss was at that moment a billionaire. Leaving then to start a new startup must have seemed to him an insanely, and yet also plausibly, ambitious plan.\\n\\nBut I really was quitting to paint, and I started immediately. There was no time to lose. I'd already burned 4 years getting rich. Now when I talk to founders who are leaving after selling their companies, my advice is always the same: take a vacation. That's what I should have done, just gone off somewhere and done nothing for a month or two, but the idea never occurred to me.\\n\\nSo I tried to paint, but I just didn't seem to have any energy or ambition. 
Part of the problem was that I didn't know many people in California. I'd compounded this problem by buying a house up in the Santa Cruz Mountains, with a beautiful view but miles from anywhere. I stuck it out for a few more months, then in desperation I went back to New York, where unless you understand about rent control you'll be surprised to hear I still had my apartment, sealed up like a tomb of my old life. Idelle was in New York at least, and there were other people trying to paint there, even though I didn't know any of them.\\n\\nWhen I got back to New York I resumed my old life, except now I was rich. It was as weird as it sounds. I resumed all my old patterns, except now there were doors where there hadn't been. Now when I was tired of walking, all I had to do was raise my hand, and (unless it was raining) a taxi would stop to pick me up. Now when I walked past charming little restaurants I could go in and order lunch. It was exciting for a while. Painting started to go better. I experimented with a new kind of still life where I'd paint one painting in the old way, then photograph it and print it, blown up, on canvas, and then use that as the underpainting for a second still life, painted from the same objects (which hopefully hadn't rotted yet).\\n\\nMeanwhile I looked for an apartment to buy. Now I could actually choose what\", doc_id='99b3c4c2-aeab-4f5a-9474-916f6c35c9fb', embedding=None, doc_hash='363de309e331985d57cbd2a87662a4b6036a44807432524f2dbfd50a7e6ba7e5', extra_info=None, node_info={'start': 36125, 'end': 40040, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: 'eb14dc48-f3a1-461d-9a49-0d78331dd083', <DocumentRelationship.NEXT: '3'>: '8a9e2472-230f-437f-b720-1494878d5933'}), score=None), NodeWithScore(node=Node(text='I looked for an apartment to buy. Now I could actually choose what neighborhood to live in. 
Where, I asked myself and various real estate agents, is the Cambridge of New York? Aided by occasional visits to actual Cambridge, I gradually realized there wasn\\'t one. Huh.\\n\\nAround this time, in the spring of 2000, I had an idea. It was clear from our experience with Viaweb that web apps were the future. Why not build a web app for making web apps? Why not let people edit code on our server through the browser, and then host the resulting applications for them? [9] You could run all sorts of services on the servers that these applications could use just by making an API call: making and receiving phone calls, manipulating images, taking credit card payments, etc.\\n\\nI got so excited about this idea that I couldn\\'t think about anything else. It seemed obvious that this was the future. I didn\\'t particularly want to start another company, but it was clear that this idea would have to be embodied as one, so I decided to move to Cambridge and start it. I hoped to lure Robert into working on it with me, but there I ran into a hitch. Robert was now a postdoc at MIT, and though he\\'d made a lot of money the last time I\\'d lured him into working on one of my schemes, it had also been a huge time sink. So while he agreed that it sounded like a plausible idea, he firmly refused to work on it.\\n\\nHmph. Well, I\\'d do it myself then. I recruited Dan Giffin, who had worked for Viaweb, and two undergrads who wanted summer jobs, and we got to work trying to build what it\\'s now clear is about twenty companies and several open source projects worth of software. The language for defining applications would of course be a dialect of Lisp. But I wasn\\'t so naive as to assume I could spring an overt Lisp on a general audience; we\\'d hide the parentheses, like Dylan did.\\n\\nBy then there was a name for the kind of company Viaweb was, an \"application service provider,\" or ASP. 
This name didn\\'t last long before it was replaced by \"software as a service,\" but it was current for long enough that I named this new company after it: it was going to be called Aspra.\\n\\nI started working on the application builder, Dan worked on network infrastructure, and the two undergrads worked on the first two services (images and phone calls). But about halfway through the summer I realized I really didn\\'t want to run a company — especially not a big one, which it was looking like this would have to be. I\\'d only started Viaweb because I needed the money. Now that I didn\\'t need money anymore, why was I doing this? If this vision had to be realized as a company, then screw the vision. I\\'d build a subset that could be done as an open source project.\\n\\nMuch to my surprise, the time I spent working on this stuff was not wasted after all. After we started Y Combinator, I would often encounter startups working on parts of this new architecture, and it was very useful to have spent so much time thinking about it and even trying to write some of it.\\n\\nThe subset I would build as an open source project was the new Lisp, whose parentheses I now wouldn\\'t even have to hide. A lot of Lisp hackers dream of building a new Lisp, partly because one of the distinctive features of the language is that it has dialects, and partly, I think, because we have in our minds a Platonic form of Lisp that all existing dialects fall short of. I certainly did. So at the end of the summer Dan and I switched to working on this new dialect of Lisp, which I called Arc, in a house I bought in Cambridge.\\n\\nThe following spring, lightning struck. I was invited to give a talk at a Lisp conference, so I gave one about how we\\'d used Lisp at Viaweb. Afterward I put a postscript file of this talk online, on paulgraham.com, which I\\'d created years before using Viaweb but had never used for anything. In one day it got 30,000 page views. What on earth had happened? 
The referring urls showed', doc_id='8a9e2472-230f-437f-b720-1494878d5933', embedding=None, doc_hash='1a72edc63a9582c55f626fed3dbb1fadbfb1e7e7111e93b6c789c79c66db1e11', extra_info=None, node_info={'start': 40067, 'end': 44018, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: '99b3c4c2-aeab-4f5a-9474-916f6c35c9fb', <DocumentRelationship.NEXT: '3'>: '2d1c3f1b-5fa5-46c9-be20-87ad2dfc9060'}), score=None), NodeWithScore(node=Node(text=\"page views. What on earth had happened? The referring urls showed that someone had posted it on Slashdot. [10]\\n\\nWow, I thought, there's an audience. If I write something and put it on the web, anyone can read it. That may seem obvious now, but it was surprising then. In the print era there was a narrow channel to readers, guarded by fierce monsters known as editors. The only way to get an audience for anything you wrote was to get it published as a book, or in a newspaper or magazine. Now anyone could publish anything.\\n\\nThis had been possible in principle since 1993, but not many people had realized it yet. I had been intimately involved with building the infrastructure of the web for most of that time, and a writer as well, and it had taken me 8 years to realize it. Even then it took me several years to understand the implications. It meant there would be a whole new generation of essays. [11]\\n\\nIn the print era, the channel for publishing essays had been vanishingly small. Except for a few officially anointed thinkers who went to the right parties in New York, the only people allowed to publish essays were specialists writing about their specialties. There were so many essays that had never been written, because there had been no way to publish them. Now they could be, and I was going to write them. 
[12]\\n\\nI've worked on several different things, but to the extent there was a turning point where I figured out what to work on, it was when I started publishing essays online. From then on I knew that whatever else I did, I'd always write essays too.\\n\\nI knew that online essays would be a marginal medium at first. Socially they'd seem more like rants posted by nutjobs on their GeoCities sites than the genteel and beautifully typeset compositions published in The New Yorker. But by this point I knew enough to find that encouraging instead of discouraging.\\n\\nOne of the most conspicuous patterns I've noticed in my life is how well it has worked, for me at least, to work on things that weren't prestigious. Still life has always been the least prestigious form of painting. Viaweb and Y Combinator both seemed lame when we started them. I still get the glassy eye from strangers when they ask what I'm writing, and I explain that it's an essay I'm going to publish on my web site. Even Lisp, though prestigious intellectually in something like the way Latin is, also seems about as hip.\\n\\nIt's not that unprestigious types of work are good per se. But when you find yourself drawn to some kind of work despite its current lack of prestige, it's a sign both that there's something real to be discovered there, and that you have the right kind of motives. Impure motives are a big danger for the ambitious. If anything is going to lead you astray, it will be the desire to impress people. So while working on things that aren't prestigious doesn't guarantee you're on the right track, it at least guarantees you're not on the most common type of wrong one.\\n\\nOver the next several years I wrote lots of essays about all kinds of different topics. O'Reilly reprinted a collection of them as a book, called Hackers & Painters after one of the essays in it. I also worked on spam filters, and did some more painting. 
I used to have dinners for a group of friends every thursday night, which taught me how to cook for groups. And I bought another building in Cambridge, a former candy factory (and later, twas said, porn studio), to use as an office.\\n\\nOne night in October 2003 there was a big party at my house. It was a clever idea of my friend Maria Daniels, who was one of the thursday diners. Three separate hosts would all invite their friends to one party. So for every guest, two thirds of the other guests would be people they didn't know but would probably like. One of the guests was someone I didn't know but would turn out to like a lot: a woman called Jessica Livingston. A couple days later I asked her out.\\n\\nJessica was in charge of marketing at a Boston investment\", doc_id='2d1c3f1b-5fa5-46c9-be20-87ad2dfc9060', embedding=None, doc_hash='597ba7da683f715e046031bf0824aa5d9ff24b64aa2c0902a3ad6ee1e48ea312', extra_info=None, node_info={'start': 44018, 'end': 48016, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: '8a9e2472-230f-437f-b720-1494878d5933', <DocumentRelationship.NEXT: '3'>: '42f8cdbc-3613-409a-85b6-bb1e22c85fef'}), score=None), NodeWithScore(node=Node(text='I asked her out.\\n\\nJessica was in charge of marketing at a Boston investment bank. This bank thought it understood startups, but over the next year, as she met friends of mine from the startup world, she was surprised how different reality was. And how colorful their stories were. So she decided to compile a book of interviews with startup founders.\\n\\nWhen the bank had financial problems and she had to fire half her staff, she started looking for a new job. In early 2005 she interviewed for a marketing job at a Boston VC firm. It took them weeks to make up their minds, and during this time I started telling her about all the things that needed to be fixed about venture capital. 
They should make a larger number of smaller investments instead of a handful of giant ones, they should be funding younger, more technical founders instead of MBAs, they should let the founders remain as CEO, and so on.\\n\\nOne of my tricks for writing essays had always been to give talks. The prospect of having to stand up in front of a group of people and tell them something that won\\'t waste their time is a great spur to the imagination. When the Harvard Computer Society, the undergrad computer club, asked me to give a talk, I decided I would tell them how to start a startup. Maybe they\\'d be able to avoid the worst of the mistakes we\\'d made.\\n\\nSo I gave this talk, in the course of which I told them that the best sources of seed funding were successful startup founders, because then they\\'d be sources of advice too. Whereupon it seemed they were all looking expectantly at me. Horrified at the prospect of having my inbox flooded by business plans (if I\\'d only known), I blurted out \"But not me!\" and went on with the talk. But afterward it occurred to me that I should really stop procrastinating about angel investing. I\\'d been meaning to since Yahoo bought us, and now it was 7 years later and I still hadn\\'t done one angel investment.\\n\\nMeanwhile I had been scheming with Robert and Trevor about projects we could work on together. I missed working with them, and it seemed like there had to be something we could collaborate on.\\n\\nAs Jessica and I were walking home from dinner on March 11, at the corner of Garden and Walker streets, these three threads converged. Screw the VCs who were taking so long to make up their minds. We\\'d start our own investment firm and actually implement the ideas we\\'d been talking about. I\\'d fund it, and Jessica could quit her job and work for it, and we\\'d get Robert and Trevor as partners too. [13]\\n\\nOnce again, ignorance worked in our favor. 
We had no idea how to be angel investors, and in Boston in 2005 there were no Ron Conways to learn from. So we just made what seemed like the obvious choices, and some of the things we did turned out to be novel.\\n\\nThere are multiple components to Y Combinator, and we didn\\'t figure them all out at once. The part we got first was to be an angel firm. In those days, those two words didn\\'t go together. There were VC firms, which were organized companies with people whose job it was to make investments, but they only did big, million dollar investments. And there were angels, who did smaller investments, but these were individuals who were usually focused on other things and made investments on the side. And neither of them helped founders enough in the beginning. We knew how helpless founders were in some respects, because we remembered how helpless we\\'d been. For example, one thing Julian had done for us that seemed to us like magic was to get us set up as a company. We were fine writing fairly difficult software, but actually getting incorporated, with bylaws and stock and all that stuff, how on earth did you do that? Our plan was not only to make seed investments, but to do for startups everything Julian had done for us.\\n\\nYC was not organized as a fund. It was cheap enough to run that we funded it with our own money. That went right by 99% of readers, but professional investors are thinking \"Wow, that means they got all the returns.\" But once again, this was not due to any particular insight on our part. 
We didn\\'t know how VC firms', doc_id='42f8cdbc-3613-409a-85b6-bb1e22c85fef', embedding=None, doc_hash='345510864fca229d2e5a22bf719f4ee408018c13c348307d3d72bc151fe61050', extra_info=None, node_info={'start': 48009, 'end': 52140, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: '2d1c3f1b-5fa5-46c9-be20-87ad2dfc9060', <DocumentRelationship.NEXT: '3'>: 'f9e74e9d-cdd9-43c4-8742-76c38200305f'}), score=None), NodeWithScore(node=Node(text='not due to any particular insight on our part. We didn\\'t know how VC firms were organized. It never occurred to us to try to raise a fund, and if it had, we wouldn\\'t have known where to start. [14]\\n\\nThe most distinctive thing about YC is the batch model: to fund a bunch of startups all at once, twice a year, and then to spend three months focusing intensively on trying to help them. That part we discovered by accident, not merely implicitly but explicitly due to our ignorance about investing. We needed to get experience as investors. What better way, we thought, than to fund a whole bunch of startups at once? We knew undergrads got temporary jobs at tech companies during the summer. Why not organize a summer program where they\\'d start startups instead? We wouldn\\'t feel guilty for being in a sense fake investors, because they would in a similar sense be fake founders. So while we probably wouldn\\'t make much money out of it, we\\'d at least get to practice being investors on them, and they for their part would probably have a more interesting summer than they would working at Microsoft.\\n\\nWe\\'d use the building I owned in Cambridge as our headquarters. 
We\\'d all have dinner there once a week — on tuesdays, since I was already cooking for the thursday diners on thursdays — and after dinner we\\'d bring in experts on startups to give talks.\\n\\nWe knew undergrads were deciding then about summer jobs, so in a matter of days we cooked up something we called the Summer Founders Program, and I posted an announcement on my site, inviting undergrads to apply. I had never imagined that writing essays would be a way to get \"deal flow,\" as investors call it, but it turned out to be the perfect source. [15] We got 225 applications for the Summer Founders Program, and we were surprised to find that a lot of them were from people who\\'d already graduated, or were about to that spring. Already this SFP thing was starting to feel more serious than we\\'d intended.\\n\\nWe invited about 20 of the 225 groups to interview in person, and from those we picked 8 to fund. They were an impressive group. That first batch included reddit, Justin Kan and Emmett Shear, who went on to found Twitch, Aaron Swartz, who had already helped write the RSS spec and would a few years later become a martyr for open access, and Sam Altman, who would later become the second president of YC. I don\\'t think it was entirely luck that the first batch was so good. You had to be pretty bold to sign up for a weird thing like the Summer Founders Program instead of a summer job at a legit place like Microsoft or Goldman Sachs.\\n\\nThe deal for startups was based on a combination of the deal we did with Julian ($10k for 10%) and what Robert said MIT grad students got for the summer ($6k). We invested $6k per founder, which in the typical two-founder case was $12k, in return for 6%. That had to be fair, because it was twice as good as the deal we ourselves had taken. Plus that first summer, which was really hot, Jessica brought the founders free air conditioners. [16]\\n\\nFairly quickly I realized that we had stumbled upon the way to scale startup funding. 
Funding startups in batches was more convenient for us, because it meant we could do things for a lot of startups at once, but being part of a batch was better for the startups too. It solved one of the biggest problems faced by founders: the isolation. Now you not only had colleagues, but colleagues who understood the problems you were facing and could tell you how they were solving them.\\n\\nAs YC grew, we started to notice other advantages of scale. The alumni became a tight community, dedicated to helping one another, and especially the current batch, whose shoes they remembered being in. We also noticed that the startups were becoming one another\\'s customers. We used to refer jokingly to the \"YC GDP,\" but as YC grows this becomes less and less of a joke. Now lots of startups get their initial set of customers almost entirely from among their batchmates.\\n\\nI had not originally', doc_id='f9e74e9d-cdd9-43c4-8742-76c38200305f', embedding=None, doc_hash='03e6a7d2e66c082507e29d8cfe9ec36fa77fe0b1cf352ad5ce85ad91f71a3622', extra_info=None, node_info={'start': 52144, 'end': 56170, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: '42f8cdbc-3613-409a-85b6-bb1e22c85fef', <DocumentRelationship.NEXT: '3'>: 'ab7ed037-4269-4593-b5ff-0ce3d9213cbd'}), score=None), NodeWithScore(node=Node(text='of customers almost entirely from among their batchmates.\\n\\nI had not originally intended YC to be a full-time job. I was going to do three things: hack, write essays, and work on YC. As YC grew, and I grew more excited about it, it started to take up a lot more than a third of my attention. But for the first few years I was still able to work on other things.\\n\\nIn the summer of 2006, Robert and I started working on a new version of Arc. This one was reasonably fast, because it was compiled into Scheme. To test this new Arc, I wrote Hacker News in it. 
It was originally meant to be a news aggregator for startup founders and was called Startup News, but after a few months I got tired of reading about nothing but startups. Plus it wasn\\'t startup founders we wanted to reach. It was future startup founders. So I changed the name to Hacker News and the topic to whatever engaged one\\'s intellectual curiosity.\\n\\nHN was no doubt good for YC, but it was also by far the biggest source of stress for me. If all I\\'d had to do was select and help founders, life would have been so easy. And that implies that HN was a mistake. Surely the biggest source of stress in one\\'s work should at least be something close to the core of the work. Whereas I was like someone who was in pain while running a marathon not from the exertion of running, but because I had a blister from an ill-fitting shoe. When I was dealing with some urgent problem during YC, there was about a 60% chance it had to do with HN, and a 40% chance it had do with everything else combined. [17]\\n\\nAs well as HN, I wrote all of YC\\'s internal software in Arc. But while I continued to work a good deal in Arc, I gradually stopped working on Arc, partly because I didn\\'t have time to, and partly because it was a lot less attractive to mess around with the language now that we had all this infrastructure depending on it. So now my three projects were reduced to two: writing essays and working on YC.\\n\\nYC was different from other kinds of work I\\'ve done. Instead of deciding for myself what to work on, the problems came to me. Every 6 months there was a new batch of startups, and their problems, whatever they were, became our problems. It was very engaging work, because their problems were quite varied, and the good founders were very effective. If you were trying to learn the most you could about startups in the shortest possible time, you couldn\\'t have picked a better way to do it.\\n\\nThere were parts of the job I didn\\'t like. 
Disputes between cofounders, figuring out when people were lying to us, fighting with people who maltreated the startups, and so on. But I worked hard even at the parts I didn\\'t like. I was haunted by something Kevin Hale once said about companies: \"No one works harder than the boss.\" He meant it both descriptively and prescriptively, and it was the second part that scared me. I wanted YC to be good, so if how hard I worked set the upper bound on how hard everyone else worked, I\\'d better work very hard.\\n\\nOne day in 2010, when he was visiting California for interviews, Robert Morris did something astonishing: he offered me unsolicited advice. I can only remember him doing that once before. One day at Viaweb, when I was bent over double from a kidney stone, he suggested that it would be a good idea for him to take me to the hospital. That was what it took for Rtm to offer unsolicited advice. So I remember his exact words very clearly. \"You know,\" he said, \"you should make sure Y Combinator isn\\'t the last cool thing you do.\"\\n\\nAt the time I didn\\'t understand what he meant, but gradually it dawned on me that he was saying I should quit. This seemed strange advice, because YC was doing great. But if there was one thing rarer than Rtm offering advice, it was Rtm being wrong. So this set me thinking. It was true that on my current trajectory, YC would be the last thing I did, because it was only taking up more of my attention. 
It had already eaten Arc, and was in the process of eating', doc_id='ab7ed037-4269-4593-b5ff-0ce3d9213cbd', embedding=None, doc_hash='28da476588fa6a7c04e3fc8d0c8490de5a6aa3f4b46ada11723bd524402b1d33', extra_info=None, node_info={'start': 56162, 'end': 60161, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: 'f9e74e9d-cdd9-43c4-8742-76c38200305f', <DocumentRelationship.NEXT: '3'>: 'cfd50785-c54a-4e07-b474-561541968da9'}), score=None), NodeWithScore(node=Node(text=\"of my attention. It had already eaten Arc, and was in the process of eating essays too. Either YC was my life's work or I'd have to leave eventually. And it wasn't, so I would.\\n\\nIn the summer of 2012 my mother had a stroke, and the cause turned out to be a blood clot caused by colon cancer. The stroke destroyed her balance, and she was put in a nursing home, but she really wanted to get out of it and back to her house, and my sister and I were determined to help her do it. I used to fly up to Oregon to visit her regularly, and I had a lot of time to think on those flights. On one of them I realized I was ready to hand YC over to someone else.\\n\\nI asked Jessica if she wanted to be president, but she didn't, so we decided we'd try to recruit Sam Altman. We talked to Robert and Trevor and we agreed to make it a complete changing of the guard. Up till that point YC had been controlled by the original LLC we four had started. But we wanted YC to last for a long time, and to do that it couldn't be controlled by the founders. So if Sam said yes, we'd let him reorganize YC. Robert and I would retire, and Jessica and Trevor would become ordinary partners.\\n\\nWhen we asked Sam if he wanted to be president of YC, initially he said no. He wanted to start a startup to make nuclear reactors. But I kept at it, and in October 2013 he finally agreed. 
We decided he'd take over starting with the winter 2014 batch. For the rest of 2013 I left running YC more and more to Sam, partly so he could learn the job, and partly because I was focused on my mother, whose cancer had returned.\\n\\nShe died on January 15, 2014. We knew this was coming, but it was still hard when it did.\\n\\nI kept working on YC till March, to help get that batch of startups through Demo Day, then I checked out pretty completely. (I still talk to alumni and to new startups working on things I'm interested in, but that only takes a few hours a week.)\\n\\nWhat should I do next? Rtm's advice hadn't included anything about that. I wanted to do something completely different, so I decided I'd paint. I wanted to see how good I could get if I really focused on it. So the day after I stopped working on YC, I started painting. I was rusty and it took a while to get back into shape, but it was at least completely engaging. [18]\\n\\nI spent most of the rest of 2014 painting. I'd never been able to work so uninterruptedly before, and I got to be better than I had been. Not good enough, but better. Then in November, right in the middle of a painting, I ran out of steam. Up till that point I'd always been curious to see how the painting I was working on would turn out, but suddenly finishing this one seemed like a chore. So I stopped working on it and cleaned my brushes and haven't painted since. So far anyway.\\n\\nI realize that sounds rather wimpy. But attention is a zero sum game. If you can choose what to work on, and you choose a project that's not the best one (or at least a good one) for you, then it's getting in the way of another project that is. And at 50 there was some opportunity cost to screwing around.\\n\\nI started writing essays again, and wrote a bunch of new ones over the next few months. I even wrote a couple that weren't about startups. 
Then in March 2015 I started working on Lisp again.\\n\\nThe distinctive thing about Lisp is that its core is a language defined by writing an interpreter in itself. It wasn't originally intended as a programming language in the ordinary sense. It was meant to be a formal model of computation, an alternative to the Turing machine. If you want to write an interpreter for a language in itself, what's the minimum set of predefined operators you need? The Lisp that John McCarthy invented, or more accurately discovered, is an answer to that question. [19]\\n\\nMcCarthy didn't realize this Lisp could even be used to program computers\", doc_id='cfd50785-c54a-4e07-b474-561541968da9', embedding=None, doc_hash='b6524989f50c19316fcc4135d476deedc62b79ab141d9f650743a6fe5f3558c9', extra_info=None, node_info={'start': 60170, 'end': 64027, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: 'ab7ed037-4269-4593-b5ff-0ce3d9213cbd', <DocumentRelationship.NEXT: '3'>: '14d14357-e5cf-4015-8a65-2cc9fd3fb5c4'}), score=None), NodeWithScore(node=Node(text='didn\\'t realize this Lisp could even be used to program computers till his grad student Steve Russell suggested it. Russell translated McCarthy\\'s interpreter into IBM 704 machine language, and from that point Lisp started also to be a programming language in the ordinary sense. But its origins as a model of computation gave it a power and elegance that other languages couldn\\'t match. It was this that attracted me in college, though I didn\\'t understand why at the time.\\n\\nMcCarthy\\'s 1960 Lisp did nothing more than interpret Lisp expressions. It was missing a lot of things you\\'d want in a programming language. So these had to be added, and when they were, they weren\\'t defined using McCarthy\\'s original axiomatic approach. That wouldn\\'t have been feasible at the time. 
McCarthy tested his interpreter by hand-simulating the execution of programs. But it was already getting close to the limit of interpreters you could test that way — indeed, there was a bug in it that McCarthy had overlooked. To test a more complicated interpreter, you\\'d have had to run it, and computers then weren\\'t powerful enough.\\n\\nNow they are, though. Now you could continue using McCarthy\\'s axiomatic approach till you\\'d defined a complete programming language. And as long as every change you made to McCarthy\\'s Lisp was a discoveredness-preserving transformation, you could, in principle, end up with a complete language that had this quality. Harder to do than to talk about, of course, but if it was possible in principle, why not try? So I decided to take a shot at it. It took 4 years, from March 26, 2015 to October 12, 2019. It was fortunate that I had a precisely defined goal, or it would have been hard to keep at it for so long.\\n\\nI wrote this new Lisp, called Bel, in itself in Arc. That may sound like a contradiction, but it\\'s an indication of the sort of trickery I had to engage in to make this work. By means of an egregious collection of hacks I managed to make something close enough to an interpreter written in itself that could actually run. Not fast, but fast enough to test.\\n\\nI had to ban myself from writing essays during most of this time, or I\\'d never have finished. In late 2015 I spent 3 months writing essays, and when I went back to working on Bel I could barely understand the code. Not so much because it was badly written as because the problem is so convoluted. When you\\'re working on an interpreter written in itself, it\\'s hard to keep track of what\\'s happening at what level, and errors can be practically encrypted by the time you get them.\\n\\nSo I said no more essays till Bel was done. But I told few people about Bel while I was working on it. 
So for years it must have seemed that I was doing nothing, when in fact I was working harder than I\\'d ever worked on anything. Occasionally after wrestling for hours with some gruesome bug I\\'d check Twitter or HN and see someone asking \"Does Paul Graham still code?\"\\n\\nWorking on Bel was hard but satisfying. I worked on it so intensively that at any given time I had a decent chunk of the code in my head and could write more there. I remember taking the boys to the coast on a sunny day in 2015 and figuring out how to deal with some problem involving continuations while I watched them play in the tide pools. It felt like I was doing life right. I remember that because I was slightly dismayed at how novel it felt. The good news is that I had more moments like this over the next few years.\\n\\nIn the summer of 2016 we moved to England. We wanted our kids to see what it was like living in another country, and since I was a British citizen by birth, that seemed the obvious choice. We only meant to stay for a year, but we liked it so much that we still live there. So most of Bel was written in England.\\n\\nIn the fall of 2019, Bel was finally finished. 
Like McCarthy\\'s original Lisp, it\\'s a spec rather than an implementation, although like McCarthy\\'s Lisp it\\'s a spec expressed as code.\\n\\nNow that I could write essays again, I wrote a bunch about topics I\\'d had stacked', doc_id='14d14357-e5cf-4015-8a65-2cc9fd3fb5c4', embedding=None, doc_hash='fdbb519247f837aba04548ab0aec6383f33f41fbbabe6aacfe83b2f52ad699c5', extra_info=None, node_info={'start': 64035, 'end': 68071, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: 'cfd50785-c54a-4e07-b474-561541968da9', <DocumentRelationship.NEXT: '3'>: '81530055-3701-4064-8170-c2c3d42f05b7'}), score=None), NodeWithScore(node=Node(text=\"I could write essays again, I wrote a bunch about topics I'd had stacked up. I kept writing essays through 2020, but I also started to think about other things I could work on. How should I choose what to do? Well, how had I chosen what to work on in the past? I wrote an essay for myself to answer that question, and I was surprised how long and messy the answer turned out to be. If this surprised me, who'd lived it, then I thought perhaps it would be interesting to other people, and encouraging to those with similarly messy lives. So I wrote a more detailed version for others to read, and this is the last sentence of it.\\n\\n\\n\\n\\n\\n\\n\\n\\n\\nNotes\\n\\n[1] My experience skipped a step in the evolution of computers: time-sharing machines with interactive OSes. I went straight from batch processing to microcomputers, which made microcomputers seem all the more exciting.\\n\\n[2] Italian words for abstract concepts can nearly always be predicted from their English cognates (except for occasional traps like polluzione). It's the everyday words that differ. 
So if you string together a lot of abstract concepts with a few simple verbs, you can make a little Italian go a long way.\\n\\n[3] I lived at Piazza San Felice 4, so my walk to the Accademia went straight down the spine of old Florence: past the Pitti, across the bridge, past Orsanmichele, between the Duomo and the Baptistery, and then up Via Ricasoli to Piazza San Marco. I saw Florence at street level in every possible condition, from empty dark winter evenings to sweltering summer days when the streets were packed with tourists.\\n\\n[4] You can of course paint people like still lives if you want to, and they're willing. That sort of portrait is arguably the apex of still life painting, though the long sitting does tend to produce pained expressions in the sitters.\\n\\n[5] Interleaf was one of many companies that had smart people and built impressive technology, and yet got crushed by Moore's Law. In the 1990s the exponential growth in the power of commodity (i.e. Intel) processors rolled up high-end, special-purpose hardware and software companies like a bulldozer.\\n\\n[6] The signature style seekers at RISD weren't specifically mercenary. In the art world, money and coolness are tightly coupled. Anything expensive comes to be seen as cool, and anything seen as cool will soon become equally expensive.\\n\\n[7] Technically the apartment wasn't rent-controlled but rent-stabilized, but this is a refinement only New Yorkers would know or care about. The point is that it was really cheap, less than half market price.\\n\\n[8] Most software you can launch as soon as it's done. But when the software is an online store builder and you're hosting the stores, if you don't have any users yet, that fact will be painfully obvious. 
So before we could launch publicly we had to launch privately, in the sense of recruiting an initial set of users and making sure they had decent-looking stores.\\n\\n[9] We'd had a code editor in Viaweb for users to define their own page styles. They didn't know it, but they were editing Lisp expressions underneath. But this wasn't an app editor, because the code ran when the merchants' sites were generated, not when shoppers visited them.\\n\\n[10] This was the first instance of what is now a familiar experience, and so was what happened next, when I read the comments and found they were full of angry people. How could I claim that Lisp was better than other languages? Weren't they all Turing complete? People who see the responses to essays I write sometimes tell me how sorry they feel for me, but I'm not exaggerating when I reply that it has always been like this, since the very beginning. It comes with the territory. An essay must tell readers things they don't already know, and some people dislike being told such things.\\n\\n[11] People put plenty of stuff on the internet in the 90s of course, but putting something online is not the same as publishing it online. Publishing online means you\", doc_id='81530055-3701-4064-8170-c2c3d42f05b7', embedding=None, doc_hash='7ded19f889627d27a2c132b3d5c4f9dac587cd2407c5624d421e292f2b3ab1d7', extra_info=None, node_info={'start': 68067, 'end': 72048, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: '14d14357-e5cf-4015-8a65-2cc9fd3fb5c4', <DocumentRelationship.NEXT: '3'>: 'c851c700-5226-42a9-87da-e89e548e381e'}), score=None), NodeWithScore(node=Node(text='putting something online is not the same as publishing it online. 
Publishing online means you treat the online version as the (or at least a) primary version.\\n\\n[12] There is a general lesson here that our experience with Y Combinator also teaches: Customs continue to constrain you long after the restrictions that caused them have disappeared. Customary VC practice had once, like the customs about publishing essays, been based on real constraints. Startups had once been much more expensive to start, and proportionally rare. Now they could be cheap and common, but the VCs\\' customs still reflected the old world, just as customs about writing essays still reflected the constraints of the print era.\\n\\nWhich in turn implies that people who are independent-minded (i.e. less influenced by custom) will have an advantage in fields affected by rapid change (where customs are more likely to be obsolete).\\n\\nHere\\'s an interesting point, though: you can\\'t always predict which fields will be affected by rapid change. Obviously software and venture capital will be, but who would have predicted that essay writing would be?\\n\\n[13] Y Combinator was not the original name. At first we were called Cambridge Seed. But we didn\\'t want a regional name, in case someone copied us in Silicon Valley, so we renamed ourselves after one of the coolest tricks in the lambda calculus, the Y combinator.\\n\\nI picked orange as our color partly because it\\'s the warmest, and partly because no VC used it. In 2005 all the VCs used staid colors like maroon, navy blue, and forest green, because they were trying to appeal to LPs, not founders. The YC logo itself is an inside joke: the Viaweb logo had been a white V on a red circle, so I made the YC logo a white Y on an orange square.\\n\\n[14] YC did become a fund for a couple years starting in 2009, because it was getting so big I could no longer afford to fund it personally. 
But after Heroku got bought we had enough money to go back to being self-funded.\\n\\n[15] I\\'ve never liked the term \"deal flow,\" because it implies that the number of new startups at any given time is fixed. This is not only false, but it\\'s the purpose of YC to falsify it, by causing startups to be founded that would not otherwise have existed.\\n\\n[16] She reports that they were all different shapes and sizes, because there was a run on air conditioners and she had to get whatever she could, but that they were all heavier than she could carry now.\\n\\n[17] Another problem with HN was a bizarre edge case that occurs when you both write essays and run a forum. When you run a forum, you\\'re assumed to see if not every conversation, at least every conversation involving you. And when you write essays, people post highly imaginative misinterpretations of them on forums. Individually these two phenomena are tedious but bearable, but the combination is disastrous. You actually have to respond to the misinterpretations, because the assumption that you\\'re present in the conversation means that not responding to any sufficiently upvoted misinterpretation reads as a tacit admission that it\\'s correct. But that in turn encourages more; anyone who wants to pick a fight with you senses that now is their chance.\\n\\n[18] The worst thing about leaving YC was not working with Jessica anymore. We\\'d been working on YC almost the whole time we\\'d known each other, and we\\'d neither tried nor wanted to separate it from our personal lives, so leaving was like pulling up a deeply rooted tree.\\n\\n[19] One way to get more precise about the concept of invented vs discovered is to talk about space aliens. Any sufficiently advanced alien civilization would certainly know about the Pythagorean theorem, for example. 
I believe, though with less certainty, that they would also know about the Lisp in McCarthy\\'s 1960 paper.\\n\\nBut if so there\\'s no reason to suppose that this is the limit of the language that might be known to them. Presumably aliens need numbers and errors and I/O too. So it seems likely there exists at least one path out of McCarthy\\'s', doc_id='c851c700-5226-42a9-87da-e89e548e381e', embedding=None, doc_hash='8c5bdf9883547bcebedcb406fa77ba918defaa7bbedece21114163c578ac0729', extra_info=None, node_info={'start': 72029, 'end': 76071, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: '81530055-3701-4064-8170-c2c3d42f05b7', <DocumentRelationship.NEXT: '3'>: '4972fc4f-6990-4659-a20c-98a8147373d8'}), score=None), NodeWithScore(node=Node(text=\"So it seems likely there exists at least one path out of McCarthy's Lisp along which discoveredness is preserved.\\n\\n\\n\\nThanks to Trevor Blackwell, John Collison, Patrick Collison, Daniel Gackle, Ralph Hazell, Jessica Livingston, Robert Morris, and Harj Taggar for reading drafts of this.\\n\\n\\n\\n\", doc_id='4972fc4f-6990-4659-a20c-98a8147373d8', embedding=None, doc_hash='9d65b6bc997db43cee91e2c4b7380e4efc1059937154b60dc2e45b8aa489e59e', extra_info=None, node_info={'start': 76083, 'end': 76372, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: 'c851c700-5226-42a9-87da-e89e548e381e'}), score=None)], extra_info={'a0e4f5a8-f852-4807-96f2-76721dc1e57d': None, '1992e045-1032-432c-ac01-42947c0727e9': None, 'e06dfa02-faa5-4617-9d58-00ae88433c58': None, '0fc849a1-2421-414e-9f83-f39e3ac47e41': None, '8e0bca16-b087-489b-983a-5beaaf393f64': None, '855d5459-1cfe-465e-8c94-f9a1d047f821': None, '3b199ba9-d04b-473a-8c73-39c293638957': None, '669c0218-b2c1-428b-808c-f5408e52dcdf': None, 
'eb14dc48-f3a1-461d-9a49-0d78331dd083': None, '99b3c4c2-aeab-4f5a-9474-916f6c35c9fb': None, '8a9e2472-230f-437f-b720-1494878d5933': None, '2d1c3f1b-5fa5-46c9-be20-87ad2dfc9060': None, '42f8cdbc-3613-409a-85b6-bb1e22c85fef': None, 'f9e74e9d-cdd9-43c4-8742-76c38200305f': None, 'ab7ed037-4269-4593-b5ff-0ce3d9213cbd': None, 'cfd50785-c54a-4e07-b474-561541968da9': None, '14d14357-e5cf-4015-8a65-2cc9fd3fb5c4': None, '81530055-3701-4064-8170-c2c3d42f05b7': None, 'c851c700-5226-42a9-87da-e89e548e381e': None, '4972fc4f-6990-4659-a20c-98a8147373d8': None})"
+                        ]
+                    },
+                    "execution_count": 11,
+                    "metadata": {},
+                    "output_type": "execute_result"
+                }
+            ],
+            "source": [
+                "query_engine.query(\"What is the summary of the document?\")"
+            ]
+        },
+        {
+            "cell_type": "markdown",
+            "metadata": {
+                "tags": []
+            },
+            "source": [
+                "query_engine.query(\"What did Paul Graham do after RISD?\")"
+            ]
+        },
+        {
+            "cell_type": "markdown",
+            "metadata": {},
+            "source": [
+                "#### LLMSingleSelector\n",
+                "\n",
+                "Use OpenAI (or any other LLM) to parse generated JSON under the hood to select a sub-index for routing."
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 13,
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "query_engine = RouterQueryEngine(\n",
+                "    selector=LLMSingleSelector.from_defaults(),\n",
+                "    query_engine_tools=[\n",
+                "        list_tool,\n",
+                "        vector_tool,\n",
+                "    ],\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 14,
+            "metadata": {},
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "Selecting query engine 0: It provides a summary of the document..\n",
+                        "> [get_response] Total LLM token usage: 3411 tokens\n",
+                        "> [get_response] Total embedding token usage: 0 tokens\n",
+                        "> [get_response] Total LLM token usage: 3411 tokens\n",
+                        "> [get_response] Total embedding token usage: 0 tokens\n"
+                    ]
+                },
+                {
+                    "data": {
+                        "text/plain": [
+                            "Response(response=\"\\nThis document is a reflection on the author's experiences with computers and writing, from his early days of programming on an IBM 1401 to his more recent work on a web application builder. He recounts his experiences with programming, painting, and starting companies, and how he eventually came to write essays about his life and the choices he made.\", source_nodes=[NodeWithScore(node=Node(text='\\t\\t\\n\\nWhat I Worked On\\n\\nFebruary 2021\\n\\nBefore college the two main things I worked on, outside of school, were writing and programming. I didn\\'t write essays. I wrote what beginning writers were supposed to write then, and probably still are: short stories. My stories were awful. They had hardly any plot, just characters with strong feelings, which I imagined made them deep.\\n\\nThe first programs I tried writing were on the IBM 1401 that our school district used for what was then called \"data processing.\" This was in 9th grade, so I was 13 or 14. The school district\\'s 1401 happened to be in the basement of our junior high school, and my friend Rich Draves and I got permission to use it. It was like a mini Bond villain\\'s lair down there, with all these alien-looking machines — CPU, disk drives, printer, card reader — sitting up on a raised floor under bright fluorescent lights.\\n\\nThe language we used was an early version of Fortran. You had to type programs on punch cards, then stack them in the card reader and press a button to load the program into memory and run it. The result would ordinarily be to print something on the spectacularly loud printer.\\n\\nI was puzzled by the 1401. I couldn\\'t figure out what to do with it. And in retrospect there\\'s not much I could have done with it. The only form of input to programs was data stored on punched cards, and I didn\\'t have any data stored on punched cards. 
The only other option was to do things that didn\\'t rely on any input, like calculate approximations of pi, but I didn\\'t know enough math to do anything interesting of that type. So I\\'m not surprised I can\\'t remember any programs I wrote, because they can\\'t have done much. My clearest memory is of the moment I learned it was possible for programs not to terminate, when one of mine didn\\'t. On a machine without time-sharing, this was a social as well as a technical error, as the data center manager\\'s expression made clear.\\n\\nWith microcomputers, everything changed. Now you could have a computer sitting right in front of you, on a desk, that could respond to your keystrokes as it was running instead of just churning through a stack of punch cards and then stopping. [1]\\n\\nThe first of my friends to get a microcomputer built it himself. It was sold as a kit by Heathkit. I remember vividly how impressed and envious I felt watching him sitting in front of it, typing programs right into the computer.\\n\\nComputers were expensive in those days and it took me years of nagging before I convinced my father to buy one, a TRS-80, in about 1980. The gold standard then was the Apple II, but a TRS-80 was good enough. This was when I really started programming. I wrote simple games, a program to predict how high my model rockets would fly, and a word processor that my father used to write at least one book. There was only room in memory for about 2 pages of text, so he\\'d write 2 pages at a time and then print them out, but it was a lot better than a typewriter.\\n\\nThough I liked programming, I didn\\'t plan to study it in college. In college I was going to study philosophy, which sounded much more powerful. It seemed, to my naive high school self, to be the study of the ultimate truths, compared to which the things studied in other fields would be mere domain knowledge. 
What I discovered when I got to college was that the other fields took up so much of the space of ideas that there wasn\\'t much left for these supposed ultimate truths. All that seemed left for philosophy were edge cases that people in other fields felt could safely be ignored.\\n\\nI couldn\\'t have put this into words when I was 18. All I knew at the time was that I kept taking philosophy courses and they kept being boring. So I decided to switch to AI.\\n\\nAI was in the air in the mid 1980s, but there were two things especially that made me want to work on it: a novel by Heinlein called The Moon is a Harsh Mistress, which featured an intelligent computer called Mike, and a PBS documentary that showed Terry Winograd using SHRDLU. I haven\\'t tried', doc_id='a0e4f5a8-f852-4807-96f2-76721dc1e57d', embedding=None, doc_hash='694feb4edd2c0029159c7f9faf46df308177a12658e50be42d3a8cb434bef168', extra_info=None, node_info={'start': 0, 'end': 4040, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.NEXT: '3'>: '1992e045-1032-432c-ac01-42947c0727e9'}), score=None), NodeWithScore(node=Node(text='documentary that showed Terry Winograd using SHRDLU. I haven\\'t tried rereading The Moon is a Harsh Mistress, so I don\\'t know how well it has aged, but when I read it I was drawn entirely into its world. It seemed only a matter of time before we\\'d have Mike, and when I saw Winograd using SHRDLU, it seemed like that time would be a few years at most. All you had to do was teach SHRDLU more words.\\n\\nThere weren\\'t any classes in AI at Cornell then, not even graduate classes, so I started trying to teach myself. Which meant learning Lisp, since in those days Lisp was regarded as the language of AI. The commonly used programming languages then were pretty primitive, and programmers\\' ideas correspondingly so. 
The default language at Cornell was a Pascal-like language called PL/I, and the situation was similar elsewhere. Learning Lisp expanded my concept of a program so fast that it was years before I started to have a sense of where the new limits were. This was more like it; this was what I had expected college to do. It wasn\\'t happening in a class, like it was supposed to, but that was ok. For the next couple years I was on a roll. I knew what I was going to do.\\n\\nFor my undergraduate thesis, I reverse-engineered SHRDLU. My God did I love working on that program. It was a pleasing bit of code, but what made it even more exciting was my belief — hard to imagine now, but not unique in 1985 — that it was already climbing the lower slopes of intelligence.\\n\\nI had gotten into a program at Cornell that didn\\'t make you choose a major. You could take whatever classes you liked, and choose whatever you liked to put on your degree. I of course chose \"Artificial Intelligence.\" When I got the actual physical diploma, I was dismayed to find that the quotes had been included, which made them read as scare-quotes. At the time this bothered me, but now it seems amusingly accurate, for reasons I was about to discover.\\n\\nI applied to 3 grad schools: MIT and Yale, which were renowned for AI at the time, and Harvard, which I\\'d visited because Rich Draves went there, and was also home to Bill Woods, who\\'d invented the type of parser I used in my SHRDLU clone. Only Harvard accepted me, so that was where I went.\\n\\nI don\\'t remember the moment it happened, or if there even was a specific moment, but during the first year of grad school I realized that AI, as practiced at the time, was a hoax. 
By which I mean the sort of AI in which a program that\\'s told \"the dog is sitting on the chair\" translates this into some formal representation and adds it to the list of things it knows.\\n\\nWhat these programs really showed was that there\\'s a subset of natural language that\\'s a formal language. But a very proper subset. It was clear that there was an unbridgeable gap between what they could do and actually understanding natural language. It was not, in fact, simply a matter of teaching SHRDLU more words. That whole way of doing AI, with explicit data structures representing concepts, was not going to work. Its brokenness did, as so often happens, generate a lot of opportunities to write papers about various band-aids that could be applied to it, but it was never going to get us Mike.\\n\\nSo I looked around to see what I could salvage from the wreckage of my plans, and there was Lisp. I knew from experience that Lisp was interesting for its own sake and not just for its association with AI, even though that was the main reason people cared about it at the time. So I decided to focus on Lisp. In fact, I decided to write a book about Lisp hacking. It\\'s scary to think how little I knew about Lisp hacking when I started writing that book. But there\\'s nothing like writing a book about something to help you learn it. The book, On Lisp, wasn\\'t published till 1993, but I wrote much of it in grad school.\\n\\nComputer Science is an uneasy alliance between two halves, theory and systems. 
The', doc_id='1992e045-1032-432c-ac01-42947c0727e9', embedding=None, doc_hash='a8b756a8ec7638539582ead732467c48ac3d7bdbfc15e068dd041d4c9582e497', extra_info=None, node_info={'start': 3982, 'end': 7880, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: 'a0e4f5a8-f852-4807-96f2-76721dc1e57d', <DocumentRelationship.NEXT: '3'>: 'e06dfa02-faa5-4617-9d58-00ae88433c58'}), score=None), NodeWithScore(node=Node(text='Science is an uneasy alliance between two halves, theory and systems. The theory people prove things, and the systems people build things. I wanted to build things. I had plenty of respect for theory — indeed, a sneaking suspicion that it was the more admirable of the two halves — but building things seemed so much more exciting.\\n\\nThe problem with systems work, though, was that it didn\\'t last. Any program you wrote today, no matter how good, would be obsolete in a couple decades at best. People might mention your software in footnotes, but no one would actually use it. And indeed, it would seem very feeble work. Only people with a sense of the history of the field would even realize that, in its time, it had been good.\\n\\nThere were some surplus Xerox Dandelions floating around the computer lab at one point. Anyone who wanted one to play around with could have one. I was briefly tempted, but they were so slow by present standards; what was the point? No one else wanted one either, so off they went. That was what happened to systems work.\\n\\nI wanted not just to build things, but to build things that would last.\\n\\nIn this dissatisfied state I went in 1988 to visit Rich Draves at CMU, where he was in grad school. One day I went to visit the Carnegie Institute, where I\\'d spent a lot of time as a kid. While looking at a painting there I realized something that might seem obvious, but was a big surprise to me. 
There, right on the wall, was something you could make that would last. Paintings didn\\'t become obsolete. Some of the best ones were hundreds of years old.\\n\\nAnd moreover this was something you could make a living doing. Not as easily as you could by writing software, of course, but I thought if you were really industrious and lived really cheaply, it had to be possible to make enough to survive. And as an artist you could be truly independent. You wouldn\\'t have a boss, or even need to get research funding.\\n\\nI had always liked looking at paintings. Could I make them? I had no idea. I\\'d never imagined it was even possible. I knew intellectually that people made art — that it didn\\'t just appear spontaneously — but it was as if the people who made it were a different species. They either lived long ago or were mysterious geniuses doing strange things in profiles in Life magazine. The idea of actually being able to make art, to put that verb before that noun, seemed almost miraculous.\\n\\nThat fall I started taking art classes at Harvard. Grad students could take classes in any department, and my advisor, Tom Cheatham, was very easy going. If he even knew about the strange classes I was taking, he never said anything.\\n\\nSo now I was in a PhD program in computer science, yet planning to be an artist, yet also genuinely in love with Lisp hacking and working away at On Lisp. In other words, like many a grad student, I was working energetically on multiple projects that were not my thesis.\\n\\nI didn\\'t see a way out of this situation. I didn\\'t want to drop out of grad school, but how else was I going to get out? I remember when my friend Robert Morris got kicked out of Cornell for writing the internet worm of 1988, I was envious that he\\'d found such a spectacular way to get out of grad school.\\n\\nThen one day in April 1990 a crack appeared in the wall. I ran into professor Cheatham and he asked if I was far enough along to graduate that June. 
I didn\\'t have a word of my dissertation written, but in what must have been the quickest bit of thinking in my life, I decided to take a shot at writing one in the 5 weeks or so that remained before the deadline, reusing parts of On Lisp where I could, and I was able to respond, with no perceptible delay \"Yes, I think so. I\\'ll give you something to read in a few days.\"\\n\\nI picked applications of continuations as the topic. In retrospect I should have written about macros and embedded languages. There\\'s a whole world there that\\'s barely been explored. But all I wanted was to get out of grad school, and my rapidly written', doc_id='e06dfa02-faa5-4617-9d58-00ae88433c58', embedding=None, doc_hash='eb0b51b99800feed137ff8ae00239c9f87ae534d686eaa5684651d0a0f641742', extra_info=None, node_info={'start': 7878, 'end': 11887, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: '1992e045-1032-432c-ac01-42947c0727e9', <DocumentRelationship.NEXT: '3'>: '0fc849a1-2421-414e-9f83-f39e3ac47e41'}), score=None), NodeWithScore(node=Node(text=\"But all I wanted was to get out of grad school, and my rapidly written dissertation sufficed, just barely.\\n\\nMeanwhile I was applying to art schools. I applied to two: RISD in the US, and the Accademia di Belli Arti in Florence, which, because it was the oldest art school, I imagined would be good. RISD accepted me, and I never heard back from the Accademia, so off to Providence I went.\\n\\nI'd applied for the BFA program at RISD, which meant in effect that I had to go to college again. This was not as strange as it sounds, because I was only 25, and art schools are full of people of different ages. RISD counted me as a transfer sophomore and said I had to do the foundation that summer. 
The foundation means the classes that everyone has to take in fundamental subjects like drawing, color, and design.\\n\\nToward the end of the summer I got a big surprise: a letter from the Accademia, which had been delayed because they'd sent it to Cambridge England instead of Cambridge Massachusetts, inviting me to take the entrance exam in Florence that fall. This was now only weeks away. My nice landlady let me leave my stuff in her attic. I had some money saved from consulting work I'd done in grad school; there was probably enough to last a year if I lived cheaply. Now all I had to do was learn Italian.\\n\\nOnly stranieri (foreigners) had to take this entrance exam. In retrospect it may well have been a way of excluding them, because there were so many stranieri attracted by the idea of studying art in Florence that the Italian students would otherwise have been outnumbered. I was in decent shape at painting and drawing from the RISD foundation that summer, but I still don't know how I managed to pass the written exam. I remember that I answered the essay question by writing about Cezanne, and that I cranked up the intellectual level as high as I could to make the most of my limited vocabulary. [2]\\n\\nI'm only up to age 25 and already there are such conspicuous patterns. Here I was, yet again about to attend some august institution in the hopes of learning about some prestigious subject, and yet again about to be disappointed. The students and faculty in the painting department at the Accademia were the nicest people you could imagine, but they had long since arrived at an arrangement whereby the students wouldn't require the faculty to teach anything, and in return the faculty wouldn't require the students to learn anything. And at the same time all involved would adhere outwardly to the conventions of a 19th century atelier. 
We actually had one of those little stoves, fed with kindling, that you see in 19th century studio paintings, and a nude model sitting as close to it as possible without getting burned. Except hardly anyone else painted her besides me. The rest of the students spent their time chatting or occasionally trying to imitate things they'd seen in American art magazines.\\n\\nOur model turned out to live just down the street from me. She made a living from a combination of modelling and making fakes for a local antique dealer. She'd copy an obscure old painting out of a book, and then he'd take the copy and maltreat it to make it look old. [3]\\n\\nWhile I was a student at the Accademia I started painting still lives in my bedroom at night. These paintings were tiny, because the room was, and because I painted them on leftover scraps of canvas, which was all I could afford at the time. Painting still lives is different from painting people, because the subject, as its name suggests, can't move. People can't sit for more than about 15 minutes at a time, and when they do they don't sit very still. So the traditional m.o. for painting people is to know how to paint a generic person, which you then modify to match the specific person you're painting. Whereas a still life you can, if you want, copy pixel by pixel from what you're seeing. 
You don't want to stop there, of course, or you get merely photographic accuracy, and what makes a still life interesting is that\", doc_id='0fc849a1-2421-414e-9f83-f39e3ac47e41', embedding=None, doc_hash='55796dd9e91b31dd897144f980f8536700eb3febd0f6bc5e732db0b2b754cf42', extra_info=None, node_info={'start': 11894, 'end': 15911, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: 'e06dfa02-faa5-4617-9d58-00ae88433c58', <DocumentRelationship.NEXT: '3'>: '8e0bca16-b087-489b-983a-5beaaf393f64'}), score=None), NodeWithScore(node=Node(text='or you get merely photographic accuracy, and what makes a still life interesting is that it\\'s been through a head. You want to emphasize the visual cues that tell you, for example, that the reason the color changes suddenly at a certain point is that it\\'s the edge of an object. By subtly emphasizing such things you can make paintings that are more realistic than photographs not just in some metaphorical sense, but in the strict information-theoretic sense. [4]\\n\\nI liked painting still lives because I was curious about what I was seeing. In everyday life, we aren\\'t consciously aware of much we\\'re seeing. Most visual perception is handled by low-level processes that merely tell your brain \"that\\'s a water droplet\" without telling you details like where the lightest and darkest points are, or \"that\\'s a bush\" without telling you the shape and position of every leaf. This is a feature of brains, not a bug. In everyday life it would be distracting to notice every leaf on every bush. But when you have to paint something, you have to look more closely, and when you do there\\'s a lot to see. 
You can still be noticing new things after days of trying to paint something people usually take for granted, just as you can after days of trying to write an essay about something people usually take for granted.\\n\\nThis is not the only way to paint. I\\'m not 100% sure it\\'s even a good way to paint. But it seemed a good enough bet to be worth trying.\\n\\nOur teacher, professor Ulivi, was a nice guy. He could see I worked hard, and gave me a good grade, which he wrote down in a sort of passport each student had. But the Accademia wasn\\'t teaching me anything except Italian, and my money was running out, so at the end of the first year I went back to the US.\\n\\nI wanted to go back to RISD, but I was now broke and RISD was very expensive, so I decided to get a job for a year and then return to RISD the next fall. I got one at a company called Interleaf, which made software for creating documents. You mean like Microsoft Word? Exactly. That was how I learned that low end software tends to eat high end software. But Interleaf still had a few years to live yet. [5]\\n\\nInterleaf had done something pretty bold. Inspired by Emacs, they\\'d added a scripting language, and even made the scripting language a dialect of Lisp. Now they wanted a Lisp hacker to write things in it. This was the closest thing I\\'ve had to a normal job, and I hereby apologize to my boss and coworkers, because I was a bad employee. Their Lisp was the thinnest icing on a giant C cake, and since I didn\\'t know C and didn\\'t want to learn it, I never understood most of the software. Plus I was terribly irresponsible. This was back when a programming job meant showing up every day during certain working hours. That seemed unnatural to me, and on this point the rest of the world is coming around to my way of thinking, but at the time it caused a lot of friction. 
Toward the end of the year I spent much of my time surreptitiously working on On Lisp, which I had by this time gotten a contract to publish.\\n\\nThe good part was that I got paid huge amounts of money, especially by art student standards. In Florence, after paying my part of the rent, my budget for everything else had been $7 a day. Now I was getting paid more than 4 times that every hour, even when I was just sitting in a meeting. By living cheaply I not only managed to save enough to go back to RISD, but also paid off my college loans.\\n\\nI learned some useful things at Interleaf, though they were mostly about what not to do. I learned that it\\'s better for technology companies to be run by product people than sales people (though sales is a real skill and people who are good at it are really good at it), that it leads to bugs when code is edited by too many people, that cheap office space is no bargain if it\\'s depressing, that planned meetings are inferior to corridor conversations, that big, bureaucratic customers are a dangerous source of money, and that there\\'s not much overlap between conventional', doc_id='8e0bca16-b087-489b-983a-5beaaf393f64', embedding=None, doc_hash='381c03ecd6edff8260cd955e3762b2c94431d15ae62b58591ab3f9fc9342ed58', extra_info=None, node_info={'start': 15894, 'end': 19945, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: '0fc849a1-2421-414e-9f83-f39e3ac47e41', <DocumentRelationship.NEXT: '3'>: '855d5459-1cfe-465e-8c94-f9a1d047f821'}), score=None), NodeWithScore(node=Node(text='are a dangerous source of money, and that there\\'s not much overlap between conventional office hours and the optimal time for hacking, or conventional offices and the optimal place for it.\\n\\nBut the most important thing I learned, and which I used in both Viaweb and Y Combinator, is that the low end eats the high end: that it\\'s good 
to be the \"entry level\" option, even though that will be less prestigious, because if you\\'re not, someone else will be, and will squash you against the ceiling. Which in turn means that prestige is a danger sign.\\n\\nWhen I left to go back to RISD the next fall, I arranged to do freelance work for the group that did projects for customers, and this was how I survived for the next several years. When I came back to visit for a project later on, someone told me about a new thing called HTML, which was, as he described it, a derivative of SGML. Markup language enthusiasts were an occupational hazard at Interleaf and I ignored him, but this HTML thing later became a big part of my life.\\n\\nIn the fall of 1992 I moved back to Providence to continue at RISD. The foundation had merely been intro stuff, and the Accademia had been a (very civilized) joke. Now I was going to see what real art school was like. But alas it was more like the Accademia than not. Better organized, certainly, and a lot more expensive, but it was now becoming clear that art school did not bear the same relationship to art that medical school bore to medicine. At least not the painting department. The textile department, which my next door neighbor belonged to, seemed to be pretty rigorous. No doubt illustration and architecture were too. But painting was post-rigorous. Painting students were supposed to express themselves, which to the more worldly ones meant to try to cook up some sort of distinctive signature style.\\n\\nA signature style is the visual equivalent of what in show business is known as a \"schtick\": something that immediately identifies the work as yours and no one else\\'s. For example, when you see a painting that looks like a certain kind of cartoon, you know it\\'s by Roy Lichtenstein. So if you see a big painting of this type hanging in the apartment of a hedge fund manager, you know he paid millions of dollars for it. 
That\\'s not always why artists have a signature style, but it\\'s usually why buyers pay a lot for such work. [6]\\n\\nThere were plenty of earnest students too: kids who \"could draw\" in high school, and now had come to what was supposed to be the best art school in the country, to learn to draw even better. They tended to be confused and demoralized by what they found at RISD, but they kept going, because painting was what they did. I was not one of the kids who could draw in high school, but at RISD I was definitely closer to their tribe than the tribe of signature style seekers.\\n\\nI learned a lot in the color class I took at RISD, but otherwise I was basically teaching myself to paint, and I could do that for free. So in 1993 I dropped out. I hung around Providence for a bit, and then my college friend Nancy Parmet did me a big favor. A rent-controlled apartment in a building her mother owned in New York was becoming vacant. Did I want it? It wasn\\'t much more than my current place, and New York was supposed to be where the artists were. So yes, I wanted it! [7]\\n\\nAsterix comics begin by zooming in on a tiny corner of Roman Gaul that turns out not to be controlled by the Romans. You can do something similar on a map of New York City: if you zoom in on the Upper East Side, there\\'s a tiny corner that\\'s not rich, or at least wasn\\'t in 1993. It\\'s called Yorkville, and that was my new home. Now I was a New York artist — in the strictly technical sense of making paintings and living in New York.\\n\\nI was nervous about money, because I could sense that Interleaf was on the way down. 
Freelance Lisp hacking work was very rare, and I', doc_id='855d5459-1cfe-465e-8c94-f9a1d047f821', embedding=None, doc_hash='3a298b8e6f42c9af6356b611a9cbe37172455b8047ba2009f49b910488e96f8d', extra_info=None, node_info={'start': 19946, 'end': 23848, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: '8e0bca16-b087-489b-983a-5beaaf393f64', <DocumentRelationship.NEXT: '3'>: '3b199ba9-d04b-473a-8c73-39c293638957'}), score=None), NodeWithScore(node=Node(text='on the way down. Freelance Lisp hacking work was very rare, and I didn\\'t want to have to program in another language, which in those days would have meant C++ if I was lucky. So with my unerring nose for financial opportunity, I decided to write another book on Lisp. This would be a popular book, the sort of book that could be used as a textbook. I imagined myself living frugally off the royalties and spending all my time painting. (The painting on the cover of this book, ANSI Common Lisp, is one that I painted around this time.)\\n\\nThe best thing about New York for me was the presence of Idelle and Julian Weber. Idelle Weber was a painter, one of the early photorealists, and I\\'d taken her painting class at Harvard. I\\'ve never known a teacher more beloved by her students. Large numbers of former students kept in touch with her, including me. After I moved to New York I became her de facto studio assistant.\\n\\nShe liked to paint on big, square canvases, 4 to 5 feet on a side. One day in late 1994 as I was stretching one of these monsters there was something on the radio about a famous fund manager. He wasn\\'t that much older than me, and was super rich. The thought suddenly occurred to me: why don\\'t I become rich? Then I\\'ll be able to work on whatever I want.\\n\\nMeanwhile I\\'d been hearing more and more about this new thing called the World Wide Web. 
Robert Morris showed it to me when I visited him in Cambridge, where he was now in grad school at Harvard. It seemed to me that the web would be a big deal. I\\'d seen what graphical user interfaces had done for the popularity of microcomputers. It seemed like the web would do the same for the internet.\\n\\nIf I wanted to get rich, here was the next train leaving the station. I was right about that part. What I got wrong was the idea. I decided we should start a company to put art galleries online. I can\\'t honestly say, after reading so many Y Combinator applications, that this was the worst startup idea ever, but it was up there. Art galleries didn\\'t want to be online, and still don\\'t, not the fancy ones. That\\'s not how they sell. I wrote some software to generate web sites for galleries, and Robert wrote some to resize images and set up an http server to serve the pages. Then we tried to sign up galleries. To call this a difficult sale would be an understatement. It was difficult to give away. A few galleries let us make sites for them for free, but none paid us.\\n\\nThen some online stores started to appear, and I realized that except for the order buttons they were identical to the sites we\\'d been generating for galleries. This impressive-sounding thing called an \"internet storefront\" was something we already knew how to build.\\n\\nSo in the summer of 1995, after I submitted the camera-ready copy of ANSI Common Lisp to the publishers, we started trying to write software to build online stores. At first this was going to be normal desktop software, which in those days meant Windows software. That was an alarming prospect, because neither of us knew how to write Windows software or wanted to learn. We lived in the Unix world. But we decided we\\'d at least try writing a prototype store builder on Unix. 
Robert wrote a shopping cart, and I wrote a new site generator for stores — in Lisp, of course.\\n\\nWe were working out of Robert\\'s apartment in Cambridge. His roommate was away for big chunks of time, during which I got to sleep in his room. For some reason there was no bed frame or sheets, just a mattress on the floor. One morning as I was lying on this mattress I had an idea that made me sit up like a capital L. What if we ran the software on the server, and let users control it by clicking on links? Then we\\'d never have to write anything to run on users\\' computers. We could generate the sites on the same server we\\'d serve them from. Users wouldn\\'t need anything more than a browser.\\n\\nThis kind of software, known as a web app, is common now, but at the time it wasn\\'t clear that it was even possible. To find out, we decided to try', doc_id='3b199ba9-d04b-473a-8c73-39c293638957', embedding=None, doc_hash='193c210e6a2567bac7400ec1d44e0bcc00777378664c0e3d08495baf7db4d9ac', extra_info=None, node_info={'start': 23870, 'end': 27897, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: '855d5459-1cfe-465e-8c94-f9a1d047f821', <DocumentRelationship.NEXT: '3'>: '669c0218-b2c1-428b-808c-f5408e52dcdf'}), score=None), NodeWithScore(node=Node(text='wasn\\'t clear that it was even possible. To find out, we decided to try making a version of our store builder that you could control through the browser. A couple days later, on August 12, we had one that worked. The UI was horrible, but it proved you could build a whole store through the browser, without any client software or typing anything into the command line on the server.\\n\\nNow we felt like we were really onto something. I had visions of a whole new generation of software working this way. You wouldn\\'t need versions, or ports, or any of that crap. 
At Interleaf there had been a whole group called Release Engineering that seemed to be at least as big as the group that actually wrote the software. Now you could just update the software right on the server.\\n\\nWe started a new company we called Viaweb, after the fact that our software worked via the web, and we got $10,000 in seed funding from Idelle\\'s husband Julian. In return for that and doing the initial legal work and giving us business advice, we gave him 10% of the company. Ten years later this deal became the model for Y Combinator\\'s. We knew founders needed something like this, because we\\'d needed it ourselves.\\n\\nAt this stage I had a negative net worth, because the thousand dollars or so I had in the bank was more than counterbalanced by what I owed the government in taxes. (Had I diligently set aside the proper proportion of the money I\\'d made consulting for Interleaf? No, I had not.) So although Robert had his graduate student stipend, I needed that seed funding to live on.\\n\\nWe originally hoped to launch in September, but we got more ambitious about the software as we worked on it. Eventually we managed to build a WYSIWYG site builder, in the sense that as you were creating pages, they looked exactly like the static ones that would be generated later, except that instead of leading to static pages, the links all referred to closures stored in a hash table on the server.\\n\\nIt helped to have studied art, because the main goal of an online store builder is to make users look legit, and the key to looking legit is high production values. If you get page layouts and fonts and colors right, you can make a guy running a store out of his bedroom look more legit than a big company.\\n\\n(If you\\'re curious why my site looks so old-fashioned, it\\'s because it\\'s still made with this software. It may look clunky today, but in 1996 it was the last word in slick.)\\n\\nIn September, Robert rebelled. 
\"We\\'ve been working on this for a month,\" he said, \"and it\\'s still not done.\" This is funny in retrospect, because he would still be working on it almost 3 years later. But I decided it might be prudent to recruit more programmers, and I asked Robert who else in grad school with him was really good. He recommended Trevor Blackwell, which surprised me at first, because at that point I knew Trevor mainly for his plan to reduce everything in his life to a stack of notecards, which he carried around with him. But Rtm was right, as usual. Trevor turned out to be a frighteningly effective hacker.\\n\\nIt was a lot of fun working with Robert and Trevor. They\\'re the two most independent-minded people I know, and in completely different ways. If you could see inside Rtm\\'s brain it would look like a colonial New England church, and if you could see inside Trevor\\'s it would look like the worst excesses of Austrian Rococo.\\n\\nWe opened for business, with 6 stores, in January 1996. It was just as well we waited a few months, because although we worried we were late, we were actually almost fatally early. There was a lot of talk in the press then about ecommerce, but not many people actually wanted online stores. [8]\\n\\nThere were three main parts to the software: the editor, which people used to build sites and which I wrote, the shopping cart, which Robert wrote, and the manager, which kept track of orders and statistics, and which Trevor wrote. In its time, the editor was one of the best general-purpose site builders. I kept the code tight and didn\\'t have to integrate with any other software except Robert\\'s and Trevor\\'s, so it was quite fun to work on. 
If all I\\'d had to do was', doc_id='669c0218-b2c1-428b-808c-f5408e52dcdf', embedding=None, doc_hash='fd9eb9ceabf16d661afa8f19d64b256664bbc61c886292817fd92dc2456b8eaa', extra_info=None, node_info={'start': 27894, 'end': 32060, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: '3b199ba9-d04b-473a-8c73-39c293638957', <DocumentRelationship.NEXT: '3'>: 'eb14dc48-f3a1-461d-9a49-0d78331dd083'}), score=None), NodeWithScore(node=Node(text='so it was quite fun to work on. If all I\\'d had to do was work on this software, the next 3 years would have been the easiest of my life. Unfortunately I had to do a lot more, all of it stuff I was worse at than programming, and the next 3 years were instead the most stressful.\\n\\nThere were a lot of startups making ecommerce software in the second half of the 90s. We were determined to be the Microsoft Word, not the Interleaf. Which meant being easy to use and inexpensive. It was lucky for us that we were poor, because that caused us to make Viaweb even more inexpensive than we realized. We charged $100 a month for a small store and $300 a month for a big one. This low price was a big attraction, and a constant thorn in the sides of competitors, but it wasn\\'t because of some clever insight that we set the price low. We had no idea what businesses paid for things. $300 a month seemed like a lot of money to us.\\n\\nWe did a lot of things right by accident like that. For example, we did what\\'s now called \"doing things that don\\'t scale,\" although at the time we would have described it as \"being so lame that we\\'re driven to the most desperate measures to get users.\" The most common of which was building stores for them. This seemed particularly humiliating, since the whole raison d\\'etre of our software was that people could use it to make their own stores. 
But anything to get users.\\n\\nWe learned a lot more about retail than we wanted to know. For example, that if you could only have a small image of a man\\'s shirt (and all images were small then by present standards), it was better to have a closeup of the collar than a picture of the whole shirt. The reason I remember learning this was that it meant I had to rescan about 30 images of men\\'s shirts. My first set of scans were so beautiful too.\\n\\nThough this felt wrong, it was exactly the right thing to be doing. Building stores for users taught us about retail, and about how it felt to use our software. I was initially both mystified and repelled by \"business\" and thought we needed a \"business person\" to be in charge of it, but once we started to get users, I was converted, in much the same way I was converted to fatherhood once I had kids. Whatever users wanted, I was all theirs. Maybe one day we\\'d have so many users that I couldn\\'t scan their images for them, but in the meantime there was nothing more important to do.\\n\\nAnother thing I didn\\'t get at the time is that growth rate is the ultimate test of a startup. Our growth rate was fine. We had about 70 stores at the end of 1996 and about 500 at the end of 1997. I mistakenly thought the thing that mattered was the absolute number of users. And that is the thing that matters in the sense that that\\'s how much money you\\'re making, and if you\\'re not making enough, you might go out of business. But in the long term the growth rate takes care of the absolute number. If we\\'d been a startup I was advising at Y Combinator, I would have said: Stop being so stressed out, because you\\'re doing fine. You\\'re growing 7x a year. Just don\\'t hire too many more people and you\\'ll soon be profitable, and then you\\'ll control your own destiny.\\n\\nAlas I hired lots more people, partly because our investors wanted me to, and partly because that\\'s what startups did during the Internet Bubble. 
A company with just a handful of employees would have seemed amateurish. So we didn\\'t reach breakeven until about when Yahoo bought us in the summer of 1998. Which in turn meant we were at the mercy of investors for the entire life of the company. And since both we and our investors were noobs at startups, the result was a mess even by startup standards.\\n\\nIt was a huge relief when Yahoo bought us. In principle our Viaweb stock was valuable. It was a share in a business that was profitable and growing rapidly. But it didn\\'t feel very valuable to me; I had no idea how to value a business, but I was all too keenly aware of the near-death experiences we seemed to have every few months. Nor had I changed my grad student lifestyle significantly since we started. So when', doc_id='eb14dc48-f3a1-461d-9a49-0d78331dd083', embedding=None, doc_hash='9185047072ffc2d0a80db719ed6fa93a69773224a62bbe5c7a8f59eaed4e80e1', extra_info=None, node_info={'start': 32076, 'end': 36162, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: '669c0218-b2c1-428b-808c-f5408e52dcdf', <DocumentRelationship.NEXT: '3'>: '99b3c4c2-aeab-4f5a-9474-916f6c35c9fb'}), score=None), NodeWithScore(node=Node(text=\"few months. Nor had I changed my grad student lifestyle significantly since we started. So when Yahoo bought us it felt like going from rags to riches. Since we were going to California, I bought a car, a yellow 1998 VW GTI. I remember thinking that its leather seats alone were by far the most luxurious thing I owned.\\n\\nThe next year, from the summer of 1998 to the summer of 1999, must have been the least productive of my life. I didn't realize it at the time, but I was worn out from the effort and stress of running Viaweb. For a while after I got to California I tried to continue my usual m.o. 
of programming till 3 in the morning, but fatigue combined with Yahoo's prematurely aged culture and grim cube farm in Santa Clara gradually dragged me down. After a few months it felt disconcertingly like working at Interleaf.\\n\\nYahoo had given us a lot of options when they bought us. At the time I thought Yahoo was so overvalued that they'd never be worth anything, but to my astonishment the stock went up 5x in the next year. I hung on till the first chunk of options vested, then in the summer of 1999 I left. It had been so long since I'd painted anything that I'd half forgotten why I was doing this. My brain had been entirely full of software and men's shirts for 4 years. But I had done this to get rich so I could paint, I reminded myself, and now I was rich, so I should go paint.\\n\\nWhen I said I was leaving, my boss at Yahoo had a long conversation with me about my plans. I told him all about the kinds of pictures I wanted to paint. At the time I was touched that he took such an interest in me. Now I realize it was because he thought I was lying. My options at that point were worth about $2 million a month. If I was leaving that kind of money on the table, it could only be to go and start some new startup, and if I did, I might take people with me. This was the height of the Internet Bubble, and Yahoo was ground zero of it. My boss was at that moment a billionaire. Leaving then to start a new startup must have seemed to him an insanely, and yet also plausibly, ambitious plan.\\n\\nBut I really was quitting to paint, and I started immediately. There was no time to lose. I'd already burned 4 years getting rich. Now when I talk to founders who are leaving after selling their companies, my advice is always the same: take a vacation. That's what I should have done, just gone off somewhere and done nothing for a month or two, but the idea never occurred to me.\\n\\nSo I tried to paint, but I just didn't seem to have any energy or ambition. 
Part of the problem was that I didn't know many people in California. I'd compounded this problem by buying a house up in the Santa Cruz Mountains, with a beautiful view but miles from anywhere. I stuck it out for a few more months, then in desperation I went back to New York, where unless you understand about rent control you'll be surprised to hear I still had my apartment, sealed up like a tomb of my old life. Idelle was in New York at least, and there were other people trying to paint there, even though I didn't know any of them.\\n\\nWhen I got back to New York I resumed my old life, except now I was rich. It was as weird as it sounds. I resumed all my old patterns, except now there were doors where there hadn't been. Now when I was tired of walking, all I had to do was raise my hand, and (unless it was raining) a taxi would stop to pick me up. Now when I walked past charming little restaurants I could go in and order lunch. It was exciting for a while. Painting started to go better. I experimented with a new kind of still life where I'd paint one painting in the old way, then photograph it and print it, blown up, on canvas, and then use that as the underpainting for a second still life, painted from the same objects (which hopefully hadn't rotted yet).\\n\\nMeanwhile I looked for an apartment to buy. Now I could actually choose what\", doc_id='99b3c4c2-aeab-4f5a-9474-916f6c35c9fb', embedding=None, doc_hash='363de309e331985d57cbd2a87662a4b6036a44807432524f2dbfd50a7e6ba7e5', extra_info=None, node_info={'start': 36125, 'end': 40040, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: 'eb14dc48-f3a1-461d-9a49-0d78331dd083', <DocumentRelationship.NEXT: '3'>: '8a9e2472-230f-437f-b720-1494878d5933'}), score=None), NodeWithScore(node=Node(text='I looked for an apartment to buy. Now I could actually choose what neighborhood to live in. 
Where, I asked myself and various real estate agents, is the Cambridge of New York? Aided by occasional visits to actual Cambridge, I gradually realized there wasn\\'t one. Huh.\\n\\nAround this time, in the spring of 2000, I had an idea. It was clear from our experience with Viaweb that web apps were the future. Why not build a web app for making web apps? Why not let people edit code on our server through the browser, and then host the resulting applications for them? [9] You could run all sorts of services on the servers that these applications could use just by making an API call: making and receiving phone calls, manipulating images, taking credit card payments, etc.\\n\\nI got so excited about this idea that I couldn\\'t think about anything else. It seemed obvious that this was the future. I didn\\'t particularly want to start another company, but it was clear that this idea would have to be embodied as one, so I decided to move to Cambridge and start it. I hoped to lure Robert into working on it with me, but there I ran into a hitch. Robert was now a postdoc at MIT, and though he\\'d made a lot of money the last time I\\'d lured him into working on one of my schemes, it had also been a huge time sink. So while he agreed that it sounded like a plausible idea, he firmly refused to work on it.\\n\\nHmph. Well, I\\'d do it myself then. I recruited Dan Giffin, who had worked for Viaweb, and two undergrads who wanted summer jobs, and we got to work trying to build what it\\'s now clear is about twenty companies and several open source projects worth of software. The language for defining applications would of course be a dialect of Lisp. But I wasn\\'t so naive as to assume I could spring an overt Lisp on a general audience; we\\'d hide the parentheses, like Dylan did.\\n\\nBy then there was a name for the kind of company Viaweb was, an \"application service provider,\" or ASP. 
This name didn\\'t last long before it was replaced by \"software as a service,\" but it was current for long enough that I named this new company after it: it was going to be called Aspra.\\n\\nI started working on the application builder, Dan worked on network infrastructure, and the two undergrads worked on the first two services (images and phone calls). But about halfway through the summer I realized I really didn\\'t want to run a company — especially not a big one, which it was looking like this would have to be. I\\'d only started Viaweb because I needed the money. Now that I didn\\'t need money anymore, why was I doing this? If this vision had to be realized as a company, then screw the vision. I\\'d build a subset that could be done as an open source project.\\n\\nMuch to my surprise, the time I spent working on this stuff was not wasted after all. After we started Y Combinator, I would often encounter startups working on parts of this new architecture, and it was very useful to have spent so much time thinking about it and even trying to write some of it.\\n\\nThe subset I would build as an open source project was the new Lisp, whose parentheses I now wouldn\\'t even have to hide. A lot of Lisp hackers dream of building a new Lisp, partly because one of the distinctive features of the language is that it has dialects, and partly, I think, because we have in our minds a Platonic form of Lisp that all existing dialects fall short of. I certainly did. So at the end of the summer Dan and I switched to working on this new dialect of Lisp, which I called Arc, in a house I bought in Cambridge.\\n\\nThe following spring, lightning struck. I was invited to give a talk at a Lisp conference, so I gave one about how we\\'d used Lisp at Viaweb. Afterward I put a postscript file of this talk online, on paulgraham.com, which I\\'d created years before using Viaweb but had never used for anything. In one day it got 30,000 page views. What on earth had happened? 
The referring urls showed', doc_id='8a9e2472-230f-437f-b720-1494878d5933', embedding=None, doc_hash='1a72edc63a9582c55f626fed3dbb1fadbfb1e7e7111e93b6c789c79c66db1e11', extra_info=None, node_info={'start': 40067, 'end': 44018, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: '99b3c4c2-aeab-4f5a-9474-916f6c35c9fb', <DocumentRelationship.NEXT: '3'>: '2d1c3f1b-5fa5-46c9-be20-87ad2dfc9060'}), score=None), NodeWithScore(node=Node(text=\"page views. What on earth had happened? The referring urls showed that someone had posted it on Slashdot. [10]\\n\\nWow, I thought, there's an audience. If I write something and put it on the web, anyone can read it. That may seem obvious now, but it was surprising then. In the print era there was a narrow channel to readers, guarded by fierce monsters known as editors. The only way to get an audience for anything you wrote was to get it published as a book, or in a newspaper or magazine. Now anyone could publish anything.\\n\\nThis had been possible in principle since 1993, but not many people had realized it yet. I had been intimately involved with building the infrastructure of the web for most of that time, and a writer as well, and it had taken me 8 years to realize it. Even then it took me several years to understand the implications. It meant there would be a whole new generation of essays. [11]\\n\\nIn the print era, the channel for publishing essays had been vanishingly small. Except for a few officially anointed thinkers who went to the right parties in New York, the only people allowed to publish essays were specialists writing about their specialties. There were so many essays that had never been written, because there had been no way to publish them. Now they could be, and I was going to write them. 
[12]\\n\\nI've worked on several different things, but to the extent there was a turning point where I figured out what to work on, it was when I started publishing essays online. From then on I knew that whatever else I did, I'd always write essays too.\\n\\nI knew that online essays would be a marginal medium at first. Socially they'd seem more like rants posted by nutjobs on their GeoCities sites than the genteel and beautifully typeset compositions published in The New Yorker. But by this point I knew enough to find that encouraging instead of discouraging.\\n\\nOne of the most conspicuous patterns I've noticed in my life is how well it has worked, for me at least, to work on things that weren't prestigious. Still life has always been the least prestigious form of painting. Viaweb and Y Combinator both seemed lame when we started them. I still get the glassy eye from strangers when they ask what I'm writing, and I explain that it's an essay I'm going to publish on my web site. Even Lisp, though prestigious intellectually in something like the way Latin is, also seems about as hip.\\n\\nIt's not that unprestigious types of work are good per se. But when you find yourself drawn to some kind of work despite its current lack of prestige, it's a sign both that there's something real to be discovered there, and that you have the right kind of motives. Impure motives are a big danger for the ambitious. If anything is going to lead you astray, it will be the desire to impress people. So while working on things that aren't prestigious doesn't guarantee you're on the right track, it at least guarantees you're not on the most common type of wrong one.\\n\\nOver the next several years I wrote lots of essays about all kinds of different topics. O'Reilly reprinted a collection of them as a book, called Hackers & Painters after one of the essays in it. I also worked on spam filters, and did some more painting. 
I used to have dinners for a group of friends every thursday night, which taught me how to cook for groups. And I bought another building in Cambridge, a former candy factory (and later, twas said, porn studio), to use as an office.\\n\\nOne night in October 2003 there was a big party at my house. It was a clever idea of my friend Maria Daniels, who was one of the thursday diners. Three separate hosts would all invite their friends to one party. So for every guest, two thirds of the other guests would be people they didn't know but would probably like. One of the guests was someone I didn't know but would turn out to like a lot: a woman called Jessica Livingston. A couple days later I asked her out.\\n\\nJessica was in charge of marketing at a Boston investment\", doc_id='2d1c3f1b-5fa5-46c9-be20-87ad2dfc9060', embedding=None, doc_hash='597ba7da683f715e046031bf0824aa5d9ff24b64aa2c0902a3ad6ee1e48ea312', extra_info=None, node_info={'start': 44018, 'end': 48016, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: '8a9e2472-230f-437f-b720-1494878d5933', <DocumentRelationship.NEXT: '3'>: '42f8cdbc-3613-409a-85b6-bb1e22c85fef'}), score=None), NodeWithScore(node=Node(text='I asked her out.\\n\\nJessica was in charge of marketing at a Boston investment bank. This bank thought it understood startups, but over the next year, as she met friends of mine from the startup world, she was surprised how different reality was. And how colorful their stories were. So she decided to compile a book of interviews with startup founders.\\n\\nWhen the bank had financial problems and she had to fire half her staff, she started looking for a new job. In early 2005 she interviewed for a marketing job at a Boston VC firm. It took them weeks to make up their minds, and during this time I started telling her about all the things that needed to be fixed about venture capital. 
They should make a larger number of smaller investments instead of a handful of giant ones, they should be funding younger, more technical founders instead of MBAs, they should let the founders remain as CEO, and so on.\\n\\nOne of my tricks for writing essays had always been to give talks. The prospect of having to stand up in front of a group of people and tell them something that won\\'t waste their time is a great spur to the imagination. When the Harvard Computer Society, the undergrad computer club, asked me to give a talk, I decided I would tell them how to start a startup. Maybe they\\'d be able to avoid the worst of the mistakes we\\'d made.\\n\\nSo I gave this talk, in the course of which I told them that the best sources of seed funding were successful startup founders, because then they\\'d be sources of advice too. Whereupon it seemed they were all looking expectantly at me. Horrified at the prospect of having my inbox flooded by business plans (if I\\'d only known), I blurted out \"But not me!\" and went on with the talk. But afterward it occurred to me that I should really stop procrastinating about angel investing. I\\'d been meaning to since Yahoo bought us, and now it was 7 years later and I still hadn\\'t done one angel investment.\\n\\nMeanwhile I had been scheming with Robert and Trevor about projects we could work on together. I missed working with them, and it seemed like there had to be something we could collaborate on.\\n\\nAs Jessica and I were walking home from dinner on March 11, at the corner of Garden and Walker streets, these three threads converged. Screw the VCs who were taking so long to make up their minds. We\\'d start our own investment firm and actually implement the ideas we\\'d been talking about. I\\'d fund it, and Jessica could quit her job and work for it, and we\\'d get Robert and Trevor as partners too. [13]\\n\\nOnce again, ignorance worked in our favor. 
We had no idea how to be angel investors, and in Boston in 2005 there were no Ron Conways to learn from. So we just made what seemed like the obvious choices, and some of the things we did turned out to be novel.\\n\\nThere are multiple components to Y Combinator, and we didn\\'t figure them all out at once. The part we got first was to be an angel firm. In those days, those two words didn\\'t go together. There were VC firms, which were organized companies with people whose job it was to make investments, but they only did big, million dollar investments. And there were angels, who did smaller investments, but these were individuals who were usually focused on other things and made investments on the side. And neither of them helped founders enough in the beginning. We knew how helpless founders were in some respects, because we remembered how helpless we\\'d been. For example, one thing Julian had done for us that seemed to us like magic was to get us set up as a company. We were fine writing fairly difficult software, but actually getting incorporated, with bylaws and stock and all that stuff, how on earth did you do that? Our plan was not only to make seed investments, but to do for startups everything Julian had done for us.\\n\\nYC was not organized as a fund. It was cheap enough to run that we funded it with our own money. That went right by 99% of readers, but professional investors are thinking \"Wow, that means they got all the returns.\" But once again, this was not due to any particular insight on our part. 
We didn\\'t know how VC firms', doc_id='42f8cdbc-3613-409a-85b6-bb1e22c85fef', embedding=None, doc_hash='345510864fca229d2e5a22bf719f4ee408018c13c348307d3d72bc151fe61050', extra_info=None, node_info={'start': 48009, 'end': 52140, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: '2d1c3f1b-5fa5-46c9-be20-87ad2dfc9060', <DocumentRelationship.NEXT: '3'>: 'f9e74e9d-cdd9-43c4-8742-76c38200305f'}), score=None), NodeWithScore(node=Node(text='not due to any particular insight on our part. We didn\\'t know how VC firms were organized. It never occurred to us to try to raise a fund, and if it had, we wouldn\\'t have known where to start. [14]\\n\\nThe most distinctive thing about YC is the batch model: to fund a bunch of startups all at once, twice a year, and then to spend three months focusing intensively on trying to help them. That part we discovered by accident, not merely implicitly but explicitly due to our ignorance about investing. We needed to get experience as investors. What better way, we thought, than to fund a whole bunch of startups at once? We knew undergrads got temporary jobs at tech companies during the summer. Why not organize a summer program where they\\'d start startups instead? We wouldn\\'t feel guilty for being in a sense fake investors, because they would in a similar sense be fake founders. So while we probably wouldn\\'t make much money out of it, we\\'d at least get to practice being investors on them, and they for their part would probably have a more interesting summer than they would working at Microsoft.\\n\\nWe\\'d use the building I owned in Cambridge as our headquarters. 
We\\'d all have dinner there once a week — on tuesdays, since I was already cooking for the thursday diners on thursdays — and after dinner we\\'d bring in experts on startups to give talks.\\n\\nWe knew undergrads were deciding then about summer jobs, so in a matter of days we cooked up something we called the Summer Founders Program, and I posted an announcement on my site, inviting undergrads to apply. I had never imagined that writing essays would be a way to get \"deal flow,\" as investors call it, but it turned out to be the perfect source. [15] We got 225 applications for the Summer Founders Program, and we were surprised to find that a lot of them were from people who\\'d already graduated, or were about to that spring. Already this SFP thing was starting to feel more serious than we\\'d intended.\\n\\nWe invited about 20 of the 225 groups to interview in person, and from those we picked 8 to fund. They were an impressive group. That first batch included reddit, Justin Kan and Emmett Shear, who went on to found Twitch, Aaron Swartz, who had already helped write the RSS spec and would a few years later become a martyr for open access, and Sam Altman, who would later become the second president of YC. I don\\'t think it was entirely luck that the first batch was so good. You had to be pretty bold to sign up for a weird thing like the Summer Founders Program instead of a summer job at a legit place like Microsoft or Goldman Sachs.\\n\\nThe deal for startups was based on a combination of the deal we did with Julian ($10k for 10%) and what Robert said MIT grad students got for the summer ($6k). We invested $6k per founder, which in the typical two-founder case was $12k, in return for 6%. That had to be fair, because it was twice as good as the deal we ourselves had taken. Plus that first summer, which was really hot, Jessica brought the founders free air conditioners. [16]\\n\\nFairly quickly I realized that we had stumbled upon the way to scale startup funding. 
Funding startups in batches was more convenient for us, because it meant we could do things for a lot of startups at once, but being part of a batch was better for the startups too. It solved one of the biggest problems faced by founders: the isolation. Now you not only had colleagues, but colleagues who understood the problems you were facing and could tell you how they were solving them.\\n\\nAs YC grew, we started to notice other advantages of scale. The alumni became a tight community, dedicated to helping one another, and especially the current batch, whose shoes they remembered being in. We also noticed that the startups were becoming one another\\'s customers. We used to refer jokingly to the \"YC GDP,\" but as YC grows this becomes less and less of a joke. Now lots of startups get their initial set of customers almost entirely from among their batchmates.\\n\\nI had not originally', doc_id='f9e74e9d-cdd9-43c4-8742-76c38200305f', embedding=None, doc_hash='03e6a7d2e66c082507e29d8cfe9ec36fa77fe0b1cf352ad5ce85ad91f71a3622', extra_info=None, node_info={'start': 52144, 'end': 56170, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: '42f8cdbc-3613-409a-85b6-bb1e22c85fef', <DocumentRelationship.NEXT: '3'>: 'ab7ed037-4269-4593-b5ff-0ce3d9213cbd'}), score=None), NodeWithScore(node=Node(text='of customers almost entirely from among their batchmates.\\n\\nI had not originally intended YC to be a full-time job. I was going to do three things: hack, write essays, and work on YC. As YC grew, and I grew more excited about it, it started to take up a lot more than a third of my attention. But for the first few years I was still able to work on other things.\\n\\nIn the summer of 2006, Robert and I started working on a new version of Arc. This one was reasonably fast, because it was compiled into Scheme. To test this new Arc, I wrote Hacker News in it. 
It was originally meant to be a news aggregator for startup founders and was called Startup News, but after a few months I got tired of reading about nothing but startups. Plus it wasn\\'t startup founders we wanted to reach. It was future startup founders. So I changed the name to Hacker News and the topic to whatever engaged one\\'s intellectual curiosity.\\n\\nHN was no doubt good for YC, but it was also by far the biggest source of stress for me. If all I\\'d had to do was select and help founders, life would have been so easy. And that implies that HN was a mistake. Surely the biggest source of stress in one\\'s work should at least be something close to the core of the work. Whereas I was like someone who was in pain while running a marathon not from the exertion of running, but because I had a blister from an ill-fitting shoe. When I was dealing with some urgent problem during YC, there was about a 60% chance it had to do with HN, and a 40% chance it had do with everything else combined. [17]\\n\\nAs well as HN, I wrote all of YC\\'s internal software in Arc. But while I continued to work a good deal in Arc, I gradually stopped working on Arc, partly because I didn\\'t have time to, and partly because it was a lot less attractive to mess around with the language now that we had all this infrastructure depending on it. So now my three projects were reduced to two: writing essays and working on YC.\\n\\nYC was different from other kinds of work I\\'ve done. Instead of deciding for myself what to work on, the problems came to me. Every 6 months there was a new batch of startups, and their problems, whatever they were, became our problems. It was very engaging work, because their problems were quite varied, and the good founders were very effective. If you were trying to learn the most you could about startups in the shortest possible time, you couldn\\'t have picked a better way to do it.\\n\\nThere were parts of the job I didn\\'t like. 
Disputes between cofounders, figuring out when people were lying to us, fighting with people who maltreated the startups, and so on. But I worked hard even at the parts I didn\\'t like. I was haunted by something Kevin Hale once said about companies: \"No one works harder than the boss.\" He meant it both descriptively and prescriptively, and it was the second part that scared me. I wanted YC to be good, so if how hard I worked set the upper bound on how hard everyone else worked, I\\'d better work very hard.\\n\\nOne day in 2010, when he was visiting California for interviews, Robert Morris did something astonishing: he offered me unsolicited advice. I can only remember him doing that once before. One day at Viaweb, when I was bent over double from a kidney stone, he suggested that it would be a good idea for him to take me to the hospital. That was what it took for Rtm to offer unsolicited advice. So I remember his exact words very clearly. \"You know,\" he said, \"you should make sure Y Combinator isn\\'t the last cool thing you do.\"\\n\\nAt the time I didn\\'t understand what he meant, but gradually it dawned on me that he was saying I should quit. This seemed strange advice, because YC was doing great. But if there was one thing rarer than Rtm offering advice, it was Rtm being wrong. So this set me thinking. It was true that on my current trajectory, YC would be the last thing I did, because it was only taking up more of my attention. 
It had already eaten Arc, and was in the process of eating', doc_id='ab7ed037-4269-4593-b5ff-0ce3d9213cbd', embedding=None, doc_hash='28da476588fa6a7c04e3fc8d0c8490de5a6aa3f4b46ada11723bd524402b1d33', extra_info=None, node_info={'start': 56162, 'end': 60161, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: 'f9e74e9d-cdd9-43c4-8742-76c38200305f', <DocumentRelationship.NEXT: '3'>: 'cfd50785-c54a-4e07-b474-561541968da9'}), score=None), NodeWithScore(node=Node(text=\"of my attention. It had already eaten Arc, and was in the process of eating essays too. Either YC was my life's work or I'd have to leave eventually. And it wasn't, so I would.\\n\\nIn the summer of 2012 my mother had a stroke, and the cause turned out to be a blood clot caused by colon cancer. The stroke destroyed her balance, and she was put in a nursing home, but she really wanted to get out of it and back to her house, and my sister and I were determined to help her do it. I used to fly up to Oregon to visit her regularly, and I had a lot of time to think on those flights. On one of them I realized I was ready to hand YC over to someone else.\\n\\nI asked Jessica if she wanted to be president, but she didn't, so we decided we'd try to recruit Sam Altman. We talked to Robert and Trevor and we agreed to make it a complete changing of the guard. Up till that point YC had been controlled by the original LLC we four had started. But we wanted YC to last for a long time, and to do that it couldn't be controlled by the founders. So if Sam said yes, we'd let him reorganize YC. Robert and I would retire, and Jessica and Trevor would become ordinary partners.\\n\\nWhen we asked Sam if he wanted to be president of YC, initially he said no. He wanted to start a startup to make nuclear reactors. But I kept at it, and in October 2013 he finally agreed. 
We decided he'd take over starting with the winter 2014 batch. For the rest of 2013 I left running YC more and more to Sam, partly so he could learn the job, and partly because I was focused on my mother, whose cancer had returned.\\n\\nShe died on January 15, 2014. We knew this was coming, but it was still hard when it did.\\n\\nI kept working on YC till March, to help get that batch of startups through Demo Day, then I checked out pretty completely. (I still talk to alumni and to new startups working on things I'm interested in, but that only takes a few hours a week.)\\n\\nWhat should I do next? Rtm's advice hadn't included anything about that. I wanted to do something completely different, so I decided I'd paint. I wanted to see how good I could get if I really focused on it. So the day after I stopped working on YC, I started painting. I was rusty and it took a while to get back into shape, but it was at least completely engaging. [18]\\n\\nI spent most of the rest of 2014 painting. I'd never been able to work so uninterruptedly before, and I got to be better than I had been. Not good enough, but better. Then in November, right in the middle of a painting, I ran out of steam. Up till that point I'd always been curious to see how the painting I was working on would turn out, but suddenly finishing this one seemed like a chore. So I stopped working on it and cleaned my brushes and haven't painted since. So far anyway.\\n\\nI realize that sounds rather wimpy. But attention is a zero sum game. If you can choose what to work on, and you choose a project that's not the best one (or at least a good one) for you, then it's getting in the way of another project that is. And at 50 there was some opportunity cost to screwing around.\\n\\nI started writing essays again, and wrote a bunch of new ones over the next few months. I even wrote a couple that weren't about startups. 
Then in March 2015 I started working on Lisp again.\\n\\nThe distinctive thing about Lisp is that its core is a language defined by writing an interpreter in itself. It wasn't originally intended as a programming language in the ordinary sense. It was meant to be a formal model of computation, an alternative to the Turing machine. If you want to write an interpreter for a language in itself, what's the minimum set of predefined operators you need? The Lisp that John McCarthy invented, or more accurately discovered, is an answer to that question. [19]\\n\\nMcCarthy didn't realize this Lisp could even be used to program computers\", doc_id='cfd50785-c54a-4e07-b474-561541968da9', embedding=None, doc_hash='b6524989f50c19316fcc4135d476deedc62b79ab141d9f650743a6fe5f3558c9', extra_info=None, node_info={'start': 60170, 'end': 64027, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: 'ab7ed037-4269-4593-b5ff-0ce3d9213cbd', <DocumentRelationship.NEXT: '3'>: '14d14357-e5cf-4015-8a65-2cc9fd3fb5c4'}), score=None), NodeWithScore(node=Node(text='didn\\'t realize this Lisp could even be used to program computers till his grad student Steve Russell suggested it. Russell translated McCarthy\\'s interpreter into IBM 704 machine language, and from that point Lisp started also to be a programming language in the ordinary sense. But its origins as a model of computation gave it a power and elegance that other languages couldn\\'t match. It was this that attracted me in college, though I didn\\'t understand why at the time.\\n\\nMcCarthy\\'s 1960 Lisp did nothing more than interpret Lisp expressions. It was missing a lot of things you\\'d want in a programming language. So these had to be added, and when they were, they weren\\'t defined using McCarthy\\'s original axiomatic approach. That wouldn\\'t have been feasible at the time. 
McCarthy tested his interpreter by hand-simulating the execution of programs. But it was already getting close to the limit of interpreters you could test that way — indeed, there was a bug in it that McCarthy had overlooked. To test a more complicated interpreter, you\\'d have had to run it, and computers then weren\\'t powerful enough.\\n\\nNow they are, though. Now you could continue using McCarthy\\'s axiomatic approach till you\\'d defined a complete programming language. And as long as every change you made to McCarthy\\'s Lisp was a discoveredness-preserving transformation, you could, in principle, end up with a complete language that had this quality. Harder to do than to talk about, of course, but if it was possible in principle, why not try? So I decided to take a shot at it. It took 4 years, from March 26, 2015 to October 12, 2019. It was fortunate that I had a precisely defined goal, or it would have been hard to keep at it for so long.\\n\\nI wrote this new Lisp, called Bel, in itself in Arc. That may sound like a contradiction, but it\\'s an indication of the sort of trickery I had to engage in to make this work. By means of an egregious collection of hacks I managed to make something close enough to an interpreter written in itself that could actually run. Not fast, but fast enough to test.\\n\\nI had to ban myself from writing essays during most of this time, or I\\'d never have finished. In late 2015 I spent 3 months writing essays, and when I went back to working on Bel I could barely understand the code. Not so much because it was badly written as because the problem is so convoluted. When you\\'re working on an interpreter written in itself, it\\'s hard to keep track of what\\'s happening at what level, and errors can be practically encrypted by the time you get them.\\n\\nSo I said no more essays till Bel was done. But I told few people about Bel while I was working on it. 
So for years it must have seemed that I was doing nothing, when in fact I was working harder than I\\'d ever worked on anything. Occasionally after wrestling for hours with some gruesome bug I\\'d check Twitter or HN and see someone asking \"Does Paul Graham still code?\"\\n\\nWorking on Bel was hard but satisfying. I worked on it so intensively that at any given time I had a decent chunk of the code in my head and could write more there. I remember taking the boys to the coast on a sunny day in 2015 and figuring out how to deal with some problem involving continuations while I watched them play in the tide pools. It felt like I was doing life right. I remember that because I was slightly dismayed at how novel it felt. The good news is that I had more moments like this over the next few years.\\n\\nIn the summer of 2016 we moved to England. We wanted our kids to see what it was like living in another country, and since I was a British citizen by birth, that seemed the obvious choice. We only meant to stay for a year, but we liked it so much that we still live there. So most of Bel was written in England.\\n\\nIn the fall of 2019, Bel was finally finished. 
Like McCarthy\\'s original Lisp, it\\'s a spec rather than an implementation, although like McCarthy\\'s Lisp it\\'s a spec expressed as code.\\n\\nNow that I could write essays again, I wrote a bunch about topics I\\'d had stacked', doc_id='14d14357-e5cf-4015-8a65-2cc9fd3fb5c4', embedding=None, doc_hash='fdbb519247f837aba04548ab0aec6383f33f41fbbabe6aacfe83b2f52ad699c5', extra_info=None, node_info={'start': 64035, 'end': 68071, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: 'cfd50785-c54a-4e07-b474-561541968da9', <DocumentRelationship.NEXT: '3'>: '81530055-3701-4064-8170-c2c3d42f05b7'}), score=None), NodeWithScore(node=Node(text=\"I could write essays again, I wrote a bunch about topics I'd had stacked up. I kept writing essays through 2020, but I also started to think about other things I could work on. How should I choose what to do? Well, how had I chosen what to work on in the past? I wrote an essay for myself to answer that question, and I was surprised how long and messy the answer turned out to be. If this surprised me, who'd lived it, then I thought perhaps it would be interesting to other people, and encouraging to those with similarly messy lives. So I wrote a more detailed version for others to read, and this is the last sentence of it.\\n\\n\\n\\n\\n\\n\\n\\n\\n\\nNotes\\n\\n[1] My experience skipped a step in the evolution of computers: time-sharing machines with interactive OSes. I went straight from batch processing to microcomputers, which made microcomputers seem all the more exciting.\\n\\n[2] Italian words for abstract concepts can nearly always be predicted from their English cognates (except for occasional traps like polluzione). It's the everyday words that differ. 
So if you string together a lot of abstract concepts with a few simple verbs, you can make a little Italian go a long way.\\n\\n[3] I lived at Piazza San Felice 4, so my walk to the Accademia went straight down the spine of old Florence: past the Pitti, across the bridge, past Orsanmichele, between the Duomo and the Baptistery, and then up Via Ricasoli to Piazza San Marco. I saw Florence at street level in every possible condition, from empty dark winter evenings to sweltering summer days when the streets were packed with tourists.\\n\\n[4] You can of course paint people like still lives if you want to, and they're willing. That sort of portrait is arguably the apex of still life painting, though the long sitting does tend to produce pained expressions in the sitters.\\n\\n[5] Interleaf was one of many companies that had smart people and built impressive technology, and yet got crushed by Moore's Law. In the 1990s the exponential growth in the power of commodity (i.e. Intel) processors rolled up high-end, special-purpose hardware and software companies like a bulldozer.\\n\\n[6] The signature style seekers at RISD weren't specifically mercenary. In the art world, money and coolness are tightly coupled. Anything expensive comes to be seen as cool, and anything seen as cool will soon become equally expensive.\\n\\n[7] Technically the apartment wasn't rent-controlled but rent-stabilized, but this is a refinement only New Yorkers would know or care about. The point is that it was really cheap, less than half market price.\\n\\n[8] Most software you can launch as soon as it's done. But when the software is an online store builder and you're hosting the stores, if you don't have any users yet, that fact will be painfully obvious. 
So before we could launch publicly we had to launch privately, in the sense of recruiting an initial set of users and making sure they had decent-looking stores.\\n\\n[9] We'd had a code editor in Viaweb for users to define their own page styles. They didn't know it, but they were editing Lisp expressions underneath. But this wasn't an app editor, because the code ran when the merchants' sites were generated, not when shoppers visited them.\\n\\n[10] This was the first instance of what is now a familiar experience, and so was what happened next, when I read the comments and found they were full of angry people. How could I claim that Lisp was better than other languages? Weren't they all Turing complete? People who see the responses to essays I write sometimes tell me how sorry they feel for me, but I'm not exaggerating when I reply that it has always been like this, since the very beginning. It comes with the territory. An essay must tell readers things they don't already know, and some people dislike being told such things.\\n\\n[11] People put plenty of stuff on the internet in the 90s of course, but putting something online is not the same as publishing it online. Publishing online means you\", doc_id='81530055-3701-4064-8170-c2c3d42f05b7', embedding=None, doc_hash='7ded19f889627d27a2c132b3d5c4f9dac587cd2407c5624d421e292f2b3ab1d7', extra_info=None, node_info={'start': 68067, 'end': 72048, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: '14d14357-e5cf-4015-8a65-2cc9fd3fb5c4', <DocumentRelationship.NEXT: '3'>: 'c851c700-5226-42a9-87da-e89e548e381e'}), score=None), NodeWithScore(node=Node(text='putting something online is not the same as publishing it online. 
Publishing online means you treat the online version as the (or at least a) primary version.\\n\\n[12] There is a general lesson here that our experience with Y Combinator also teaches: Customs continue to constrain you long after the restrictions that caused them have disappeared. Customary VC practice had once, like the customs about publishing essays, been based on real constraints. Startups had once been much more expensive to start, and proportionally rare. Now they could be cheap and common, but the VCs\\' customs still reflected the old world, just as customs about writing essays still reflected the constraints of the print era.\\n\\nWhich in turn implies that people who are independent-minded (i.e. less influenced by custom) will have an advantage in fields affected by rapid change (where customs are more likely to be obsolete).\\n\\nHere\\'s an interesting point, though: you can\\'t always predict which fields will be affected by rapid change. Obviously software and venture capital will be, but who would have predicted that essay writing would be?\\n\\n[13] Y Combinator was not the original name. At first we were called Cambridge Seed. But we didn\\'t want a regional name, in case someone copied us in Silicon Valley, so we renamed ourselves after one of the coolest tricks in the lambda calculus, the Y combinator.\\n\\nI picked orange as our color partly because it\\'s the warmest, and partly because no VC used it. In 2005 all the VCs used staid colors like maroon, navy blue, and forest green, because they were trying to appeal to LPs, not founders. The YC logo itself is an inside joke: the Viaweb logo had been a white V on a red circle, so I made the YC logo a white Y on an orange square.\\n\\n[14] YC did become a fund for a couple years starting in 2009, because it was getting so big I could no longer afford to fund it personally. 
But after Heroku got bought we had enough money to go back to being self-funded.\\n\\n[15] I\\'ve never liked the term \"deal flow,\" because it implies that the number of new startups at any given time is fixed. This is not only false, but it\\'s the purpose of YC to falsify it, by causing startups to be founded that would not otherwise have existed.\\n\\n[16] She reports that they were all different shapes and sizes, because there was a run on air conditioners and she had to get whatever she could, but that they were all heavier than she could carry now.\\n\\n[17] Another problem with HN was a bizarre edge case that occurs when you both write essays and run a forum. When you run a forum, you\\'re assumed to see if not every conversation, at least every conversation involving you. And when you write essays, people post highly imaginative misinterpretations of them on forums. Individually these two phenomena are tedious but bearable, but the combination is disastrous. You actually have to respond to the misinterpretations, because the assumption that you\\'re present in the conversation means that not responding to any sufficiently upvoted misinterpretation reads as a tacit admission that it\\'s correct. But that in turn encourages more; anyone who wants to pick a fight with you senses that now is their chance.\\n\\n[18] The worst thing about leaving YC was not working with Jessica anymore. We\\'d been working on YC almost the whole time we\\'d known each other, and we\\'d neither tried nor wanted to separate it from our personal lives, so leaving was like pulling up a deeply rooted tree.\\n\\n[19] One way to get more precise about the concept of invented vs discovered is to talk about space aliens. Any sufficiently advanced alien civilization would certainly know about the Pythagorean theorem, for example. 
I believe, though with less certainty, that they would also know about the Lisp in McCarthy\\'s 1960 paper.\\n\\nBut if so there\\'s no reason to suppose that this is the limit of the language that might be known to them. Presumably aliens need numbers and errors and I/O too. So it seems likely there exists at least one path out of McCarthy\\'s', doc_id='c851c700-5226-42a9-87da-e89e548e381e', embedding=None, doc_hash='8c5bdf9883547bcebedcb406fa77ba918defaa7bbedece21114163c578ac0729', extra_info=None, node_info={'start': 72029, 'end': 76071, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: '81530055-3701-4064-8170-c2c3d42f05b7', <DocumentRelationship.NEXT: '3'>: '4972fc4f-6990-4659-a20c-98a8147373d8'}), score=None), NodeWithScore(node=Node(text=\"So it seems likely there exists at least one path out of McCarthy's Lisp along which discoveredness is preserved.\\n\\n\\n\\nThanks to Trevor Blackwell, John Collison, Patrick Collison, Daniel Gackle, Ralph Hazell, Jessica Livingston, Robert Morris, and Harj Taggar for reading drafts of this.\\n\\n\\n\\n\", doc_id='4972fc4f-6990-4659-a20c-98a8147373d8', embedding=None, doc_hash='9d65b6bc997db43cee91e2c4b7380e4efc1059937154b60dc2e45b8aa489e59e', extra_info=None, node_info={'start': 76083, 'end': 76372, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: 'c851c700-5226-42a9-87da-e89e548e381e'}), score=None)], extra_info={'a0e4f5a8-f852-4807-96f2-76721dc1e57d': None, '1992e045-1032-432c-ac01-42947c0727e9': None, 'e06dfa02-faa5-4617-9d58-00ae88433c58': None, '0fc849a1-2421-414e-9f83-f39e3ac47e41': None, '8e0bca16-b087-489b-983a-5beaaf393f64': None, '855d5459-1cfe-465e-8c94-f9a1d047f821': None, '3b199ba9-d04b-473a-8c73-39c293638957': None, '669c0218-b2c1-428b-808c-f5408e52dcdf': None, 
'eb14dc48-f3a1-461d-9a49-0d78331dd083': None, '99b3c4c2-aeab-4f5a-9474-916f6c35c9fb': None, '8a9e2472-230f-437f-b720-1494878d5933': None, '2d1c3f1b-5fa5-46c9-be20-87ad2dfc9060': None, '42f8cdbc-3613-409a-85b6-bb1e22c85fef': None, 'f9e74e9d-cdd9-43c4-8742-76c38200305f': None, 'ab7ed037-4269-4593-b5ff-0ce3d9213cbd': None, 'cfd50785-c54a-4e07-b474-561541968da9': None, '14d14357-e5cf-4015-8a65-2cc9fd3fb5c4': None, '81530055-3701-4064-8170-c2c3d42f05b7': None, 'c851c700-5226-42a9-87da-e89e548e381e': None, '4972fc4f-6990-4659-a20c-98a8147373d8': None})"
+                        ]
+                    },
+                    "execution_count": 14,
+                    "metadata": {},
+                    "output_type": "execute_result"
+                }
+            ],
+            "source": [
+                "query_engine.query(\"What is the summary of the document?\")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 15,
+            "metadata": {},
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "Selecting query engine 1: Useful for retrieving specific context from Paul Graham essay on What I Worked On..\n",
+                        "> [retrieve] Total LLM token usage: 0 tokens\n",
+                        "> [retrieve] Total embedding token usage: 9 tokens\n",
+                        "> [get_response] Total LLM token usage: 1924 tokens\n",
+                        "> [get_response] Total embedding token usage: 0 tokens\n"
+                    ]
+                },
+                {
+                    "data": {
+                        "text/plain": [
+                            "Response(response=\"\\nAfter RICS, Paul Graham decided to focus on Y Combinator and help get the startups through Demo Day. He also started writing essays again and wrote a few that weren't about startups. In November 2014, he ran out of steam while painting and stopped working on it. He then started working on Lisp again in March 2015.\", source_nodes=[NodeWithScore(node=Node(text=\"of my attention. It had already eaten Arc, and was in the process of eating essays too. Either YC was my life's work or I'd have to leave eventually. And it wasn't, so I would.\\n\\nIn the summer of 2012 my mother had a stroke, and the cause turned out to be a blood clot caused by colon cancer. The stroke destroyed her balance, and she was put in a nursing home, but she really wanted to get out of it and back to her house, and my sister and I were determined to help her do it. I used to fly up to Oregon to visit her regularly, and I had a lot of time to think on those flights. On one of them I realized I was ready to hand YC over to someone else.\\n\\nI asked Jessica if she wanted to be president, but she didn't, so we decided we'd try to recruit Sam Altman. We talked to Robert and Trevor and we agreed to make it a complete changing of the guard. Up till that point YC had been controlled by the original LLC we four had started. But we wanted YC to last for a long time, and to do that it couldn't be controlled by the founders. So if Sam said yes, we'd let him reorganize YC. Robert and I would retire, and Jessica and Trevor would become ordinary partners.\\n\\nWhen we asked Sam if he wanted to be president of YC, initially he said no. He wanted to start a startup to make nuclear reactors. But I kept at it, and in October 2013 he finally agreed. We decided he'd take over starting with the winter 2014 batch. 
For the rest of 2013 I left running YC more and more to Sam, partly so he could learn the job, and partly because I was focused on my mother, whose cancer had returned.\\n\\nShe died on January 15, 2014. We knew this was coming, but it was still hard when it did.\\n\\nI kept working on YC till March, to help get that batch of startups through Demo Day, then I checked out pretty completely. (I still talk to alumni and to new startups working on things I'm interested in, but that only takes a few hours a week.)\\n\\nWhat should I do next? Rtm's advice hadn't included anything about that. I wanted to do something completely different, so I decided I'd paint. I wanted to see how good I could get if I really focused on it. So the day after I stopped working on YC, I started painting. I was rusty and it took a while to get back into shape, but it was at least completely engaging. [18]\\n\\nI spent most of the rest of 2014 painting. I'd never been able to work so uninterruptedly before, and I got to be better than I had been. Not good enough, but better. Then in November, right in the middle of a painting, I ran out of steam. Up till that point I'd always been curious to see how the painting I was working on would turn out, but suddenly finishing this one seemed like a chore. So I stopped working on it and cleaned my brushes and haven't painted since. So far anyway.\\n\\nI realize that sounds rather wimpy. But attention is a zero sum game. If you can choose what to work on, and you choose a project that's not the best one (or at least a good one) for you, then it's getting in the way of another project that is. And at 50 there was some opportunity cost to screwing around.\\n\\nI started writing essays again, and wrote a bunch of new ones over the next few months. I even wrote a couple that weren't about startups. 
Then in March 2015 I started working on Lisp again.\\n\\nThe distinctive thing about Lisp is that its core is a language defined by writing an interpreter in itself. It wasn't originally intended as a programming language in the ordinary sense. It was meant to be a formal model of computation, an alternative to the Turing machine. If you want to write an interpreter for a language in itself, what's the minimum set of predefined operators you need? The Lisp that John McCarthy invented, or more accurately discovered, is an answer to that question. [19]\\n\\nMcCarthy didn't realize this Lisp could even be used to program computers\", doc_id='cfd50785-c54a-4e07-b474-561541968da9', embedding=None, doc_hash='b6524989f50c19316fcc4135d476deedc62b79ab141d9f650743a6fe5f3558c9', extra_info=None, node_info={'start': 60170, 'end': 64027, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: 'ab7ed037-4269-4593-b5ff-0ce3d9213cbd', <DocumentRelationship.NEXT: '3'>: '14d14357-e5cf-4015-8a65-2cc9fd3fb5c4'}), score=0.7906051406335054), NodeWithScore(node=Node(text='of customers almost entirely from among their batchmates.\\n\\nI had not originally intended YC to be a full-time job. I was going to do three things: hack, write essays, and work on YC. As YC grew, and I grew more excited about it, it started to take up a lot more than a third of my attention. But for the first few years I was still able to work on other things.\\n\\nIn the summer of 2006, Robert and I started working on a new version of Arc. This one was reasonably fast, because it was compiled into Scheme. To test this new Arc, I wrote Hacker News in it. It was originally meant to be a news aggregator for startup founders and was called Startup News, but after a few months I got tired of reading about nothing but startups. Plus it wasn\\'t startup founders we wanted to reach. It was future startup founders. 
So I changed the name to Hacker News and the topic to whatever engaged one\\'s intellectual curiosity.\\n\\nHN was no doubt good for YC, but it was also by far the biggest source of stress for me. If all I\\'d had to do was select and help founders, life would have been so easy. And that implies that HN was a mistake. Surely the biggest source of stress in one\\'s work should at least be something close to the core of the work. Whereas I was like someone who was in pain while running a marathon not from the exertion of running, but because I had a blister from an ill-fitting shoe. When I was dealing with some urgent problem during YC, there was about a 60% chance it had to do with HN, and a 40% chance it had do with everything else combined. [17]\\n\\nAs well as HN, I wrote all of YC\\'s internal software in Arc. But while I continued to work a good deal in Arc, I gradually stopped working on Arc, partly because I didn\\'t have time to, and partly because it was a lot less attractive to mess around with the language now that we had all this infrastructure depending on it. So now my three projects were reduced to two: writing essays and working on YC.\\n\\nYC was different from other kinds of work I\\'ve done. Instead of deciding for myself what to work on, the problems came to me. Every 6 months there was a new batch of startups, and their problems, whatever they were, became our problems. It was very engaging work, because their problems were quite varied, and the good founders were very effective. If you were trying to learn the most you could about startups in the shortest possible time, you couldn\\'t have picked a better way to do it.\\n\\nThere were parts of the job I didn\\'t like. Disputes between cofounders, figuring out when people were lying to us, fighting with people who maltreated the startups, and so on. But I worked hard even at the parts I didn\\'t like. 
I was haunted by something Kevin Hale once said about companies: \"No one works harder than the boss.\" He meant it both descriptively and prescriptively, and it was the second part that scared me. I wanted YC to be good, so if how hard I worked set the upper bound on how hard everyone else worked, I\\'d better work very hard.\\n\\nOne day in 2010, when he was visiting California for interviews, Robert Morris did something astonishing: he offered me unsolicited advice. I can only remember him doing that once before. One day at Viaweb, when I was bent over double from a kidney stone, he suggested that it would be a good idea for him to take me to the hospital. That was what it took for Rtm to offer unsolicited advice. So I remember his exact words very clearly. \"You know,\" he said, \"you should make sure Y Combinator isn\\'t the last cool thing you do.\"\\n\\nAt the time I didn\\'t understand what he meant, but gradually it dawned on me that he was saying I should quit. This seemed strange advice, because YC was doing great. But if there was one thing rarer than Rtm offering advice, it was Rtm being wrong. So this set me thinking. It was true that on my current trajectory, YC would be the last thing I did, because it was only taking up more of my attention. It had already eaten Arc, and was in the process of eating', doc_id='ab7ed037-4269-4593-b5ff-0ce3d9213cbd', embedding=None, doc_hash='28da476588fa6a7c04e3fc8d0c8490de5a6aa3f4b46ada11723bd524402b1d33', extra_info=None, node_info={'start': 56162, 'end': 60161, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: 'f9e74e9d-cdd9-43c4-8742-76c38200305f', <DocumentRelationship.NEXT: '3'>: 'cfd50785-c54a-4e07-b474-561541968da9'}), score=0.7863763143524524)], extra_info={'cfd50785-c54a-4e07-b474-561541968da9': None, 'ab7ed037-4269-4593-b5ff-0ce3d9213cbd': None})"
+                        ]
+                    },
+                    "execution_count": 15,
+                    "metadata": {},
+                    "output_type": "execute_result"
+                }
+            ],
+            "source": [
+                "query_engine.query(\"What did Paul Graham do after RICS?\")"
+            ]
+        },
+        {
+            "cell_type": "markdown",
+            "metadata": {},
+            "source": [
+                "#### PydanticMultiSelector\n",
+                "\n",
+                "In case you are expecting queries to be routed to multiple indexes, you should use a multi selector. The multi selector sends the query to multiple sub-indexes, and then aggregates all responses using a summary index to form a complete answer."
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 11,
+            "metadata": {},
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "> [build_index_from_nodes] Total LLM token usage: 0 tokens\n",
+                        "> [build_index_from_nodes] Total embedding token usage: 0 tokens\n"
+                    ]
+                }
+            ],
+            "source": [
+                "from llama_index import SimpleKeywordTableIndex\n",
+                "\n",
+                "keyword_index = SimpleKeywordTableIndex(nodes, storage_context=storage_context)\n",
+                "\n",
+                "keyword_tool = QueryEngineTool.from_defaults(\n",
+                "    query_engine=vector_query_engine,\n",
+                "    description=\"Useful for retrieving specific context using keywords from Paul Graham essay on What I Worked On.\",\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 12,
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "query_engine = RouterQueryEngine(\n",
+                "    selector=PydanticMultiSelector.from_defaults(),\n",
+                "    query_engine_tools=[\n",
+                "        list_tool,\n",
+                "        vector_tool,\n",
+                "        keyword_tool,\n",
+                "    ],\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 13,
+            "metadata": {},
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "Selecting query engine 1: Retrieving specific context from Paul Graham essay on What I Worked On..\n",
+                        "> [retrieve] Total LLM token usage: 0 tokens\n",
+                        "> [retrieve] Total embedding token usage: 18 tokens\n",
+                        "> [get_response] Total LLM token usage: 1995 tokens\n",
+                        "> [get_response] Total embedding token usage: 0 tokens\n",
+                        "Selecting query engine 2: Retrieving specific context using keywords from Paul Graham essay on What I Worked On..\n",
+                        "> [retrieve] Total LLM token usage: 0 tokens\n",
+                        "> [retrieve] Total embedding token usage: 0 tokens\n",
+                        "> [get_response] Total LLM token usage: 2055 tokens\n",
+                        "> [get_response] Total embedding token usage: 0 tokens\n",
+                        "> [build_index_from_nodes] Total LLM token usage: 0 tokens\n",
+                        "> [build_index_from_nodes] Total embedding token usage: 0 tokens\n",
+                        "> [get_response] Total LLM token usage: 658 tokens\n",
+                        "> [get_response] Total embedding token usage: 0 tokens\n",
+                        "> [get_response] Total LLM token usage: 658 tokens\n",
+                        "> [get_response] Total embedding token usage: 0 tokens\n"
+                    ]
+                },
+                {
+                    "data": {
+                        "text/plain": [
+                            "Response(response=\"\\nNotable events and people from the author's time at Interleaf and YC include: \\n\\nInterleaf: \\n- Founding of Interleaf in 1989\\n- Acquisition of Interleaf by Lernout & Hauspie in 1999\\n- The author's work on Lisp, which led to the development of the Lisp programming language. \\n- The author's work on Arc, which led to the development of the Hacker News website. \\n\\nYC: \\n- Founding of YC in 2005\\n- Launch of Hacker News in 2006\\n- Recruitment of Sam Altman as President in 2013\\n- The author's work with Robert Morris, Trevor Blackwell, and Jessica Livingston to create Y Combinator. \\n- The author's work with Sam Altman to reorganize YC and make it a lasting organization. \\n- The author's work with YC startups to help them succeed. \\n- The author's work on Hacker News, which became a major source of stress. \\n- The author's work on internal software for YC, written in Arc. \\n- The author's work with Kevin Hale, who offered the author unsolicited advice. 
\\n- The author's mother's stroke and death in 2012 and 2014 respectively\\n- Author's retirement from YC in 2014\\n- Author's decision to take up painting in 2014\\n- Author's return to writing essays and Lisp in 2015\", source_nodes=[NodeWithScore(node=Node(text=\"\\nNotable events and people from the author's time at Interleaf and YC include: \\n\\nInterleaf: \\n- Founding of Interleaf in 1989\\n- Acquisition of Interleaf by Lernout & Hauspie in 1999\\n\\nYC: \\n- Founding of YC in 2005\\n- Launch of Hacker News in 2006\\n- Recruitment of Sam Altman as President in 2013\\n- Author's mother's stroke and death in 2012 and 2014 respectively\\n- Author's retirement from YC in 2014\\n- Author's decision to take up painting in 2014\\n- Author's return to writing essays and Lisp in 2015\", doc_id='cd546791-d1e2-420a-9e9c-fde68d2d51dd', embedding=None, doc_hash='0e61517dfdb144c42c1251f3ed80d58fa2c3859a03f9d7a9ae92d513036690c5', extra_info=None, node_info={'start': 0, 'end': 498, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: '4183ef8b-b14b-4c73-9754-864d64842c1b'}), score=None), NodeWithScore(node=Node(text=\"\\nNotable events and people from the author's time at Interleaf and YC include: \\n\\nInterleaf: \\n- The author's work on Lisp, which led to the development of the Lisp programming language. \\n- The author's work on Arc, which led to the development of the Hacker News website. \\n\\nYC: \\n- The author's work with Robert Morris, Trevor Blackwell, and Jessica Livingston to create Y Combinator. \\n- The author's work with Sam Altman to reorganize YC and make it a lasting organization. \\n- The author's work with YC startups to help them succeed. \\n- The author's work on Hacker News, which became a major source of stress. \\n- The author's work on internal software for YC, written in Arc. 
\\n- The author's work with Kevin Hale, who offered the author unsolicited advice.\", doc_id='cee04688-dbe7-4749-809e-5a3723e61ac7', embedding=None, doc_hash='246f0f5349eab9d4639f1584170456843b8bd47fcf2862c88437e976309e3a57', extra_info=None, node_info={'start': 0, 'end': 755, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: '283de7d5-81ed-4dcc-bc5e-98fe4494c19c'}), score=None)], extra_info={'cd546791-d1e2-420a-9e9c-fde68d2d51dd': None, 'cee04688-dbe7-4749-809e-5a3723e61ac7': None})"
+                        ]
+                    },
+                    "execution_count": 13,
+                    "metadata": {},
+                    "output_type": "execute_result"
+                }
+            ],
+            "source": [
+                "# This query could use either a keyword or vector query engine, so it will combine responses from both\n",
+                "query_engine.query(\n",
+                "    \"What were noteable events and people from the authors time at Interleaf and YC?\"\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "metadata": {},
+            "outputs": [],
+            "source": []
+        }
+    ],
+    "metadata": {
+        "kernelspec": {
+            "display_name": "Python 3 (ipykernel)",
+            "language": "python",
+            "name": "python3"
+        },
+        "language_info": {
+            "codemirror_mode": {
+                "name": "ipython",
+                "version": 3
+            },
+            "file_extension": ".py",
+            "mimetype": "text/x-python",
+            "name": "python",
+            "nbconvert_exporter": "python",
+            "pygments_lexer": "ipython3",
+            "version": "3.10.10"
+        }
     },
-    {
-     "data": {
-      "text/plain": [
-       "Response(response=\"\\nThis document is a reflection on the author's experiences with computers and writing, from his early days of programming on an IBM 1401 to his more recent work on a web application builder. He recounts his experiences with programming, painting, and starting companies, and how he eventually came to write essays about his life and the choices he made.\", source_nodes=[NodeWithScore(node=Node(text='\\t\\t\\n\\nWhat I Worked On\\n\\nFebruary 2021\\n\\nBefore college the two main things I worked on, outside of school, were writing and programming. I didn\\'t write essays. I wrote what beginning writers were supposed to write then, and probably still are: short stories. My stories were awful. They had hardly any plot, just characters with strong feelings, which I imagined made them deep.\\n\\nThe first programs I tried writing were on the IBM 1401 that our school district used for what was then called \"data processing.\" This was in 9th grade, so I was 13 or 14. The school district\\'s 1401 happened to be in the basement of our junior high school, and my friend Rich Draves and I got permission to use it. It was like a mini Bond villain\\'s lair down there, with all these alien-looking machines — CPU, disk drives, printer, card reader — sitting up on a raised floor under bright fluorescent lights.\\n\\nThe language we used was an early version of Fortran. You had to type programs on punch cards, then stack them in the card reader and press a button to load the program into memory and run it. The result would ordinarily be to print something on the spectacularly loud printer.\\n\\nI was puzzled by the 1401. I couldn\\'t figure out what to do with it. And in retrospect there\\'s not much I could have done with it. The only form of input to programs was data stored on punched cards, and I didn\\'t have any data stored on punched cards. 
The only other option was to do things that didn\\'t rely on any input, like calculate approximations of pi, but I didn\\'t know enough math to do anything interesting of that type. So I\\'m not surprised I can\\'t remember any programs I wrote, because they can\\'t have done much. My clearest memory is of the moment I learned it was possible for programs not to terminate, when one of mine didn\\'t. On a machine without time-sharing, this was a social as well as a technical error, as the data center manager\\'s expression made clear.\\n\\nWith microcomputers, everything changed. Now you could have a computer sitting right in front of you, on a desk, that could respond to your keystrokes as it was running instead of just churning through a stack of punch cards and then stopping. [1]\\n\\nThe first of my friends to get a microcomputer built it himself. It was sold as a kit by Heathkit. I remember vividly how impressed and envious I felt watching him sitting in front of it, typing programs right into the computer.\\n\\nComputers were expensive in those days and it took me years of nagging before I convinced my father to buy one, a TRS-80, in about 1980. The gold standard then was the Apple II, but a TRS-80 was good enough. This was when I really started programming. I wrote simple games, a program to predict how high my model rockets would fly, and a word processor that my father used to write at least one book. There was only room in memory for about 2 pages of text, so he\\'d write 2 pages at a time and then print them out, but it was a lot better than a typewriter.\\n\\nThough I liked programming, I didn\\'t plan to study it in college. In college I was going to study philosophy, which sounded much more powerful. It seemed, to my naive high school self, to be the study of the ultimate truths, compared to which the things studied in other fields would be mere domain knowledge. 
What I discovered when I got to college was that the other fields took up so much of the space of ideas that there wasn\\'t much left for these supposed ultimate truths. All that seemed left for philosophy were edge cases that people in other fields felt could safely be ignored.\\n\\nI couldn\\'t have put this into words when I was 18. All I knew at the time was that I kept taking philosophy courses and they kept being boring. So I decided to switch to AI.\\n\\nAI was in the air in the mid 1980s, but there were two things especially that made me want to work on it: a novel by Heinlein called The Moon is a Harsh Mistress, which featured an intelligent computer called Mike, and a PBS documentary that showed Terry Winograd using SHRDLU. I haven\\'t tried', doc_id='a0e4f5a8-f852-4807-96f2-76721dc1e57d', embedding=None, doc_hash='694feb4edd2c0029159c7f9faf46df308177a12658e50be42d3a8cb434bef168', extra_info=None, node_info={'start': 0, 'end': 4040, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.NEXT: '3'>: '1992e045-1032-432c-ac01-42947c0727e9'}), score=None), NodeWithScore(node=Node(text='documentary that showed Terry Winograd using SHRDLU. I haven\\'t tried rereading The Moon is a Harsh Mistress, so I don\\'t know how well it has aged, but when I read it I was drawn entirely into its world. It seemed only a matter of time before we\\'d have Mike, and when I saw Winograd using SHRDLU, it seemed like that time would be a few years at most. All you had to do was teach SHRDLU more words.\\n\\nThere weren\\'t any classes in AI at Cornell then, not even graduate classes, so I started trying to teach myself. Which meant learning Lisp, since in those days Lisp was regarded as the language of AI. The commonly used programming languages then were pretty primitive, and programmers\\' ideas correspondingly so. 
The default language at Cornell was a Pascal-like language called PL/I, and the situation was similar elsewhere. Learning Lisp expanded my concept of a program so fast that it was years before I started to have a sense of where the new limits were. This was more like it; this was what I had expected college to do. It wasn\\'t happening in a class, like it was supposed to, but that was ok. For the next couple years I was on a roll. I knew what I was going to do.\\n\\nFor my undergraduate thesis, I reverse-engineered SHRDLU. My God did I love working on that program. It was a pleasing bit of code, but what made it even more exciting was my belief — hard to imagine now, but not unique in 1985 — that it was already climbing the lower slopes of intelligence.\\n\\nI had gotten into a program at Cornell that didn\\'t make you choose a major. You could take whatever classes you liked, and choose whatever you liked to put on your degree. I of course chose \"Artificial Intelligence.\" When I got the actual physical diploma, I was dismayed to find that the quotes had been included, which made them read as scare-quotes. At the time this bothered me, but now it seems amusingly accurate, for reasons I was about to discover.\\n\\nI applied to 3 grad schools: MIT and Yale, which were renowned for AI at the time, and Harvard, which I\\'d visited because Rich Draves went there, and was also home to Bill Woods, who\\'d invented the type of parser I used in my SHRDLU clone. Only Harvard accepted me, so that was where I went.\\n\\nI don\\'t remember the moment it happened, or if there even was a specific moment, but during the first year of grad school I realized that AI, as practiced at the time, was a hoax. 
By which I mean the sort of AI in which a program that\\'s told \"the dog is sitting on the chair\" translates this into some formal representation and adds it to the list of things it knows.\\n\\nWhat these programs really showed was that there\\'s a subset of natural language that\\'s a formal language. But a very proper subset. It was clear that there was an unbridgeable gap between what they could do and actually understanding natural language. It was not, in fact, simply a matter of teaching SHRDLU more words. That whole way of doing AI, with explicit data structures representing concepts, was not going to work. Its brokenness did, as so often happens, generate a lot of opportunities to write papers about various band-aids that could be applied to it, but it was never going to get us Mike.\\n\\nSo I looked around to see what I could salvage from the wreckage of my plans, and there was Lisp. I knew from experience that Lisp was interesting for its own sake and not just for its association with AI, even though that was the main reason people cared about it at the time. So I decided to focus on Lisp. In fact, I decided to write a book about Lisp hacking. It\\'s scary to think how little I knew about Lisp hacking when I started writing that book. But there\\'s nothing like writing a book about something to help you learn it. The book, On Lisp, wasn\\'t published till 1993, but I wrote much of it in grad school.\\n\\nComputer Science is an uneasy alliance between two halves, theory and systems. 
The', doc_id='1992e045-1032-432c-ac01-42947c0727e9', embedding=None, doc_hash='a8b756a8ec7638539582ead732467c48ac3d7bdbfc15e068dd041d4c9582e497', extra_info=None, node_info={'start': 3982, 'end': 7880, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: 'a0e4f5a8-f852-4807-96f2-76721dc1e57d', <DocumentRelationship.NEXT: '3'>: 'e06dfa02-faa5-4617-9d58-00ae88433c58'}), score=None), NodeWithScore(node=Node(text='Science is an uneasy alliance between two halves, theory and systems. The theory people prove things, and the systems people build things. I wanted to build things. I had plenty of respect for theory — indeed, a sneaking suspicion that it was the more admirable of the two halves — but building things seemed so much more exciting.\\n\\nThe problem with systems work, though, was that it didn\\'t last. Any program you wrote today, no matter how good, would be obsolete in a couple decades at best. People might mention your software in footnotes, but no one would actually use it. And indeed, it would seem very feeble work. Only people with a sense of the history of the field would even realize that, in its time, it had been good.\\n\\nThere were some surplus Xerox Dandelions floating around the computer lab at one point. Anyone who wanted one to play around with could have one. I was briefly tempted, but they were so slow by present standards; what was the point? No one else wanted one either, so off they went. That was what happened to systems work.\\n\\nI wanted not just to build things, but to build things that would last.\\n\\nIn this dissatisfied state I went in 1988 to visit Rich Draves at CMU, where he was in grad school. One day I went to visit the Carnegie Institute, where I\\'d spent a lot of time as a kid. While looking at a painting there I realized something that might seem obvious, but was a big surprise to me. 
There, right on the wall, was something you could make that would last. Paintings didn\\'t become obsolete. Some of the best ones were hundreds of years old.\\n\\nAnd moreover this was something you could make a living doing. Not as easily as you could by writing software, of course, but I thought if you were really industrious and lived really cheaply, it had to be possible to make enough to survive. And as an artist you could be truly independent. You wouldn\\'t have a boss, or even need to get research funding.\\n\\nI had always liked looking at paintings. Could I make them? I had no idea. I\\'d never imagined it was even possible. I knew intellectually that people made art — that it didn\\'t just appear spontaneously — but it was as if the people who made it were a different species. They either lived long ago or were mysterious geniuses doing strange things in profiles in Life magazine. The idea of actually being able to make art, to put that verb before that noun, seemed almost miraculous.\\n\\nThat fall I started taking art classes at Harvard. Grad students could take classes in any department, and my advisor, Tom Cheatham, was very easy going. If he even knew about the strange classes I was taking, he never said anything.\\n\\nSo now I was in a PhD program in computer science, yet planning to be an artist, yet also genuinely in love with Lisp hacking and working away at On Lisp. In other words, like many a grad student, I was working energetically on multiple projects that were not my thesis.\\n\\nI didn\\'t see a way out of this situation. I didn\\'t want to drop out of grad school, but how else was I going to get out? I remember when my friend Robert Morris got kicked out of Cornell for writing the internet worm of 1988, I was envious that he\\'d found such a spectacular way to get out of grad school.\\n\\nThen one day in April 1990 a crack appeared in the wall. I ran into professor Cheatham and he asked if I was far enough along to graduate that June. 
I didn\\'t have a word of my dissertation written, but in what must have been the quickest bit of thinking in my life, I decided to take a shot at writing one in the 5 weeks or so that remained before the deadline, reusing parts of On Lisp where I could, and I was able to respond, with no perceptible delay \"Yes, I think so. I\\'ll give you something to read in a few days.\"\\n\\nI picked applications of continuations as the topic. In retrospect I should have written about macros and embedded languages. There\\'s a whole world there that\\'s barely been explored. But all I wanted was to get out of grad school, and my rapidly written', doc_id='e06dfa02-faa5-4617-9d58-00ae88433c58', embedding=None, doc_hash='eb0b51b99800feed137ff8ae00239c9f87ae534d686eaa5684651d0a0f641742', extra_info=None, node_info={'start': 7878, 'end': 11887, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: '1992e045-1032-432c-ac01-42947c0727e9', <DocumentRelationship.NEXT: '3'>: '0fc849a1-2421-414e-9f83-f39e3ac47e41'}), score=None), NodeWithScore(node=Node(text=\"But all I wanted was to get out of grad school, and my rapidly written dissertation sufficed, just barely.\\n\\nMeanwhile I was applying to art schools. I applied to two: RISD in the US, and the Accademia di Belli Arti in Florence, which, because it was the oldest art school, I imagined would be good. RISD accepted me, and I never heard back from the Accademia, so off to Providence I went.\\n\\nI'd applied for the BFA program at RISD, which meant in effect that I had to go to college again. This was not as strange as it sounds, because I was only 25, and art schools are full of people of different ages. RISD counted me as a transfer sophomore and said I had to do the foundation that summer. 
The foundation means the classes that everyone has to take in fundamental subjects like drawing, color, and design.\\n\\nToward the end of the summer I got a big surprise: a letter from the Accademia, which had been delayed because they'd sent it to Cambridge England instead of Cambridge Massachusetts, inviting me to take the entrance exam in Florence that fall. This was now only weeks away. My nice landlady let me leave my stuff in her attic. I had some money saved from consulting work I'd done in grad school; there was probably enough to last a year if I lived cheaply. Now all I had to do was learn Italian.\\n\\nOnly stranieri (foreigners) had to take this entrance exam. In retrospect it may well have been a way of excluding them, because there were so many stranieri attracted by the idea of studying art in Florence that the Italian students would otherwise have been outnumbered. I was in decent shape at painting and drawing from the RISD foundation that summer, but I still don't know how I managed to pass the written exam. I remember that I answered the essay question by writing about Cezanne, and that I cranked up the intellectual level as high as I could to make the most of my limited vocabulary. [2]\\n\\nI'm only up to age 25 and already there are such conspicuous patterns. Here I was, yet again about to attend some august institution in the hopes of learning about some prestigious subject, and yet again about to be disappointed. The students and faculty in the painting department at the Accademia were the nicest people you could imagine, but they had long since arrived at an arrangement whereby the students wouldn't require the faculty to teach anything, and in return the faculty wouldn't require the students to learn anything. And at the same time all involved would adhere outwardly to the conventions of a 19th century atelier. 
We actually had one of those little stoves, fed with kindling, that you see in 19th century studio paintings, and a nude model sitting as close to it as possible without getting burned. Except hardly anyone else painted her besides me. The rest of the students spent their time chatting or occasionally trying to imitate things they'd seen in American art magazines.\\n\\nOur model turned out to live just down the street from me. She made a living from a combination of modelling and making fakes for a local antique dealer. She'd copy an obscure old painting out of a book, and then he'd take the copy and maltreat it to make it look old. [3]\\n\\nWhile I was a student at the Accademia I started painting still lives in my bedroom at night. These paintings were tiny, because the room was, and because I painted them on leftover scraps of canvas, which was all I could afford at the time. Painting still lives is different from painting people, because the subject, as its name suggests, can't move. People can't sit for more than about 15 minutes at a time, and when they do they don't sit very still. So the traditional m.o. for painting people is to know how to paint a generic person, which you then modify to match the specific person you're painting. Whereas a still life you can, if you want, copy pixel by pixel from what you're seeing. 
You don't want to stop there, of course, or you get merely photographic accuracy, and what makes a still life interesting is that\", doc_id='0fc849a1-2421-414e-9f83-f39e3ac47e41', embedding=None, doc_hash='55796dd9e91b31dd897144f980f8536700eb3febd0f6bc5e732db0b2b754cf42', extra_info=None, node_info={'start': 11894, 'end': 15911, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: 'e06dfa02-faa5-4617-9d58-00ae88433c58', <DocumentRelationship.NEXT: '3'>: '8e0bca16-b087-489b-983a-5beaaf393f64'}), score=None), NodeWithScore(node=Node(text='or you get merely photographic accuracy, and what makes a still life interesting is that it\\'s been through a head. You want to emphasize the visual cues that tell you, for example, that the reason the color changes suddenly at a certain point is that it\\'s the edge of an object. By subtly emphasizing such things you can make paintings that are more realistic than photographs not just in some metaphorical sense, but in the strict information-theoretic sense. [4]\\n\\nI liked painting still lives because I was curious about what I was seeing. In everyday life, we aren\\'t consciously aware of much we\\'re seeing. Most visual perception is handled by low-level processes that merely tell your brain \"that\\'s a water droplet\" without telling you details like where the lightest and darkest points are, or \"that\\'s a bush\" without telling you the shape and position of every leaf. This is a feature of brains, not a bug. In everyday life it would be distracting to notice every leaf on every bush. But when you have to paint something, you have to look more closely, and when you do there\\'s a lot to see. 
You can still be noticing new things after days of trying to paint something people usually take for granted, just as you can after days of trying to write an essay about something people usually take for granted.\\n\\nThis is not the only way to paint. I\\'m not 100% sure it\\'s even a good way to paint. But it seemed a good enough bet to be worth trying.\\n\\nOur teacher, professor Ulivi, was a nice guy. He could see I worked hard, and gave me a good grade, which he wrote down in a sort of passport each student had. But the Accademia wasn\\'t teaching me anything except Italian, and my money was running out, so at the end of the first year I went back to the US.\\n\\nI wanted to go back to RISD, but I was now broke and RISD was very expensive, so I decided to get a job for a year and then return to RISD the next fall. I got one at a company called Interleaf, which made software for creating documents. You mean like Microsoft Word? Exactly. That was how I learned that low end software tends to eat high end software. But Interleaf still had a few years to live yet. [5]\\n\\nInterleaf had done something pretty bold. Inspired by Emacs, they\\'d added a scripting language, and even made the scripting language a dialect of Lisp. Now they wanted a Lisp hacker to write things in it. This was the closest thing I\\'ve had to a normal job, and I hereby apologize to my boss and coworkers, because I was a bad employee. Their Lisp was the thinnest icing on a giant C cake, and since I didn\\'t know C and didn\\'t want to learn it, I never understood most of the software. Plus I was terribly irresponsible. This was back when a programming job meant showing up every day during certain working hours. That seemed unnatural to me, and on this point the rest of the world is coming around to my way of thinking, but at the time it caused a lot of friction. 
Toward the end of the year I spent much of my time surreptitiously working on On Lisp, which I had by this time gotten a contract to publish.\\n\\nThe good part was that I got paid huge amounts of money, especially by art student standards. In Florence, after paying my part of the rent, my budget for everything else had been $7 a day. Now I was getting paid more than 4 times that every hour, even when I was just sitting in a meeting. By living cheaply I not only managed to save enough to go back to RISD, but also paid off my college loans.\\n\\nI learned some useful things at Interleaf, though they were mostly about what not to do. I learned that it\\'s better for technology companies to be run by product people than sales people (though sales is a real skill and people who are good at it are really good at it), that it leads to bugs when code is edited by too many people, that cheap office space is no bargain if it\\'s depressing, that planned meetings are inferior to corridor conversations, that big, bureaucratic customers are a dangerous source of money, and that there\\'s not much overlap between conventional', doc_id='8e0bca16-b087-489b-983a-5beaaf393f64', embedding=None, doc_hash='381c03ecd6edff8260cd955e3762b2c94431d15ae62b58591ab3f9fc9342ed58', extra_info=None, node_info={'start': 15894, 'end': 19945, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: '0fc849a1-2421-414e-9f83-f39e3ac47e41', <DocumentRelationship.NEXT: '3'>: '855d5459-1cfe-465e-8c94-f9a1d047f821'}), score=None), NodeWithScore(node=Node(text='are a dangerous source of money, and that there\\'s not much overlap between conventional office hours and the optimal time for hacking, or conventional offices and the optimal place for it.\\n\\nBut the most important thing I learned, and which I used in both Viaweb and Y Combinator, is that the low end eats the high end: that it\\'s good 
to be the \"entry level\" option, even though that will be less prestigious, because if you\\'re not, someone else will be, and will squash you against the ceiling. Which in turn means that prestige is a danger sign.\\n\\nWhen I left to go back to RISD the next fall, I arranged to do freelance work for the group that did projects for customers, and this was how I survived for the next several years. When I came back to visit for a project later on, someone told me about a new thing called HTML, which was, as he described it, a derivative of SGML. Markup language enthusiasts were an occupational hazard at Interleaf and I ignored him, but this HTML thing later became a big part of my life.\\n\\nIn the fall of 1992 I moved back to Providence to continue at RISD. The foundation had merely been intro stuff, and the Accademia had been a (very civilized) joke. Now I was going to see what real art school was like. But alas it was more like the Accademia than not. Better organized, certainly, and a lot more expensive, but it was now becoming clear that art school did not bear the same relationship to art that medical school bore to medicine. At least not the painting department. The textile department, which my next door neighbor belonged to, seemed to be pretty rigorous. No doubt illustration and architecture were too. But painting was post-rigorous. Painting students were supposed to express themselves, which to the more worldly ones meant to try to cook up some sort of distinctive signature style.\\n\\nA signature style is the visual equivalent of what in show business is known as a \"schtick\": something that immediately identifies the work as yours and no one else\\'s. For example, when you see a painting that looks like a certain kind of cartoon, you know it\\'s by Roy Lichtenstein. So if you see a big painting of this type hanging in the apartment of a hedge fund manager, you know he paid millions of dollars for it. 
That\\'s not always why artists have a signature style, but it\\'s usually why buyers pay a lot for such work. [6]\\n\\nThere were plenty of earnest students too: kids who \"could draw\" in high school, and now had come to what was supposed to be the best art school in the country, to learn to draw even better. They tended to be confused and demoralized by what they found at RISD, but they kept going, because painting was what they did. I was not one of the kids who could draw in high school, but at RISD I was definitely closer to their tribe than the tribe of signature style seekers.\\n\\nI learned a lot in the color class I took at RISD, but otherwise I was basically teaching myself to paint, and I could do that for free. So in 1993 I dropped out. I hung around Providence for a bit, and then my college friend Nancy Parmet did me a big favor. A rent-controlled apartment in a building her mother owned in New York was becoming vacant. Did I want it? It wasn\\'t much more than my current place, and New York was supposed to be where the artists were. So yes, I wanted it! [7]\\n\\nAsterix comics begin by zooming in on a tiny corner of Roman Gaul that turns out not to be controlled by the Romans. You can do something similar on a map of New York City: if you zoom in on the Upper East Side, there\\'s a tiny corner that\\'s not rich, or at least wasn\\'t in 1993. It\\'s called Yorkville, and that was my new home. Now I was a New York artist — in the strictly technical sense of making paintings and living in New York.\\n\\nI was nervous about money, because I could sense that Interleaf was on the way down. 
Freelance Lisp hacking work was very rare, and I', doc_id='855d5459-1cfe-465e-8c94-f9a1d047f821', embedding=None, doc_hash='3a298b8e6f42c9af6356b611a9cbe37172455b8047ba2009f49b910488e96f8d', extra_info=None, node_info={'start': 19946, 'end': 23848, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: '8e0bca16-b087-489b-983a-5beaaf393f64', <DocumentRelationship.NEXT: '3'>: '3b199ba9-d04b-473a-8c73-39c293638957'}), score=None), NodeWithScore(node=Node(text='on the way down. Freelance Lisp hacking work was very rare, and I didn\\'t want to have to program in another language, which in those days would have meant C++ if I was lucky. So with my unerring nose for financial opportunity, I decided to write another book on Lisp. This would be a popular book, the sort of book that could be used as a textbook. I imagined myself living frugally off the royalties and spending all my time painting. (The painting on the cover of this book, ANSI Common Lisp, is one that I painted around this time.)\\n\\nThe best thing about New York for me was the presence of Idelle and Julian Weber. Idelle Weber was a painter, one of the early photorealists, and I\\'d taken her painting class at Harvard. I\\'ve never known a teacher more beloved by her students. Large numbers of former students kept in touch with her, including me. After I moved to New York I became her de facto studio assistant.\\n\\nShe liked to paint on big, square canvases, 4 to 5 feet on a side. One day in late 1994 as I was stretching one of these monsters there was something on the radio about a famous fund manager. He wasn\\'t that much older than me, and was super rich. The thought suddenly occurred to me: why don\\'t I become rich? Then I\\'ll be able to work on whatever I want.\\n\\nMeanwhile I\\'d been hearing more and more about this new thing called the World Wide Web. 
Robert Morris showed it to me when I visited him in Cambridge, where he was now in grad school at Harvard. It seemed to me that the web would be a big deal. I\\'d seen what graphical user interfaces had done for the popularity of microcomputers. It seemed like the web would do the same for the internet.\\n\\nIf I wanted to get rich, here was the next train leaving the station. I was right about that part. What I got wrong was the idea. I decided we should start a company to put art galleries online. I can\\'t honestly say, after reading so many Y Combinator applications, that this was the worst startup idea ever, but it was up there. Art galleries didn\\'t want to be online, and still don\\'t, not the fancy ones. That\\'s not how they sell. I wrote some software to generate web sites for galleries, and Robert wrote some to resize images and set up an http server to serve the pages. Then we tried to sign up galleries. To call this a difficult sale would be an understatement. It was difficult to give away. A few galleries let us make sites for them for free, but none paid us.\\n\\nThen some online stores started to appear, and I realized that except for the order buttons they were identical to the sites we\\'d been generating for galleries. This impressive-sounding thing called an \"internet storefront\" was something we already knew how to build.\\n\\nSo in the summer of 1995, after I submitted the camera-ready copy of ANSI Common Lisp to the publishers, we started trying to write software to build online stores. At first this was going to be normal desktop software, which in those days meant Windows software. That was an alarming prospect, because neither of us knew how to write Windows software or wanted to learn. We lived in the Unix world. But we decided we\\'d at least try writing a prototype store builder on Unix. 
Robert wrote a shopping cart, and I wrote a new site generator for stores — in Lisp, of course.\\n\\nWe were working out of Robert\\'s apartment in Cambridge. His roommate was away for big chunks of time, during which I got to sleep in his room. For some reason there was no bed frame or sheets, just a mattress on the floor. One morning as I was lying on this mattress I had an idea that made me sit up like a capital L. What if we ran the software on the server, and let users control it by clicking on links? Then we\\'d never have to write anything to run on users\\' computers. We could generate the sites on the same server we\\'d serve them from. Users wouldn\\'t need anything more than a browser.\\n\\nThis kind of software, known as a web app, is common now, but at the time it wasn\\'t clear that it was even possible. To find out, we decided to try', doc_id='3b199ba9-d04b-473a-8c73-39c293638957', embedding=None, doc_hash='193c210e6a2567bac7400ec1d44e0bcc00777378664c0e3d08495baf7db4d9ac', extra_info=None, node_info={'start': 23870, 'end': 27897, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: '855d5459-1cfe-465e-8c94-f9a1d047f821', <DocumentRelationship.NEXT: '3'>: '669c0218-b2c1-428b-808c-f5408e52dcdf'}), score=None), NodeWithScore(node=Node(text='wasn\\'t clear that it was even possible. To find out, we decided to try making a version of our store builder that you could control through the browser. A couple days later, on August 12, we had one that worked. The UI was horrible, but it proved you could build a whole store through the browser, without any client software or typing anything into the command line on the server.\\n\\nNow we felt like we were really onto something. I had visions of a whole new generation of software working this way. You wouldn\\'t need versions, or ports, or any of that crap. 
At Interleaf there had been a whole group called Release Engineering that seemed to be at least as big as the group that actually wrote the software. Now you could just update the software right on the server.\\n\\nWe started a new company we called Viaweb, after the fact that our software worked via the web, and we got $10,000 in seed funding from Idelle\\'s husband Julian. In return for that and doing the initial legal work and giving us business advice, we gave him 10% of the company. Ten years later this deal became the model for Y Combinator\\'s. We knew founders needed something like this, because we\\'d needed it ourselves.\\n\\nAt this stage I had a negative net worth, because the thousand dollars or so I had in the bank was more than counterbalanced by what I owed the government in taxes. (Had I diligently set aside the proper proportion of the money I\\'d made consulting for Interleaf? No, I had not.) So although Robert had his graduate student stipend, I needed that seed funding to live on.\\n\\nWe originally hoped to launch in September, but we got more ambitious about the software as we worked on it. Eventually we managed to build a WYSIWYG site builder, in the sense that as you were creating pages, they looked exactly like the static ones that would be generated later, except that instead of leading to static pages, the links all referred to closures stored in a hash table on the server.\\n\\nIt helped to have studied art, because the main goal of an online store builder is to make users look legit, and the key to looking legit is high production values. If you get page layouts and fonts and colors right, you can make a guy running a store out of his bedroom look more legit than a big company.\\n\\n(If you\\'re curious why my site looks so old-fashioned, it\\'s because it\\'s still made with this software. It may look clunky today, but in 1996 it was the last word in slick.)\\n\\nIn September, Robert rebelled. 
\"We\\'ve been working on this for a month,\" he said, \"and it\\'s still not done.\" This is funny in retrospect, because he would still be working on it almost 3 years later. But I decided it might be prudent to recruit more programmers, and I asked Robert who else in grad school with him was really good. He recommended Trevor Blackwell, which surprised me at first, because at that point I knew Trevor mainly for his plan to reduce everything in his life to a stack of notecards, which he carried around with him. But Rtm was right, as usual. Trevor turned out to be a frighteningly effective hacker.\\n\\nIt was a lot of fun working with Robert and Trevor. They\\'re the two most independent-minded people I know, and in completely different ways. If you could see inside Rtm\\'s brain it would look like a colonial New England church, and if you could see inside Trevor\\'s it would look like the worst excesses of Austrian Rococo.\\n\\nWe opened for business, with 6 stores, in January 1996. It was just as well we waited a few months, because although we worried we were late, we were actually almost fatally early. There was a lot of talk in the press then about ecommerce, but not many people actually wanted online stores. [8]\\n\\nThere were three main parts to the software: the editor, which people used to build sites and which I wrote, the shopping cart, which Robert wrote, and the manager, which kept track of orders and statistics, and which Trevor wrote. In its time, the editor was one of the best general-purpose site builders. I kept the code tight and didn\\'t have to integrate with any other software except Robert\\'s and Trevor\\'s, so it was quite fun to work on. 
If all I\\'d had to do was', doc_id='669c0218-b2c1-428b-808c-f5408e52dcdf', embedding=None, doc_hash='fd9eb9ceabf16d661afa8f19d64b256664bbc61c886292817fd92dc2456b8eaa', extra_info=None, node_info={'start': 27894, 'end': 32060, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: '3b199ba9-d04b-473a-8c73-39c293638957', <DocumentRelationship.NEXT: '3'>: 'eb14dc48-f3a1-461d-9a49-0d78331dd083'}), score=None), NodeWithScore(node=Node(text='so it was quite fun to work on. If all I\\'d had to do was work on this software, the next 3 years would have been the easiest of my life. Unfortunately I had to do a lot more, all of it stuff I was worse at than programming, and the next 3 years were instead the most stressful.\\n\\nThere were a lot of startups making ecommerce software in the second half of the 90s. We were determined to be the Microsoft Word, not the Interleaf. Which meant being easy to use and inexpensive. It was lucky for us that we were poor, because that caused us to make Viaweb even more inexpensive than we realized. We charged $100 a month for a small store and $300 a month for a big one. This low price was a big attraction, and a constant thorn in the sides of competitors, but it wasn\\'t because of some clever insight that we set the price low. We had no idea what businesses paid for things. $300 a month seemed like a lot of money to us.\\n\\nWe did a lot of things right by accident like that. For example, we did what\\'s now called \"doing things that don\\'t scale,\" although at the time we would have described it as \"being so lame that we\\'re driven to the most desperate measures to get users.\" The most common of which was building stores for them. This seemed particularly humiliating, since the whole raison d\\'etre of our software was that people could use it to make their own stores. 
But anything to get users.\\n\\nWe learned a lot more about retail than we wanted to know. For example, that if you could only have a small image of a man\\'s shirt (and all images were small then by present standards), it was better to have a closeup of the collar than a picture of the whole shirt. The reason I remember learning this was that it meant I had to rescan about 30 images of men\\'s shirts. My first set of scans were so beautiful too.\\n\\nThough this felt wrong, it was exactly the right thing to be doing. Building stores for users taught us about retail, and about how it felt to use our software. I was initially both mystified and repelled by \"business\" and thought we needed a \"business person\" to be in charge of it, but once we started to get users, I was converted, in much the same way I was converted to fatherhood once I had kids. Whatever users wanted, I was all theirs. Maybe one day we\\'d have so many users that I couldn\\'t scan their images for them, but in the meantime there was nothing more important to do.\\n\\nAnother thing I didn\\'t get at the time is that growth rate is the ultimate test of a startup. Our growth rate was fine. We had about 70 stores at the end of 1996 and about 500 at the end of 1997. I mistakenly thought the thing that mattered was the absolute number of users. And that is the thing that matters in the sense that that\\'s how much money you\\'re making, and if you\\'re not making enough, you might go out of business. But in the long term the growth rate takes care of the absolute number. If we\\'d been a startup I was advising at Y Combinator, I would have said: Stop being so stressed out, because you\\'re doing fine. You\\'re growing 7x a year. Just don\\'t hire too many more people and you\\'ll soon be profitable, and then you\\'ll control your own destiny.\\n\\nAlas I hired lots more people, partly because our investors wanted me to, and partly because that\\'s what startups did during the Internet Bubble. 
A company with just a handful of employees would have seemed amateurish. So we didn\\'t reach breakeven until about when Yahoo bought us in the summer of 1998. Which in turn meant we were at the mercy of investors for the entire life of the company. And since both we and our investors were noobs at startups, the result was a mess even by startup standards.\\n\\nIt was a huge relief when Yahoo bought us. In principle our Viaweb stock was valuable. It was a share in a business that was profitable and growing rapidly. But it didn\\'t feel very valuable to me; I had no idea how to value a business, but I was all too keenly aware of the near-death experiences we seemed to have every few months. Nor had I changed my grad student lifestyle significantly since we started. So when', doc_id='eb14dc48-f3a1-461d-9a49-0d78331dd083', embedding=None, doc_hash='9185047072ffc2d0a80db719ed6fa93a69773224a62bbe5c7a8f59eaed4e80e1', extra_info=None, node_info={'start': 32076, 'end': 36162, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: '669c0218-b2c1-428b-808c-f5408e52dcdf', <DocumentRelationship.NEXT: '3'>: '99b3c4c2-aeab-4f5a-9474-916f6c35c9fb'}), score=None), NodeWithScore(node=Node(text=\"few months. Nor had I changed my grad student lifestyle significantly since we started. So when Yahoo bought us it felt like going from rags to riches. Since we were going to California, I bought a car, a yellow 1998 VW GTI. I remember thinking that its leather seats alone were by far the most luxurious thing I owned.\\n\\nThe next year, from the summer of 1998 to the summer of 1999, must have been the least productive of my life. I didn't realize it at the time, but I was worn out from the effort and stress of running Viaweb. For a while after I got to California I tried to continue my usual m.o. 
of programming till 3 in the morning, but fatigue combined with Yahoo's prematurely aged culture and grim cube farm in Santa Clara gradually dragged me down. After a few months it felt disconcertingly like working at Interleaf.\\n\\nYahoo had given us a lot of options when they bought us. At the time I thought Yahoo was so overvalued that they'd never be worth anything, but to my astonishment the stock went up 5x in the next year. I hung on till the first chunk of options vested, then in the summer of 1999 I left. It had been so long since I'd painted anything that I'd half forgotten why I was doing this. My brain had been entirely full of software and men's shirts for 4 years. But I had done this to get rich so I could paint, I reminded myself, and now I was rich, so I should go paint.\\n\\nWhen I said I was leaving, my boss at Yahoo had a long conversation with me about my plans. I told him all about the kinds of pictures I wanted to paint. At the time I was touched that he took such an interest in me. Now I realize it was because he thought I was lying. My options at that point were worth about $2 million a month. If I was leaving that kind of money on the table, it could only be to go and start some new startup, and if I did, I might take people with me. This was the height of the Internet Bubble, and Yahoo was ground zero of it. My boss was at that moment a billionaire. Leaving then to start a new startup must have seemed to him an insanely, and yet also plausibly, ambitious plan.\\n\\nBut I really was quitting to paint, and I started immediately. There was no time to lose. I'd already burned 4 years getting rich. Now when I talk to founders who are leaving after selling their companies, my advice is always the same: take a vacation. That's what I should have done, just gone off somewhere and done nothing for a month or two, but the idea never occurred to me.\\n\\nSo I tried to paint, but I just didn't seem to have any energy or ambition. 
Part of the problem was that I didn't know many people in California. I'd compounded this problem by buying a house up in the Santa Cruz Mountains, with a beautiful view but miles from anywhere. I stuck it out for a few more months, then in desperation I went back to New York, where unless you understand about rent control you'll be surprised to hear I still had my apartment, sealed up like a tomb of my old life. Idelle was in New York at least, and there were other people trying to paint there, even though I didn't know any of them.\\n\\nWhen I got back to New York I resumed my old life, except now I was rich. It was as weird as it sounds. I resumed all my old patterns, except now there were doors where there hadn't been. Now when I was tired of walking, all I had to do was raise my hand, and (unless it was raining) a taxi would stop to pick me up. Now when I walked past charming little restaurants I could go in and order lunch. It was exciting for a while. Painting started to go better. I experimented with a new kind of still life where I'd paint one painting in the old way, then photograph it and print it, blown up, on canvas, and then use that as the underpainting for a second still life, painted from the same objects (which hopefully hadn't rotted yet).\\n\\nMeanwhile I looked for an apartment to buy. Now I could actually choose what\", doc_id='99b3c4c2-aeab-4f5a-9474-916f6c35c9fb', embedding=None, doc_hash='363de309e331985d57cbd2a87662a4b6036a44807432524f2dbfd50a7e6ba7e5', extra_info=None, node_info={'start': 36125, 'end': 40040, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: 'eb14dc48-f3a1-461d-9a49-0d78331dd083', <DocumentRelationship.NEXT: '3'>: '8a9e2472-230f-437f-b720-1494878d5933'}), score=None), NodeWithScore(node=Node(text='I looked for an apartment to buy. Now I could actually choose what neighborhood to live in. 
Where, I asked myself and various real estate agents, is the Cambridge of New York? Aided by occasional visits to actual Cambridge, I gradually realized there wasn\\'t one. Huh.\\n\\nAround this time, in the spring of 2000, I had an idea. It was clear from our experience with Viaweb that web apps were the future. Why not build a web app for making web apps? Why not let people edit code on our server through the browser, and then host the resulting applications for them? [9] You could run all sorts of services on the servers that these applications could use just by making an API call: making and receiving phone calls, manipulating images, taking credit card payments, etc.\\n\\nI got so excited about this idea that I couldn\\'t think about anything else. It seemed obvious that this was the future. I didn\\'t particularly want to start another company, but it was clear that this idea would have to be embodied as one, so I decided to move to Cambridge and start it. I hoped to lure Robert into working on it with me, but there I ran into a hitch. Robert was now a postdoc at MIT, and though he\\'d made a lot of money the last time I\\'d lured him into working on one of my schemes, it had also been a huge time sink. So while he agreed that it sounded like a plausible idea, he firmly refused to work on it.\\n\\nHmph. Well, I\\'d do it myself then. I recruited Dan Giffin, who had worked for Viaweb, and two undergrads who wanted summer jobs, and we got to work trying to build what it\\'s now clear is about twenty companies and several open source projects worth of software. The language for defining applications would of course be a dialect of Lisp. But I wasn\\'t so naive as to assume I could spring an overt Lisp on a general audience; we\\'d hide the parentheses, like Dylan did.\\n\\nBy then there was a name for the kind of company Viaweb was, an \"application service provider,\" or ASP. 
This name didn\\'t last long before it was replaced by \"software as a service,\" but it was current for long enough that I named this new company after it: it was going to be called Aspra.\\n\\nI started working on the application builder, Dan worked on network infrastructure, and the two undergrads worked on the first two services (images and phone calls). But about halfway through the summer I realized I really didn\\'t want to run a company — especially not a big one, which it was looking like this would have to be. I\\'d only started Viaweb because I needed the money. Now that I didn\\'t need money anymore, why was I doing this? If this vision had to be realized as a company, then screw the vision. I\\'d build a subset that could be done as an open source project.\\n\\nMuch to my surprise, the time I spent working on this stuff was not wasted after all. After we started Y Combinator, I would often encounter startups working on parts of this new architecture, and it was very useful to have spent so much time thinking about it and even trying to write some of it.\\n\\nThe subset I would build as an open source project was the new Lisp, whose parentheses I now wouldn\\'t even have to hide. A lot of Lisp hackers dream of building a new Lisp, partly because one of the distinctive features of the language is that it has dialects, and partly, I think, because we have in our minds a Platonic form of Lisp that all existing dialects fall short of. I certainly did. So at the end of the summer Dan and I switched to working on this new dialect of Lisp, which I called Arc, in a house I bought in Cambridge.\\n\\nThe following spring, lightning struck. I was invited to give a talk at a Lisp conference, so I gave one about how we\\'d used Lisp at Viaweb. Afterward I put a postscript file of this talk online, on paulgraham.com, which I\\'d created years before using Viaweb but had never used for anything. In one day it got 30,000 page views. What on earth had happened? 
The referring urls showed', doc_id='8a9e2472-230f-437f-b720-1494878d5933', embedding=None, doc_hash='1a72edc63a9582c55f626fed3dbb1fadbfb1e7e7111e93b6c789c79c66db1e11', extra_info=None, node_info={'start': 40067, 'end': 44018, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: '99b3c4c2-aeab-4f5a-9474-916f6c35c9fb', <DocumentRelationship.NEXT: '3'>: '2d1c3f1b-5fa5-46c9-be20-87ad2dfc9060'}), score=None), NodeWithScore(node=Node(text=\"page views. What on earth had happened? The referring urls showed that someone had posted it on Slashdot. [10]\\n\\nWow, I thought, there's an audience. If I write something and put it on the web, anyone can read it. That may seem obvious now, but it was surprising then. In the print era there was a narrow channel to readers, guarded by fierce monsters known as editors. The only way to get an audience for anything you wrote was to get it published as a book, or in a newspaper or magazine. Now anyone could publish anything.\\n\\nThis had been possible in principle since 1993, but not many people had realized it yet. I had been intimately involved with building the infrastructure of the web for most of that time, and a writer as well, and it had taken me 8 years to realize it. Even then it took me several years to understand the implications. It meant there would be a whole new generation of essays. [11]\\n\\nIn the print era, the channel for publishing essays had been vanishingly small. Except for a few officially anointed thinkers who went to the right parties in New York, the only people allowed to publish essays were specialists writing about their specialties. There were so many essays that had never been written, because there had been no way to publish them. Now they could be, and I was going to write them. 
[12]\\n\\nI've worked on several different things, but to the extent there was a turning point where I figured out what to work on, it was when I started publishing essays online. From then on I knew that whatever else I did, I'd always write essays too.\\n\\nI knew that online essays would be a marginal medium at first. Socially they'd seem more like rants posted by nutjobs on their GeoCities sites than the genteel and beautifully typeset compositions published in The New Yorker. But by this point I knew enough to find that encouraging instead of discouraging.\\n\\nOne of the most conspicuous patterns I've noticed in my life is how well it has worked, for me at least, to work on things that weren't prestigious. Still life has always been the least prestigious form of painting. Viaweb and Y Combinator both seemed lame when we started them. I still get the glassy eye from strangers when they ask what I'm writing, and I explain that it's an essay I'm going to publish on my web site. Even Lisp, though prestigious intellectually in something like the way Latin is, also seems about as hip.\\n\\nIt's not that unprestigious types of work are good per se. But when you find yourself drawn to some kind of work despite its current lack of prestige, it's a sign both that there's something real to be discovered there, and that you have the right kind of motives. Impure motives are a big danger for the ambitious. If anything is going to lead you astray, it will be the desire to impress people. So while working on things that aren't prestigious doesn't guarantee you're on the right track, it at least guarantees you're not on the most common type of wrong one.\\n\\nOver the next several years I wrote lots of essays about all kinds of different topics. O'Reilly reprinted a collection of them as a book, called Hackers & Painters after one of the essays in it. I also worked on spam filters, and did some more painting. 
I used to have dinners for a group of friends every thursday night, which taught me how to cook for groups. And I bought another building in Cambridge, a former candy factory (and later, twas said, porn studio), to use as an office.\\n\\nOne night in October 2003 there was a big party at my house. It was a clever idea of my friend Maria Daniels, who was one of the thursday diners. Three separate hosts would all invite their friends to one party. So for every guest, two thirds of the other guests would be people they didn't know but would probably like. One of the guests was someone I didn't know but would turn out to like a lot: a woman called Jessica Livingston. A couple days later I asked her out.\\n\\nJessica was in charge of marketing at a Boston investment\", doc_id='2d1c3f1b-5fa5-46c9-be20-87ad2dfc9060', embedding=None, doc_hash='597ba7da683f715e046031bf0824aa5d9ff24b64aa2c0902a3ad6ee1e48ea312', extra_info=None, node_info={'start': 44018, 'end': 48016, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: '8a9e2472-230f-437f-b720-1494878d5933', <DocumentRelationship.NEXT: '3'>: '42f8cdbc-3613-409a-85b6-bb1e22c85fef'}), score=None), NodeWithScore(node=Node(text='I asked her out.\\n\\nJessica was in charge of marketing at a Boston investment bank. This bank thought it understood startups, but over the next year, as she met friends of mine from the startup world, she was surprised how different reality was. And how colorful their stories were. So she decided to compile a book of interviews with startup founders.\\n\\nWhen the bank had financial problems and she had to fire half her staff, she started looking for a new job. In early 2005 she interviewed for a marketing job at a Boston VC firm. It took them weeks to make up their minds, and during this time I started telling her about all the things that needed to be fixed about venture capital. 
They should make a larger number of smaller investments instead of a handful of giant ones, they should be funding younger, more technical founders instead of MBAs, they should let the founders remain as CEO, and so on.\\n\\nOne of my tricks for writing essays had always been to give talks. The prospect of having to stand up in front of a group of people and tell them something that won\\'t waste their time is a great spur to the imagination. When the Harvard Computer Society, the undergrad computer club, asked me to give a talk, I decided I would tell them how to start a startup. Maybe they\\'d be able to avoid the worst of the mistakes we\\'d made.\\n\\nSo I gave this talk, in the course of which I told them that the best sources of seed funding were successful startup founders, because then they\\'d be sources of advice too. Whereupon it seemed they were all looking expectantly at me. Horrified at the prospect of having my inbox flooded by business plans (if I\\'d only known), I blurted out \"But not me!\" and went on with the talk. But afterward it occurred to me that I should really stop procrastinating about angel investing. I\\'d been meaning to since Yahoo bought us, and now it was 7 years later and I still hadn\\'t done one angel investment.\\n\\nMeanwhile I had been scheming with Robert and Trevor about projects we could work on together. I missed working with them, and it seemed like there had to be something we could collaborate on.\\n\\nAs Jessica and I were walking home from dinner on March 11, at the corner of Garden and Walker streets, these three threads converged. Screw the VCs who were taking so long to make up their minds. We\\'d start our own investment firm and actually implement the ideas we\\'d been talking about. I\\'d fund it, and Jessica could quit her job and work for it, and we\\'d get Robert and Trevor as partners too. [13]\\n\\nOnce again, ignorance worked in our favor. 
We had no idea how to be angel investors, and in Boston in 2005 there were no Ron Conways to learn from. So we just made what seemed like the obvious choices, and some of the things we did turned out to be novel.\\n\\nThere are multiple components to Y Combinator, and we didn\\'t figure them all out at once. The part we got first was to be an angel firm. In those days, those two words didn\\'t go together. There were VC firms, which were organized companies with people whose job it was to make investments, but they only did big, million dollar investments. And there were angels, who did smaller investments, but these were individuals who were usually focused on other things and made investments on the side. And neither of them helped founders enough in the beginning. We knew how helpless founders were in some respects, because we remembered how helpless we\\'d been. For example, one thing Julian had done for us that seemed to us like magic was to get us set up as a company. We were fine writing fairly difficult software, but actually getting incorporated, with bylaws and stock and all that stuff, how on earth did you do that? Our plan was not only to make seed investments, but to do for startups everything Julian had done for us.\\n\\nYC was not organized as a fund. It was cheap enough to run that we funded it with our own money. That went right by 99% of readers, but professional investors are thinking \"Wow, that means they got all the returns.\" But once again, this was not due to any particular insight on our part. 
We didn\\'t know how VC firms', doc_id='42f8cdbc-3613-409a-85b6-bb1e22c85fef', embedding=None, doc_hash='345510864fca229d2e5a22bf719f4ee408018c13c348307d3d72bc151fe61050', extra_info=None, node_info={'start': 48009, 'end': 52140, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: '2d1c3f1b-5fa5-46c9-be20-87ad2dfc9060', <DocumentRelationship.NEXT: '3'>: 'f9e74e9d-cdd9-43c4-8742-76c38200305f'}), score=None), NodeWithScore(node=Node(text='not due to any particular insight on our part. We didn\\'t know how VC firms were organized. It never occurred to us to try to raise a fund, and if it had, we wouldn\\'t have known where to start. [14]\\n\\nThe most distinctive thing about YC is the batch model: to fund a bunch of startups all at once, twice a year, and then to spend three months focusing intensively on trying to help them. That part we discovered by accident, not merely implicitly but explicitly due to our ignorance about investing. We needed to get experience as investors. What better way, we thought, than to fund a whole bunch of startups at once? We knew undergrads got temporary jobs at tech companies during the summer. Why not organize a summer program where they\\'d start startups instead? We wouldn\\'t feel guilty for being in a sense fake investors, because they would in a similar sense be fake founders. So while we probably wouldn\\'t make much money out of it, we\\'d at least get to practice being investors on them, and they for their part would probably have a more interesting summer than they would working at Microsoft.\\n\\nWe\\'d use the building I owned in Cambridge as our headquarters. 
We\\'d all have dinner there once a week — on tuesdays, since I was already cooking for the thursday diners on thursdays — and after dinner we\\'d bring in experts on startups to give talks.\\n\\nWe knew undergrads were deciding then about summer jobs, so in a matter of days we cooked up something we called the Summer Founders Program, and I posted an announcement on my site, inviting undergrads to apply. I had never imagined that writing essays would be a way to get \"deal flow,\" as investors call it, but it turned out to be the perfect source. [15] We got 225 applications for the Summer Founders Program, and we were surprised to find that a lot of them were from people who\\'d already graduated, or were about to that spring. Already this SFP thing was starting to feel more serious than we\\'d intended.\\n\\nWe invited about 20 of the 225 groups to interview in person, and from those we picked 8 to fund. They were an impressive group. That first batch included reddit, Justin Kan and Emmett Shear, who went on to found Twitch, Aaron Swartz, who had already helped write the RSS spec and would a few years later become a martyr for open access, and Sam Altman, who would later become the second president of YC. I don\\'t think it was entirely luck that the first batch was so good. You had to be pretty bold to sign up for a weird thing like the Summer Founders Program instead of a summer job at a legit place like Microsoft or Goldman Sachs.\\n\\nThe deal for startups was based on a combination of the deal we did with Julian ($10k for 10%) and what Robert said MIT grad students got for the summer ($6k). We invested $6k per founder, which in the typical two-founder case was $12k, in return for 6%. That had to be fair, because it was twice as good as the deal we ourselves had taken. Plus that first summer, which was really hot, Jessica brought the founders free air conditioners. [16]\\n\\nFairly quickly I realized that we had stumbled upon the way to scale startup funding. 
Funding startups in batches was more convenient for us, because it meant we could do things for a lot of startups at once, but being part of a batch was better for the startups too. It solved one of the biggest problems faced by founders: the isolation. Now you not only had colleagues, but colleagues who understood the problems you were facing and could tell you how they were solving them.\\n\\nAs YC grew, we started to notice other advantages of scale. The alumni became a tight community, dedicated to helping one another, and especially the current batch, whose shoes they remembered being in. We also noticed that the startups were becoming one another\\'s customers. We used to refer jokingly to the \"YC GDP,\" but as YC grows this becomes less and less of a joke. Now lots of startups get their initial set of customers almost entirely from among their batchmates.\\n\\nI had not originally', doc_id='f9e74e9d-cdd9-43c4-8742-76c38200305f', embedding=None, doc_hash='03e6a7d2e66c082507e29d8cfe9ec36fa77fe0b1cf352ad5ce85ad91f71a3622', extra_info=None, node_info={'start': 52144, 'end': 56170, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: '42f8cdbc-3613-409a-85b6-bb1e22c85fef', <DocumentRelationship.NEXT: '3'>: 'ab7ed037-4269-4593-b5ff-0ce3d9213cbd'}), score=None), NodeWithScore(node=Node(text='of customers almost entirely from among their batchmates.\\n\\nI had not originally intended YC to be a full-time job. I was going to do three things: hack, write essays, and work on YC. As YC grew, and I grew more excited about it, it started to take up a lot more than a third of my attention. But for the first few years I was still able to work on other things.\\n\\nIn the summer of 2006, Robert and I started working on a new version of Arc. This one was reasonably fast, because it was compiled into Scheme. To test this new Arc, I wrote Hacker News in it. 
It was originally meant to be a news aggregator for startup founders and was called Startup News, but after a few months I got tired of reading about nothing but startups. Plus it wasn\\'t startup founders we wanted to reach. It was future startup founders. So I changed the name to Hacker News and the topic to whatever engaged one\\'s intellectual curiosity.\\n\\nHN was no doubt good for YC, but it was also by far the biggest source of stress for me. If all I\\'d had to do was select and help founders, life would have been so easy. And that implies that HN was a mistake. Surely the biggest source of stress in one\\'s work should at least be something close to the core of the work. Whereas I was like someone who was in pain while running a marathon not from the exertion of running, but because I had a blister from an ill-fitting shoe. When I was dealing with some urgent problem during YC, there was about a 60% chance it had to do with HN, and a 40% chance it had do with everything else combined. [17]\\n\\nAs well as HN, I wrote all of YC\\'s internal software in Arc. But while I continued to work a good deal in Arc, I gradually stopped working on Arc, partly because I didn\\'t have time to, and partly because it was a lot less attractive to mess around with the language now that we had all this infrastructure depending on it. So now my three projects were reduced to two: writing essays and working on YC.\\n\\nYC was different from other kinds of work I\\'ve done. Instead of deciding for myself what to work on, the problems came to me. Every 6 months there was a new batch of startups, and their problems, whatever they were, became our problems. It was very engaging work, because their problems were quite varied, and the good founders were very effective. If you were trying to learn the most you could about startups in the shortest possible time, you couldn\\'t have picked a better way to do it.\\n\\nThere were parts of the job I didn\\'t like. 
Disputes between cofounders, figuring out when people were lying to us, fighting with people who maltreated the startups, and so on. But I worked hard even at the parts I didn\\'t like. I was haunted by something Kevin Hale once said about companies: \"No one works harder than the boss.\" He meant it both descriptively and prescriptively, and it was the second part that scared me. I wanted YC to be good, so if how hard I worked set the upper bound on how hard everyone else worked, I\\'d better work very hard.\\n\\nOne day in 2010, when he was visiting California for interviews, Robert Morris did something astonishing: he offered me unsolicited advice. I can only remember him doing that once before. One day at Viaweb, when I was bent over double from a kidney stone, he suggested that it would be a good idea for him to take me to the hospital. That was what it took for Rtm to offer unsolicited advice. So I remember his exact words very clearly. \"You know,\" he said, \"you should make sure Y Combinator isn\\'t the last cool thing you do.\"\\n\\nAt the time I didn\\'t understand what he meant, but gradually it dawned on me that he was saying I should quit. This seemed strange advice, because YC was doing great. But if there was one thing rarer than Rtm offering advice, it was Rtm being wrong. So this set me thinking. It was true that on my current trajectory, YC would be the last thing I did, because it was only taking up more of my attention. 
It had already eaten Arc, and was in the process of eating', doc_id='ab7ed037-4269-4593-b5ff-0ce3d9213cbd', embedding=None, doc_hash='28da476588fa6a7c04e3fc8d0c8490de5a6aa3f4b46ada11723bd524402b1d33', extra_info=None, node_info={'start': 56162, 'end': 60161, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: 'f9e74e9d-cdd9-43c4-8742-76c38200305f', <DocumentRelationship.NEXT: '3'>: 'cfd50785-c54a-4e07-b474-561541968da9'}), score=None), NodeWithScore(node=Node(text=\"of my attention. It had already eaten Arc, and was in the process of eating essays too. Either YC was my life's work or I'd have to leave eventually. And it wasn't, so I would.\\n\\nIn the summer of 2012 my mother had a stroke, and the cause turned out to be a blood clot caused by colon cancer. The stroke destroyed her balance, and she was put in a nursing home, but she really wanted to get out of it and back to her house, and my sister and I were determined to help her do it. I used to fly up to Oregon to visit her regularly, and I had a lot of time to think on those flights. On one of them I realized I was ready to hand YC over to someone else.\\n\\nI asked Jessica if she wanted to be president, but she didn't, so we decided we'd try to recruit Sam Altman. We talked to Robert and Trevor and we agreed to make it a complete changing of the guard. Up till that point YC had been controlled by the original LLC we four had started. But we wanted YC to last for a long time, and to do that it couldn't be controlled by the founders. So if Sam said yes, we'd let him reorganize YC. Robert and I would retire, and Jessica and Trevor would become ordinary partners.\\n\\nWhen we asked Sam if he wanted to be president of YC, initially he said no. He wanted to start a startup to make nuclear reactors. But I kept at it, and in October 2013 he finally agreed. 
We decided he'd take over starting with the winter 2014 batch. For the rest of 2013 I left running YC more and more to Sam, partly so he could learn the job, and partly because I was focused on my mother, whose cancer had returned.\\n\\nShe died on January 15, 2014. We knew this was coming, but it was still hard when it did.\\n\\nI kept working on YC till March, to help get that batch of startups through Demo Day, then I checked out pretty completely. (I still talk to alumni and to new startups working on things I'm interested in, but that only takes a few hours a week.)\\n\\nWhat should I do next? Rtm's advice hadn't included anything about that. I wanted to do something completely different, so I decided I'd paint. I wanted to see how good I could get if I really focused on it. So the day after I stopped working on YC, I started painting. I was rusty and it took a while to get back into shape, but it was at least completely engaging. [18]\\n\\nI spent most of the rest of 2014 painting. I'd never been able to work so uninterruptedly before, and I got to be better than I had been. Not good enough, but better. Then in November, right in the middle of a painting, I ran out of steam. Up till that point I'd always been curious to see how the painting I was working on would turn out, but suddenly finishing this one seemed like a chore. So I stopped working on it and cleaned my brushes and haven't painted since. So far anyway.\\n\\nI realize that sounds rather wimpy. But attention is a zero sum game. If you can choose what to work on, and you choose a project that's not the best one (or at least a good one) for you, then it's getting in the way of another project that is. And at 50 there was some opportunity cost to screwing around.\\n\\nI started writing essays again, and wrote a bunch of new ones over the next few months. I even wrote a couple that weren't about startups. 
Then in March 2015 I started working on Lisp again.\\n\\nThe distinctive thing about Lisp is that its core is a language defined by writing an interpreter in itself. It wasn't originally intended as a programming language in the ordinary sense. It was meant to be a formal model of computation, an alternative to the Turing machine. If you want to write an interpreter for a language in itself, what's the minimum set of predefined operators you need? The Lisp that John McCarthy invented, or more accurately discovered, is an answer to that question. [19]\\n\\nMcCarthy didn't realize this Lisp could even be used to program computers\", doc_id='cfd50785-c54a-4e07-b474-561541968da9', embedding=None, doc_hash='b6524989f50c19316fcc4135d476deedc62b79ab141d9f650743a6fe5f3558c9', extra_info=None, node_info={'start': 60170, 'end': 64027, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: 'ab7ed037-4269-4593-b5ff-0ce3d9213cbd', <DocumentRelationship.NEXT: '3'>: '14d14357-e5cf-4015-8a65-2cc9fd3fb5c4'}), score=None), NodeWithScore(node=Node(text='didn\\'t realize this Lisp could even be used to program computers till his grad student Steve Russell suggested it. Russell translated McCarthy\\'s interpreter into IBM 704 machine language, and from that point Lisp started also to be a programming language in the ordinary sense. But its origins as a model of computation gave it a power and elegance that other languages couldn\\'t match. It was this that attracted me in college, though I didn\\'t understand why at the time.\\n\\nMcCarthy\\'s 1960 Lisp did nothing more than interpret Lisp expressions. It was missing a lot of things you\\'d want in a programming language. So these had to be added, and when they were, they weren\\'t defined using McCarthy\\'s original axiomatic approach. That wouldn\\'t have been feasible at the time. 
McCarthy tested his interpreter by hand-simulating the execution of programs. But it was already getting close to the limit of interpreters you could test that way — indeed, there was a bug in it that McCarthy had overlooked. To test a more complicated interpreter, you\\'d have had to run it, and computers then weren\\'t powerful enough.\\n\\nNow they are, though. Now you could continue using McCarthy\\'s axiomatic approach till you\\'d defined a complete programming language. And as long as every change you made to McCarthy\\'s Lisp was a discoveredness-preserving transformation, you could, in principle, end up with a complete language that had this quality. Harder to do than to talk about, of course, but if it was possible in principle, why not try? So I decided to take a shot at it. It took 4 years, from March 26, 2015 to October 12, 2019. It was fortunate that I had a precisely defined goal, or it would have been hard to keep at it for so long.\\n\\nI wrote this new Lisp, called Bel, in itself in Arc. That may sound like a contradiction, but it\\'s an indication of the sort of trickery I had to engage in to make this work. By means of an egregious collection of hacks I managed to make something close enough to an interpreter written in itself that could actually run. Not fast, but fast enough to test.\\n\\nI had to ban myself from writing essays during most of this time, or I\\'d never have finished. In late 2015 I spent 3 months writing essays, and when I went back to working on Bel I could barely understand the code. Not so much because it was badly written as because the problem is so convoluted. When you\\'re working on an interpreter written in itself, it\\'s hard to keep track of what\\'s happening at what level, and errors can be practically encrypted by the time you get them.\\n\\nSo I said no more essays till Bel was done. But I told few people about Bel while I was working on it. 
So for years it must have seemed that I was doing nothing, when in fact I was working harder than I\\'d ever worked on anything. Occasionally after wrestling for hours with some gruesome bug I\\'d check Twitter or HN and see someone asking \"Does Paul Graham still code?\"\\n\\nWorking on Bel was hard but satisfying. I worked on it so intensively that at any given time I had a decent chunk of the code in my head and could write more there. I remember taking the boys to the coast on a sunny day in 2015 and figuring out how to deal with some problem involving continuations while I watched them play in the tide pools. It felt like I was doing life right. I remember that because I was slightly dismayed at how novel it felt. The good news is that I had more moments like this over the next few years.\\n\\nIn the summer of 2016 we moved to England. We wanted our kids to see what it was like living in another country, and since I was a British citizen by birth, that seemed the obvious choice. We only meant to stay for a year, but we liked it so much that we still live there. So most of Bel was written in England.\\n\\nIn the fall of 2019, Bel was finally finished. 
Like McCarthy\\'s original Lisp, it\\'s a spec rather than an implementation, although like McCarthy\\'s Lisp it\\'s a spec expressed as code.\\n\\nNow that I could write essays again, I wrote a bunch about topics I\\'d had stacked', doc_id='14d14357-e5cf-4015-8a65-2cc9fd3fb5c4', embedding=None, doc_hash='fdbb519247f837aba04548ab0aec6383f33f41fbbabe6aacfe83b2f52ad699c5', extra_info=None, node_info={'start': 64035, 'end': 68071, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: 'cfd50785-c54a-4e07-b474-561541968da9', <DocumentRelationship.NEXT: '3'>: '81530055-3701-4064-8170-c2c3d42f05b7'}), score=None), NodeWithScore(node=Node(text=\"I could write essays again, I wrote a bunch about topics I'd had stacked up. I kept writing essays through 2020, but I also started to think about other things I could work on. How should I choose what to do? Well, how had I chosen what to work on in the past? I wrote an essay for myself to answer that question, and I was surprised how long and messy the answer turned out to be. If this surprised me, who'd lived it, then I thought perhaps it would be interesting to other people, and encouraging to those with similarly messy lives. So I wrote a more detailed version for others to read, and this is the last sentence of it.\\n\\n\\n\\n\\n\\n\\n\\n\\n\\nNotes\\n\\n[1] My experience skipped a step in the evolution of computers: time-sharing machines with interactive OSes. I went straight from batch processing to microcomputers, which made microcomputers seem all the more exciting.\\n\\n[2] Italian words for abstract concepts can nearly always be predicted from their English cognates (except for occasional traps like polluzione). It's the everyday words that differ. 
So if you string together a lot of abstract concepts with a few simple verbs, you can make a little Italian go a long way.\\n\\n[3] I lived at Piazza San Felice 4, so my walk to the Accademia went straight down the spine of old Florence: past the Pitti, across the bridge, past Orsanmichele, between the Duomo and the Baptistery, and then up Via Ricasoli to Piazza San Marco. I saw Florence at street level in every possible condition, from empty dark winter evenings to sweltering summer days when the streets were packed with tourists.\\n\\n[4] You can of course paint people like still lives if you want to, and they're willing. That sort of portrait is arguably the apex of still life painting, though the long sitting does tend to produce pained expressions in the sitters.\\n\\n[5] Interleaf was one of many companies that had smart people and built impressive technology, and yet got crushed by Moore's Law. In the 1990s the exponential growth in the power of commodity (i.e. Intel) processors rolled up high-end, special-purpose hardware and software companies like a bulldozer.\\n\\n[6] The signature style seekers at RISD weren't specifically mercenary. In the art world, money and coolness are tightly coupled. Anything expensive comes to be seen as cool, and anything seen as cool will soon become equally expensive.\\n\\n[7] Technically the apartment wasn't rent-controlled but rent-stabilized, but this is a refinement only New Yorkers would know or care about. The point is that it was really cheap, less than half market price.\\n\\n[8] Most software you can launch as soon as it's done. But when the software is an online store builder and you're hosting the stores, if you don't have any users yet, that fact will be painfully obvious. 
So before we could launch publicly we had to launch privately, in the sense of recruiting an initial set of users and making sure they had decent-looking stores.\\n\\n[9] We'd had a code editor in Viaweb for users to define their own page styles. They didn't know it, but they were editing Lisp expressions underneath. But this wasn't an app editor, because the code ran when the merchants' sites were generated, not when shoppers visited them.\\n\\n[10] This was the first instance of what is now a familiar experience, and so was what happened next, when I read the comments and found they were full of angry people. How could I claim that Lisp was better than other languages? Weren't they all Turing complete? People who see the responses to essays I write sometimes tell me how sorry they feel for me, but I'm not exaggerating when I reply that it has always been like this, since the very beginning. It comes with the territory. An essay must tell readers things they don't already know, and some people dislike being told such things.\\n\\n[11] People put plenty of stuff on the internet in the 90s of course, but putting something online is not the same as publishing it online. Publishing online means you\", doc_id='81530055-3701-4064-8170-c2c3d42f05b7', embedding=None, doc_hash='7ded19f889627d27a2c132b3d5c4f9dac587cd2407c5624d421e292f2b3ab1d7', extra_info=None, node_info={'start': 68067, 'end': 72048, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: '14d14357-e5cf-4015-8a65-2cc9fd3fb5c4', <DocumentRelationship.NEXT: '3'>: 'c851c700-5226-42a9-87da-e89e548e381e'}), score=None), NodeWithScore(node=Node(text='putting something online is not the same as publishing it online. 
Publishing online means you treat the online version as the (or at least a) primary version.\\n\\n[12] There is a general lesson here that our experience with Y Combinator also teaches: Customs continue to constrain you long after the restrictions that caused them have disappeared. Customary VC practice had once, like the customs about publishing essays, been based on real constraints. Startups had once been much more expensive to start, and proportionally rare. Now they could be cheap and common, but the VCs\\' customs still reflected the old world, just as customs about writing essays still reflected the constraints of the print era.\\n\\nWhich in turn implies that people who are independent-minded (i.e. less influenced by custom) will have an advantage in fields affected by rapid change (where customs are more likely to be obsolete).\\n\\nHere\\'s an interesting point, though: you can\\'t always predict which fields will be affected by rapid change. Obviously software and venture capital will be, but who would have predicted that essay writing would be?\\n\\n[13] Y Combinator was not the original name. At first we were called Cambridge Seed. But we didn\\'t want a regional name, in case someone copied us in Silicon Valley, so we renamed ourselves after one of the coolest tricks in the lambda calculus, the Y combinator.\\n\\nI picked orange as our color partly because it\\'s the warmest, and partly because no VC used it. In 2005 all the VCs used staid colors like maroon, navy blue, and forest green, because they were trying to appeal to LPs, not founders. The YC logo itself is an inside joke: the Viaweb logo had been a white V on a red circle, so I made the YC logo a white Y on an orange square.\\n\\n[14] YC did become a fund for a couple years starting in 2009, because it was getting so big I could no longer afford to fund it personally. 
But after Heroku got bought we had enough money to go back to being self-funded.\\n\\n[15] I\\'ve never liked the term \"deal flow,\" because it implies that the number of new startups at any given time is fixed. This is not only false, but it\\'s the purpose of YC to falsify it, by causing startups to be founded that would not otherwise have existed.\\n\\n[16] She reports that they were all different shapes and sizes, because there was a run on air conditioners and she had to get whatever she could, but that they were all heavier than she could carry now.\\n\\n[17] Another problem with HN was a bizarre edge case that occurs when you both write essays and run a forum. When you run a forum, you\\'re assumed to see if not every conversation, at least every conversation involving you. And when you write essays, people post highly imaginative misinterpretations of them on forums. Individually these two phenomena are tedious but bearable, but the combination is disastrous. You actually have to respond to the misinterpretations, because the assumption that you\\'re present in the conversation means that not responding to any sufficiently upvoted misinterpretation reads as a tacit admission that it\\'s correct. But that in turn encourages more; anyone who wants to pick a fight with you senses that now is their chance.\\n\\n[18] The worst thing about leaving YC was not working with Jessica anymore. We\\'d been working on YC almost the whole time we\\'d known each other, and we\\'d neither tried nor wanted to separate it from our personal lives, so leaving was like pulling up a deeply rooted tree.\\n\\n[19] One way to get more precise about the concept of invented vs discovered is to talk about space aliens. Any sufficiently advanced alien civilization would certainly know about the Pythagorean theorem, for example. 
I believe, though with less certainty, that they would also know about the Lisp in McCarthy\\'s 1960 paper.\\n\\nBut if so there\\'s no reason to suppose that this is the limit of the language that might be known to them. Presumably aliens need numbers and errors and I/O too. So it seems likely there exists at least one path out of McCarthy\\'s', doc_id='c851c700-5226-42a9-87da-e89e548e381e', embedding=None, doc_hash='8c5bdf9883547bcebedcb406fa77ba918defaa7bbedece21114163c578ac0729', extra_info=None, node_info={'start': 72029, 'end': 76071, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: '81530055-3701-4064-8170-c2c3d42f05b7', <DocumentRelationship.NEXT: '3'>: '4972fc4f-6990-4659-a20c-98a8147373d8'}), score=None), NodeWithScore(node=Node(text=\"So it seems likely there exists at least one path out of McCarthy's Lisp along which discoveredness is preserved.\\n\\n\\n\\nThanks to Trevor Blackwell, John Collison, Patrick Collison, Daniel Gackle, Ralph Hazell, Jessica Livingston, Robert Morris, and Harj Taggar for reading drafts of this.\\n\\n\\n\\n\", doc_id='4972fc4f-6990-4659-a20c-98a8147373d8', embedding=None, doc_hash='9d65b6bc997db43cee91e2c4b7380e4efc1059937154b60dc2e45b8aa489e59e', extra_info=None, node_info={'start': 76083, 'end': 76372, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: 'c851c700-5226-42a9-87da-e89e548e381e'}), score=None)], extra_info={'a0e4f5a8-f852-4807-96f2-76721dc1e57d': None, '1992e045-1032-432c-ac01-42947c0727e9': None, 'e06dfa02-faa5-4617-9d58-00ae88433c58': None, '0fc849a1-2421-414e-9f83-f39e3ac47e41': None, '8e0bca16-b087-489b-983a-5beaaf393f64': None, '855d5459-1cfe-465e-8c94-f9a1d047f821': None, '3b199ba9-d04b-473a-8c73-39c293638957': None, '669c0218-b2c1-428b-808c-f5408e52dcdf': None, 
'eb14dc48-f3a1-461d-9a49-0d78331dd083': None, '99b3c4c2-aeab-4f5a-9474-916f6c35c9fb': None, '8a9e2472-230f-437f-b720-1494878d5933': None, '2d1c3f1b-5fa5-46c9-be20-87ad2dfc9060': None, '42f8cdbc-3613-409a-85b6-bb1e22c85fef': None, 'f9e74e9d-cdd9-43c4-8742-76c38200305f': None, 'ab7ed037-4269-4593-b5ff-0ce3d9213cbd': None, 'cfd50785-c54a-4e07-b474-561541968da9': None, '14d14357-e5cf-4015-8a65-2cc9fd3fb5c4': None, '81530055-3701-4064-8170-c2c3d42f05b7': None, 'c851c700-5226-42a9-87da-e89e548e381e': None, '4972fc4f-6990-4659-a20c-98a8147373d8': None})"
-      ]
-     },
-     "execution_count": 11,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "query_engine.query(\"What is the summary of the document?\")"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {
-    "tags": []
-   },
-   "source": [
-    "query_engine.query(\"What did Paul Graham do after RICS?\")"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "#### LLMSingleSelector\n",
-    "\n",
-    "Use OpenAI (or any other LLM) to parse generated JSON under the hood to select a sub-index for routing."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 13,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "query_engine = RouterQueryEngine(\n",
-    "    selector=LLMSingleSelector.from_defaults(),\n",
-    "    query_engine_tools=[\n",
-    "        list_tool,\n",
-    "        vector_tool,\n",
-    "    ],\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 14,
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Selecting query engine 0: It provides a summary of the document..\n",
-      "> [get_response] Total LLM token usage: 3411 tokens\n",
-      "> [get_response] Total embedding token usage: 0 tokens\n",
-      "> [get_response] Total LLM token usage: 3411 tokens\n",
-      "> [get_response] Total embedding token usage: 0 tokens\n"
-     ]
-    },
-    {
-     "data": {
-      "text/plain": [
-       "Response(response=\"\\nThis document is a reflection on the author's experiences with computers and writing, from his early days of programming on an IBM 1401 to his more recent work on a web application builder. He recounts his experiences with programming, painting, and starting companies, and how he eventually came to write essays about his life and the choices he made.\", source_nodes=[NodeWithScore(node=Node(text='\\t\\t\\n\\nWhat I Worked On\\n\\nFebruary 2021\\n\\nBefore college the two main things I worked on, outside of school, were writing and programming. I didn\\'t write essays. I wrote what beginning writers were supposed to write then, and probably still are: short stories. My stories were awful. They had hardly any plot, just characters with strong feelings, which I imagined made them deep.\\n\\nThe first programs I tried writing were on the IBM 1401 that our school district used for what was then called \"data processing.\" This was in 9th grade, so I was 13 or 14. The school district\\'s 1401 happened to be in the basement of our junior high school, and my friend Rich Draves and I got permission to use it. It was like a mini Bond villain\\'s lair down there, with all these alien-looking machines — CPU, disk drives, printer, card reader — sitting up on a raised floor under bright fluorescent lights.\\n\\nThe language we used was an early version of Fortran. You had to type programs on punch cards, then stack them in the card reader and press a button to load the program into memory and run it. The result would ordinarily be to print something on the spectacularly loud printer.\\n\\nI was puzzled by the 1401. I couldn\\'t figure out what to do with it. And in retrospect there\\'s not much I could have done with it. The only form of input to programs was data stored on punched cards, and I didn\\'t have any data stored on punched cards. 
The only other option was to do things that didn\\'t rely on any input, like calculate approximations of pi, but I didn\\'t know enough math to do anything interesting of that type. So I\\'m not surprised I can\\'t remember any programs I wrote, because they can\\'t have done much. My clearest memory is of the moment I learned it was possible for programs not to terminate, when one of mine didn\\'t. On a machine without time-sharing, this was a social as well as a technical error, as the data center manager\\'s expression made clear.\\n\\nWith microcomputers, everything changed. Now you could have a computer sitting right in front of you, on a desk, that could respond to your keystrokes as it was running instead of just churning through a stack of punch cards and then stopping. [1]\\n\\nThe first of my friends to get a microcomputer built it himself. It was sold as a kit by Heathkit. I remember vividly how impressed and envious I felt watching him sitting in front of it, typing programs right into the computer.\\n\\nComputers were expensive in those days and it took me years of nagging before I convinced my father to buy one, a TRS-80, in about 1980. The gold standard then was the Apple II, but a TRS-80 was good enough. This was when I really started programming. I wrote simple games, a program to predict how high my model rockets would fly, and a word processor that my father used to write at least one book. There was only room in memory for about 2 pages of text, so he\\'d write 2 pages at a time and then print them out, but it was a lot better than a typewriter.\\n\\nThough I liked programming, I didn\\'t plan to study it in college. In college I was going to study philosophy, which sounded much more powerful. It seemed, to my naive high school self, to be the study of the ultimate truths, compared to which the things studied in other fields would be mere domain knowledge. 
What I discovered when I got to college was that the other fields took up so much of the space of ideas that there wasn\\'t much left for these supposed ultimate truths. All that seemed left for philosophy were edge cases that people in other fields felt could safely be ignored.\\n\\nI couldn\\'t have put this into words when I was 18. All I knew at the time was that I kept taking philosophy courses and they kept being boring. So I decided to switch to AI.\\n\\nAI was in the air in the mid 1980s, but there were two things especially that made me want to work on it: a novel by Heinlein called The Moon is a Harsh Mistress, which featured an intelligent computer called Mike, and a PBS documentary that showed Terry Winograd using SHRDLU. I haven\\'t tried', doc_id='a0e4f5a8-f852-4807-96f2-76721dc1e57d', embedding=None, doc_hash='694feb4edd2c0029159c7f9faf46df308177a12658e50be42d3a8cb434bef168', extra_info=None, node_info={'start': 0, 'end': 4040, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.NEXT: '3'>: '1992e045-1032-432c-ac01-42947c0727e9'}), score=None), NodeWithScore(node=Node(text='documentary that showed Terry Winograd using SHRDLU. I haven\\'t tried rereading The Moon is a Harsh Mistress, so I don\\'t know how well it has aged, but when I read it I was drawn entirely into its world. It seemed only a matter of time before we\\'d have Mike, and when I saw Winograd using SHRDLU, it seemed like that time would be a few years at most. All you had to do was teach SHRDLU more words.\\n\\nThere weren\\'t any classes in AI at Cornell then, not even graduate classes, so I started trying to teach myself. Which meant learning Lisp, since in those days Lisp was regarded as the language of AI. The commonly used programming languages then were pretty primitive, and programmers\\' ideas correspondingly so. 
The default language at Cornell was a Pascal-like language called PL/I, and the situation was similar elsewhere. Learning Lisp expanded my concept of a program so fast that it was years before I started to have a sense of where the new limits were. This was more like it; this was what I had expected college to do. It wasn\\'t happening in a class, like it was supposed to, but that was ok. For the next couple years I was on a roll. I knew what I was going to do.\\n\\nFor my undergraduate thesis, I reverse-engineered SHRDLU. My God did I love working on that program. It was a pleasing bit of code, but what made it even more exciting was my belief — hard to imagine now, but not unique in 1985 — that it was already climbing the lower slopes of intelligence.\\n\\nI had gotten into a program at Cornell that didn\\'t make you choose a major. You could take whatever classes you liked, and choose whatever you liked to put on your degree. I of course chose \"Artificial Intelligence.\" When I got the actual physical diploma, I was dismayed to find that the quotes had been included, which made them read as scare-quotes. At the time this bothered me, but now it seems amusingly accurate, for reasons I was about to discover.\\n\\nI applied to 3 grad schools: MIT and Yale, which were renowned for AI at the time, and Harvard, which I\\'d visited because Rich Draves went there, and was also home to Bill Woods, who\\'d invented the type of parser I used in my SHRDLU clone. Only Harvard accepted me, so that was where I went.\\n\\nI don\\'t remember the moment it happened, or if there even was a specific moment, but during the first year of grad school I realized that AI, as practiced at the time, was a hoax. 
By which I mean the sort of AI in which a program that\\'s told \"the dog is sitting on the chair\" translates this into some formal representation and adds it to the list of things it knows.\\n\\nWhat these programs really showed was that there\\'s a subset of natural language that\\'s a formal language. But a very proper subset. It was clear that there was an unbridgeable gap between what they could do and actually understanding natural language. It was not, in fact, simply a matter of teaching SHRDLU more words. That whole way of doing AI, with explicit data structures representing concepts, was not going to work. Its brokenness did, as so often happens, generate a lot of opportunities to write papers about various band-aids that could be applied to it, but it was never going to get us Mike.\\n\\nSo I looked around to see what I could salvage from the wreckage of my plans, and there was Lisp. I knew from experience that Lisp was interesting for its own sake and not just for its association with AI, even though that was the main reason people cared about it at the time. So I decided to focus on Lisp. In fact, I decided to write a book about Lisp hacking. It\\'s scary to think how little I knew about Lisp hacking when I started writing that book. But there\\'s nothing like writing a book about something to help you learn it. The book, On Lisp, wasn\\'t published till 1993, but I wrote much of it in grad school.\\n\\nComputer Science is an uneasy alliance between two halves, theory and systems. 
The', doc_id='1992e045-1032-432c-ac01-42947c0727e9', embedding=None, doc_hash='a8b756a8ec7638539582ead732467c48ac3d7bdbfc15e068dd041d4c9582e497', extra_info=None, node_info={'start': 3982, 'end': 7880, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: 'a0e4f5a8-f852-4807-96f2-76721dc1e57d', <DocumentRelationship.NEXT: '3'>: 'e06dfa02-faa5-4617-9d58-00ae88433c58'}), score=None), NodeWithScore(node=Node(text='Science is an uneasy alliance between two halves, theory and systems. The theory people prove things, and the systems people build things. I wanted to build things. I had plenty of respect for theory — indeed, a sneaking suspicion that it was the more admirable of the two halves — but building things seemed so much more exciting.\\n\\nThe problem with systems work, though, was that it didn\\'t last. Any program you wrote today, no matter how good, would be obsolete in a couple decades at best. People might mention your software in footnotes, but no one would actually use it. And indeed, it would seem very feeble work. Only people with a sense of the history of the field would even realize that, in its time, it had been good.\\n\\nThere were some surplus Xerox Dandelions floating around the computer lab at one point. Anyone who wanted one to play around with could have one. I was briefly tempted, but they were so slow by present standards; what was the point? No one else wanted one either, so off they went. That was what happened to systems work.\\n\\nI wanted not just to build things, but to build things that would last.\\n\\nIn this dissatisfied state I went in 1988 to visit Rich Draves at CMU, where he was in grad school. One day I went to visit the Carnegie Institute, where I\\'d spent a lot of time as a kid. While looking at a painting there I realized something that might seem obvious, but was a big surprise to me. 
There, right on the wall, was something you could make that would last. Paintings didn\\'t become obsolete. Some of the best ones were hundreds of years old.\\n\\nAnd moreover this was something you could make a living doing. Not as easily as you could by writing software, of course, but I thought if you were really industrious and lived really cheaply, it had to be possible to make enough to survive. And as an artist you could be truly independent. You wouldn\\'t have a boss, or even need to get research funding.\\n\\nI had always liked looking at paintings. Could I make them? I had no idea. I\\'d never imagined it was even possible. I knew intellectually that people made art — that it didn\\'t just appear spontaneously — but it was as if the people who made it were a different species. They either lived long ago or were mysterious geniuses doing strange things in profiles in Life magazine. The idea of actually being able to make art, to put that verb before that noun, seemed almost miraculous.\\n\\nThat fall I started taking art classes at Harvard. Grad students could take classes in any department, and my advisor, Tom Cheatham, was very easy going. If he even knew about the strange classes I was taking, he never said anything.\\n\\nSo now I was in a PhD program in computer science, yet planning to be an artist, yet also genuinely in love with Lisp hacking and working away at On Lisp. In other words, like many a grad student, I was working energetically on multiple projects that were not my thesis.\\n\\nI didn\\'t see a way out of this situation. I didn\\'t want to drop out of grad school, but how else was I going to get out? I remember when my friend Robert Morris got kicked out of Cornell for writing the internet worm of 1988, I was envious that he\\'d found such a spectacular way to get out of grad school.\\n\\nThen one day in April 1990 a crack appeared in the wall. I ran into professor Cheatham and he asked if I was far enough along to graduate that June. 
I didn\\'t have a word of my dissertation written, but in what must have been the quickest bit of thinking in my life, I decided to take a shot at writing one in the 5 weeks or so that remained before the deadline, reusing parts of On Lisp where I could, and I was able to respond, with no perceptible delay \"Yes, I think so. I\\'ll give you something to read in a few days.\"\\n\\nI picked applications of continuations as the topic. In retrospect I should have written about macros and embedded languages. There\\'s a whole world there that\\'s barely been explored. But all I wanted was to get out of grad school, and my rapidly written', doc_id='e06dfa02-faa5-4617-9d58-00ae88433c58', embedding=None, doc_hash='eb0b51b99800feed137ff8ae00239c9f87ae534d686eaa5684651d0a0f641742', extra_info=None, node_info={'start': 7878, 'end': 11887, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: '1992e045-1032-432c-ac01-42947c0727e9', <DocumentRelationship.NEXT: '3'>: '0fc849a1-2421-414e-9f83-f39e3ac47e41'}), score=None), NodeWithScore(node=Node(text=\"But all I wanted was to get out of grad school, and my rapidly written dissertation sufficed, just barely.\\n\\nMeanwhile I was applying to art schools. I applied to two: RISD in the US, and the Accademia di Belli Arti in Florence, which, because it was the oldest art school, I imagined would be good. RISD accepted me, and I never heard back from the Accademia, so off to Providence I went.\\n\\nI'd applied for the BFA program at RISD, which meant in effect that I had to go to college again. This was not as strange as it sounds, because I was only 25, and art schools are full of people of different ages. RISD counted me as a transfer sophomore and said I had to do the foundation that summer. 
The foundation means the classes that everyone has to take in fundamental subjects like drawing, color, and design.\\n\\nToward the end of the summer I got a big surprise: a letter from the Accademia, which had been delayed because they'd sent it to Cambridge England instead of Cambridge Massachusetts, inviting me to take the entrance exam in Florence that fall. This was now only weeks away. My nice landlady let me leave my stuff in her attic. I had some money saved from consulting work I'd done in grad school; there was probably enough to last a year if I lived cheaply. Now all I had to do was learn Italian.\\n\\nOnly stranieri (foreigners) had to take this entrance exam. In retrospect it may well have been a way of excluding them, because there were so many stranieri attracted by the idea of studying art in Florence that the Italian students would otherwise have been outnumbered. I was in decent shape at painting and drawing from the RISD foundation that summer, but I still don't know how I managed to pass the written exam. I remember that I answered the essay question by writing about Cezanne, and that I cranked up the intellectual level as high as I could to make the most of my limited vocabulary. [2]\\n\\nI'm only up to age 25 and already there are such conspicuous patterns. Here I was, yet again about to attend some august institution in the hopes of learning about some prestigious subject, and yet again about to be disappointed. The students and faculty in the painting department at the Accademia were the nicest people you could imagine, but they had long since arrived at an arrangement whereby the students wouldn't require the faculty to teach anything, and in return the faculty wouldn't require the students to learn anything. And at the same time all involved would adhere outwardly to the conventions of a 19th century atelier. 
We actually had one of those little stoves, fed with kindling, that you see in 19th century studio paintings, and a nude model sitting as close to it as possible without getting burned. Except hardly anyone else painted her besides me. The rest of the students spent their time chatting or occasionally trying to imitate things they'd seen in American art magazines.\\n\\nOur model turned out to live just down the street from me. She made a living from a combination of modelling and making fakes for a local antique dealer. She'd copy an obscure old painting out of a book, and then he'd take the copy and maltreat it to make it look old. [3]\\n\\nWhile I was a student at the Accademia I started painting still lives in my bedroom at night. These paintings were tiny, because the room was, and because I painted them on leftover scraps of canvas, which was all I could afford at the time. Painting still lives is different from painting people, because the subject, as its name suggests, can't move. People can't sit for more than about 15 minutes at a time, and when they do they don't sit very still. So the traditional m.o. for painting people is to know how to paint a generic person, which you then modify to match the specific person you're painting. Whereas a still life you can, if you want, copy pixel by pixel from what you're seeing. 
You don't want to stop there, of course, or you get merely photographic accuracy, and what makes a still life interesting is that\", doc_id='0fc849a1-2421-414e-9f83-f39e3ac47e41', embedding=None, doc_hash='55796dd9e91b31dd897144f980f8536700eb3febd0f6bc5e732db0b2b754cf42', extra_info=None, node_info={'start': 11894, 'end': 15911, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: 'e06dfa02-faa5-4617-9d58-00ae88433c58', <DocumentRelationship.NEXT: '3'>: '8e0bca16-b087-489b-983a-5beaaf393f64'}), score=None), NodeWithScore(node=Node(text='or you get merely photographic accuracy, and what makes a still life interesting is that it\\'s been through a head. You want to emphasize the visual cues that tell you, for example, that the reason the color changes suddenly at a certain point is that it\\'s the edge of an object. By subtly emphasizing such things you can make paintings that are more realistic than photographs not just in some metaphorical sense, but in the strict information-theoretic sense. [4]\\n\\nI liked painting still lives because I was curious about what I was seeing. In everyday life, we aren\\'t consciously aware of much we\\'re seeing. Most visual perception is handled by low-level processes that merely tell your brain \"that\\'s a water droplet\" without telling you details like where the lightest and darkest points are, or \"that\\'s a bush\" without telling you the shape and position of every leaf. This is a feature of brains, not a bug. In everyday life it would be distracting to notice every leaf on every bush. But when you have to paint something, you have to look more closely, and when you do there\\'s a lot to see. 
You can still be noticing new things after days of trying to paint something people usually take for granted, just as you can after days of trying to write an essay about something people usually take for granted.\\n\\nThis is not the only way to paint. I\\'m not 100% sure it\\'s even a good way to paint. But it seemed a good enough bet to be worth trying.\\n\\nOur teacher, professor Ulivi, was a nice guy. He could see I worked hard, and gave me a good grade, which he wrote down in a sort of passport each student had. But the Accademia wasn\\'t teaching me anything except Italian, and my money was running out, so at the end of the first year I went back to the US.\\n\\nI wanted to go back to RISD, but I was now broke and RISD was very expensive, so I decided to get a job for a year and then return to RISD the next fall. I got one at a company called Interleaf, which made software for creating documents. You mean like Microsoft Word? Exactly. That was how I learned that low end software tends to eat high end software. But Interleaf still had a few years to live yet. [5]\\n\\nInterleaf had done something pretty bold. Inspired by Emacs, they\\'d added a scripting language, and even made the scripting language a dialect of Lisp. Now they wanted a Lisp hacker to write things in it. This was the closest thing I\\'ve had to a normal job, and I hereby apologize to my boss and coworkers, because I was a bad employee. Their Lisp was the thinnest icing on a giant C cake, and since I didn\\'t know C and didn\\'t want to learn it, I never understood most of the software. Plus I was terribly irresponsible. This was back when a programming job meant showing up every day during certain working hours. That seemed unnatural to me, and on this point the rest of the world is coming around to my way of thinking, but at the time it caused a lot of friction. 
Toward the end of the year I spent much of my time surreptitiously working on On Lisp, which I had by this time gotten a contract to publish.\\n\\nThe good part was that I got paid huge amounts of money, especially by art student standards. In Florence, after paying my part of the rent, my budget for everything else had been $7 a day. Now I was getting paid more than 4 times that every hour, even when I was just sitting in a meeting. By living cheaply I not only managed to save enough to go back to RISD, but also paid off my college loans.\\n\\nI learned some useful things at Interleaf, though they were mostly about what not to do. I learned that it\\'s better for technology companies to be run by product people than sales people (though sales is a real skill and people who are good at it are really good at it), that it leads to bugs when code is edited by too many people, that cheap office space is no bargain if it\\'s depressing, that planned meetings are inferior to corridor conversations, that big, bureaucratic customers are a dangerous source of money, and that there\\'s not much overlap between conventional', doc_id='8e0bca16-b087-489b-983a-5beaaf393f64', embedding=None, doc_hash='381c03ecd6edff8260cd955e3762b2c94431d15ae62b58591ab3f9fc9342ed58', extra_info=None, node_info={'start': 15894, 'end': 19945, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: '0fc849a1-2421-414e-9f83-f39e3ac47e41', <DocumentRelationship.NEXT: '3'>: '855d5459-1cfe-465e-8c94-f9a1d047f821'}), score=None), NodeWithScore(node=Node(text='are a dangerous source of money, and that there\\'s not much overlap between conventional office hours and the optimal time for hacking, or conventional offices and the optimal place for it.\\n\\nBut the most important thing I learned, and which I used in both Viaweb and Y Combinator, is that the low end eats the high end: that it\\'s good 
to be the \"entry level\" option, even though that will be less prestigious, because if you\\'re not, someone else will be, and will squash you against the ceiling. Which in turn means that prestige is a danger sign.\\n\\nWhen I left to go back to RISD the next fall, I arranged to do freelance work for the group that did projects for customers, and this was how I survived for the next several years. When I came back to visit for a project later on, someone told me about a new thing called HTML, which was, as he described it, a derivative of SGML. Markup language enthusiasts were an occupational hazard at Interleaf and I ignored him, but this HTML thing later became a big part of my life.\\n\\nIn the fall of 1992 I moved back to Providence to continue at RISD. The foundation had merely been intro stuff, and the Accademia had been a (very civilized) joke. Now I was going to see what real art school was like. But alas it was more like the Accademia than not. Better organized, certainly, and a lot more expensive, but it was now becoming clear that art school did not bear the same relationship to art that medical school bore to medicine. At least not the painting department. The textile department, which my next door neighbor belonged to, seemed to be pretty rigorous. No doubt illustration and architecture were too. But painting was post-rigorous. Painting students were supposed to express themselves, which to the more worldly ones meant to try to cook up some sort of distinctive signature style.\\n\\nA signature style is the visual equivalent of what in show business is known as a \"schtick\": something that immediately identifies the work as yours and no one else\\'s. For example, when you see a painting that looks like a certain kind of cartoon, you know it\\'s by Roy Lichtenstein. So if you see a big painting of this type hanging in the apartment of a hedge fund manager, you know he paid millions of dollars for it. 
That\\'s not always why artists have a signature style, but it\\'s usually why buyers pay a lot for such work. [6]\\n\\nThere were plenty of earnest students too: kids who \"could draw\" in high school, and now had come to what was supposed to be the best art school in the country, to learn to draw even better. They tended to be confused and demoralized by what they found at RISD, but they kept going, because painting was what they did. I was not one of the kids who could draw in high school, but at RISD I was definitely closer to their tribe than the tribe of signature style seekers.\\n\\nI learned a lot in the color class I took at RISD, but otherwise I was basically teaching myself to paint, and I could do that for free. So in 1993 I dropped out. I hung around Providence for a bit, and then my college friend Nancy Parmet did me a big favor. A rent-controlled apartment in a building her mother owned in New York was becoming vacant. Did I want it? It wasn\\'t much more than my current place, and New York was supposed to be where the artists were. So yes, I wanted it! [7]\\n\\nAsterix comics begin by zooming in on a tiny corner of Roman Gaul that turns out not to be controlled by the Romans. You can do something similar on a map of New York City: if you zoom in on the Upper East Side, there\\'s a tiny corner that\\'s not rich, or at least wasn\\'t in 1993. It\\'s called Yorkville, and that was my new home. Now I was a New York artist — in the strictly technical sense of making paintings and living in New York.\\n\\nI was nervous about money, because I could sense that Interleaf was on the way down. 
Freelance Lisp hacking work was very rare, and I', doc_id='855d5459-1cfe-465e-8c94-f9a1d047f821', embedding=None, doc_hash='3a298b8e6f42c9af6356b611a9cbe37172455b8047ba2009f49b910488e96f8d', extra_info=None, node_info={'start': 19946, 'end': 23848, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: '8e0bca16-b087-489b-983a-5beaaf393f64', <DocumentRelationship.NEXT: '3'>: '3b199ba9-d04b-473a-8c73-39c293638957'}), score=None), NodeWithScore(node=Node(text='on the way down. Freelance Lisp hacking work was very rare, and I didn\\'t want to have to program in another language, which in those days would have meant C++ if I was lucky. So with my unerring nose for financial opportunity, I decided to write another book on Lisp. This would be a popular book, the sort of book that could be used as a textbook. I imagined myself living frugally off the royalties and spending all my time painting. (The painting on the cover of this book, ANSI Common Lisp, is one that I painted around this time.)\\n\\nThe best thing about New York for me was the presence of Idelle and Julian Weber. Idelle Weber was a painter, one of the early photorealists, and I\\'d taken her painting class at Harvard. I\\'ve never known a teacher more beloved by her students. Large numbers of former students kept in touch with her, including me. After I moved to New York I became her de facto studio assistant.\\n\\nShe liked to paint on big, square canvases, 4 to 5 feet on a side. One day in late 1994 as I was stretching one of these monsters there was something on the radio about a famous fund manager. He wasn\\'t that much older than me, and was super rich. The thought suddenly occurred to me: why don\\'t I become rich? Then I\\'ll be able to work on whatever I want.\\n\\nMeanwhile I\\'d been hearing more and more about this new thing called the World Wide Web. 
Robert Morris showed it to me when I visited him in Cambridge, where he was now in grad school at Harvard. It seemed to me that the web would be a big deal. I\\'d seen what graphical user interfaces had done for the popularity of microcomputers. It seemed like the web would do the same for the internet.\\n\\nIf I wanted to get rich, here was the next train leaving the station. I was right about that part. What I got wrong was the idea. I decided we should start a company to put art galleries online. I can\\'t honestly say, after reading so many Y Combinator applications, that this was the worst startup idea ever, but it was up there. Art galleries didn\\'t want to be online, and still don\\'t, not the fancy ones. That\\'s not how they sell. I wrote some software to generate web sites for galleries, and Robert wrote some to resize images and set up an http server to serve the pages. Then we tried to sign up galleries. To call this a difficult sale would be an understatement. It was difficult to give away. A few galleries let us make sites for them for free, but none paid us.\\n\\nThen some online stores started to appear, and I realized that except for the order buttons they were identical to the sites we\\'d been generating for galleries. This impressive-sounding thing called an \"internet storefront\" was something we already knew how to build.\\n\\nSo in the summer of 1995, after I submitted the camera-ready copy of ANSI Common Lisp to the publishers, we started trying to write software to build online stores. At first this was going to be normal desktop software, which in those days meant Windows software. That was an alarming prospect, because neither of us knew how to write Windows software or wanted to learn. We lived in the Unix world. But we decided we\\'d at least try writing a prototype store builder on Unix. 
Robert wrote a shopping cart, and I wrote a new site generator for stores — in Lisp, of course.\\n\\nWe were working out of Robert\\'s apartment in Cambridge. His roommate was away for big chunks of time, during which I got to sleep in his room. For some reason there was no bed frame or sheets, just a mattress on the floor. One morning as I was lying on this mattress I had an idea that made me sit up like a capital L. What if we ran the software on the server, and let users control it by clicking on links? Then we\\'d never have to write anything to run on users\\' computers. We could generate the sites on the same server we\\'d serve them from. Users wouldn\\'t need anything more than a browser.\\n\\nThis kind of software, known as a web app, is common now, but at the time it wasn\\'t clear that it was even possible. To find out, we decided to try', doc_id='3b199ba9-d04b-473a-8c73-39c293638957', embedding=None, doc_hash='193c210e6a2567bac7400ec1d44e0bcc00777378664c0e3d08495baf7db4d9ac', extra_info=None, node_info={'start': 23870, 'end': 27897, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: '855d5459-1cfe-465e-8c94-f9a1d047f821', <DocumentRelationship.NEXT: '3'>: '669c0218-b2c1-428b-808c-f5408e52dcdf'}), score=None), NodeWithScore(node=Node(text='wasn\\'t clear that it was even possible. To find out, we decided to try making a version of our store builder that you could control through the browser. A couple days later, on August 12, we had one that worked. The UI was horrible, but it proved you could build a whole store through the browser, without any client software or typing anything into the command line on the server.\\n\\nNow we felt like we were really onto something. I had visions of a whole new generation of software working this way. You wouldn\\'t need versions, or ports, or any of that crap. 
At Interleaf there had been a whole group called Release Engineering that seemed to be at least as big as the group that actually wrote the software. Now you could just update the software right on the server.\\n\\nWe started a new company we called Viaweb, after the fact that our software worked via the web, and we got $10,000 in seed funding from Idelle\\'s husband Julian. In return for that and doing the initial legal work and giving us business advice, we gave him 10% of the company. Ten years later this deal became the model for Y Combinator\\'s. We knew founders needed something like this, because we\\'d needed it ourselves.\\n\\nAt this stage I had a negative net worth, because the thousand dollars or so I had in the bank was more than counterbalanced by what I owed the government in taxes. (Had I diligently set aside the proper proportion of the money I\\'d made consulting for Interleaf? No, I had not.) So although Robert had his graduate student stipend, I needed that seed funding to live on.\\n\\nWe originally hoped to launch in September, but we got more ambitious about the software as we worked on it. Eventually we managed to build a WYSIWYG site builder, in the sense that as you were creating pages, they looked exactly like the static ones that would be generated later, except that instead of leading to static pages, the links all referred to closures stored in a hash table on the server.\\n\\nIt helped to have studied art, because the main goal of an online store builder is to make users look legit, and the key to looking legit is high production values. If you get page layouts and fonts and colors right, you can make a guy running a store out of his bedroom look more legit than a big company.\\n\\n(If you\\'re curious why my site looks so old-fashioned, it\\'s because it\\'s still made with this software. It may look clunky today, but in 1996 it was the last word in slick.)\\n\\nIn September, Robert rebelled. 
\"We\\'ve been working on this for a month,\" he said, \"and it\\'s still not done.\" This is funny in retrospect, because he would still be working on it almost 3 years later. But I decided it might be prudent to recruit more programmers, and I asked Robert who else in grad school with him was really good. He recommended Trevor Blackwell, which surprised me at first, because at that point I knew Trevor mainly for his plan to reduce everything in his life to a stack of notecards, which he carried around with him. But Rtm was right, as usual. Trevor turned out to be a frighteningly effective hacker.\\n\\nIt was a lot of fun working with Robert and Trevor. They\\'re the two most independent-minded people I know, and in completely different ways. If you could see inside Rtm\\'s brain it would look like a colonial New England church, and if you could see inside Trevor\\'s it would look like the worst excesses of Austrian Rococo.\\n\\nWe opened for business, with 6 stores, in January 1996. It was just as well we waited a few months, because although we worried we were late, we were actually almost fatally early. There was a lot of talk in the press then about ecommerce, but not many people actually wanted online stores. [8]\\n\\nThere were three main parts to the software: the editor, which people used to build sites and which I wrote, the shopping cart, which Robert wrote, and the manager, which kept track of orders and statistics, and which Trevor wrote. In its time, the editor was one of the best general-purpose site builders. I kept the code tight and didn\\'t have to integrate with any other software except Robert\\'s and Trevor\\'s, so it was quite fun to work on. 
If all I\\'d had to do was', doc_id='669c0218-b2c1-428b-808c-f5408e52dcdf', embedding=None, doc_hash='fd9eb9ceabf16d661afa8f19d64b256664bbc61c886292817fd92dc2456b8eaa', extra_info=None, node_info={'start': 27894, 'end': 32060, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: '3b199ba9-d04b-473a-8c73-39c293638957', <DocumentRelationship.NEXT: '3'>: 'eb14dc48-f3a1-461d-9a49-0d78331dd083'}), score=None), NodeWithScore(node=Node(text='so it was quite fun to work on. If all I\\'d had to do was work on this software, the next 3 years would have been the easiest of my life. Unfortunately I had to do a lot more, all of it stuff I was worse at than programming, and the next 3 years were instead the most stressful.\\n\\nThere were a lot of startups making ecommerce software in the second half of the 90s. We were determined to be the Microsoft Word, not the Interleaf. Which meant being easy to use and inexpensive. It was lucky for us that we were poor, because that caused us to make Viaweb even more inexpensive than we realized. We charged $100 a month for a small store and $300 a month for a big one. This low price was a big attraction, and a constant thorn in the sides of competitors, but it wasn\\'t because of some clever insight that we set the price low. We had no idea what businesses paid for things. $300 a month seemed like a lot of money to us.\\n\\nWe did a lot of things right by accident like that. For example, we did what\\'s now called \"doing things that don\\'t scale,\" although at the time we would have described it as \"being so lame that we\\'re driven to the most desperate measures to get users.\" The most common of which was building stores for them. This seemed particularly humiliating, since the whole raison d\\'etre of our software was that people could use it to make their own stores. 
But anything to get users.\\n\\nWe learned a lot more about retail than we wanted to know. For example, that if you could only have a small image of a man\\'s shirt (and all images were small then by present standards), it was better to have a closeup of the collar than a picture of the whole shirt. The reason I remember learning this was that it meant I had to rescan about 30 images of men\\'s shirts. My first set of scans were so beautiful too.\\n\\nThough this felt wrong, it was exactly the right thing to be doing. Building stores for users taught us about retail, and about how it felt to use our software. I was initially both mystified and repelled by \"business\" and thought we needed a \"business person\" to be in charge of it, but once we started to get users, I was converted, in much the same way I was converted to fatherhood once I had kids. Whatever users wanted, I was all theirs. Maybe one day we\\'d have so many users that I couldn\\'t scan their images for them, but in the meantime there was nothing more important to do.\\n\\nAnother thing I didn\\'t get at the time is that growth rate is the ultimate test of a startup. Our growth rate was fine. We had about 70 stores at the end of 1996 and about 500 at the end of 1997. I mistakenly thought the thing that mattered was the absolute number of users. And that is the thing that matters in the sense that that\\'s how much money you\\'re making, and if you\\'re not making enough, you might go out of business. But in the long term the growth rate takes care of the absolute number. If we\\'d been a startup I was advising at Y Combinator, I would have said: Stop being so stressed out, because you\\'re doing fine. You\\'re growing 7x a year. Just don\\'t hire too many more people and you\\'ll soon be profitable, and then you\\'ll control your own destiny.\\n\\nAlas I hired lots more people, partly because our investors wanted me to, and partly because that\\'s what startups did during the Internet Bubble. 
A company with just a handful of employees would have seemed amateurish. So we didn\\'t reach breakeven until about when Yahoo bought us in the summer of 1998. Which in turn meant we were at the mercy of investors for the entire life of the company. And since both we and our investors were noobs at startups, the result was a mess even by startup standards.\\n\\nIt was a huge relief when Yahoo bought us. In principle our Viaweb stock was valuable. It was a share in a business that was profitable and growing rapidly. But it didn\\'t feel very valuable to me; I had no idea how to value a business, but I was all too keenly aware of the near-death experiences we seemed to have every few months. Nor had I changed my grad student lifestyle significantly since we started. So when', doc_id='eb14dc48-f3a1-461d-9a49-0d78331dd083', embedding=None, doc_hash='9185047072ffc2d0a80db719ed6fa93a69773224a62bbe5c7a8f59eaed4e80e1', extra_info=None, node_info={'start': 32076, 'end': 36162, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: '669c0218-b2c1-428b-808c-f5408e52dcdf', <DocumentRelationship.NEXT: '3'>: '99b3c4c2-aeab-4f5a-9474-916f6c35c9fb'}), score=None), NodeWithScore(node=Node(text=\"few months. Nor had I changed my grad student lifestyle significantly since we started. So when Yahoo bought us it felt like going from rags to riches. Since we were going to California, I bought a car, a yellow 1998 VW GTI. I remember thinking that its leather seats alone were by far the most luxurious thing I owned.\\n\\nThe next year, from the summer of 1998 to the summer of 1999, must have been the least productive of my life. I didn't realize it at the time, but I was worn out from the effort and stress of running Viaweb. For a while after I got to California I tried to continue my usual m.o. 
of programming till 3 in the morning, but fatigue combined with Yahoo's prematurely aged culture and grim cube farm in Santa Clara gradually dragged me down. After a few months it felt disconcertingly like working at Interleaf.\\n\\nYahoo had given us a lot of options when they bought us. At the time I thought Yahoo was so overvalued that they'd never be worth anything, but to my astonishment the stock went up 5x in the next year. I hung on till the first chunk of options vested, then in the summer of 1999 I left. It had been so long since I'd painted anything that I'd half forgotten why I was doing this. My brain had been entirely full of software and men's shirts for 4 years. But I had done this to get rich so I could paint, I reminded myself, and now I was rich, so I should go paint.\\n\\nWhen I said I was leaving, my boss at Yahoo had a long conversation with me about my plans. I told him all about the kinds of pictures I wanted to paint. At the time I was touched that he took such an interest in me. Now I realize it was because he thought I was lying. My options at that point were worth about $2 million a month. If I was leaving that kind of money on the table, it could only be to go and start some new startup, and if I did, I might take people with me. This was the height of the Internet Bubble, and Yahoo was ground zero of it. My boss was at that moment a billionaire. Leaving then to start a new startup must have seemed to him an insanely, and yet also plausibly, ambitious plan.\\n\\nBut I really was quitting to paint, and I started immediately. There was no time to lose. I'd already burned 4 years getting rich. Now when I talk to founders who are leaving after selling their companies, my advice is always the same: take a vacation. That's what I should have done, just gone off somewhere and done nothing for a month or two, but the idea never occurred to me.\\n\\nSo I tried to paint, but I just didn't seem to have any energy or ambition. 
Part of the problem was that I didn't know many people in California. I'd compounded this problem by buying a house up in the Santa Cruz Mountains, with a beautiful view but miles from anywhere. I stuck it out for a few more months, then in desperation I went back to New York, where unless you understand about rent control you'll be surprised to hear I still had my apartment, sealed up like a tomb of my old life. Idelle was in New York at least, and there were other people trying to paint there, even though I didn't know any of them.\\n\\nWhen I got back to New York I resumed my old life, except now I was rich. It was as weird as it sounds. I resumed all my old patterns, except now there were doors where there hadn't been. Now when I was tired of walking, all I had to do was raise my hand, and (unless it was raining) a taxi would stop to pick me up. Now when I walked past charming little restaurants I could go in and order lunch. It was exciting for a while. Painting started to go better. I experimented with a new kind of still life where I'd paint one painting in the old way, then photograph it and print it, blown up, on canvas, and then use that as the underpainting for a second still life, painted from the same objects (which hopefully hadn't rotted yet).\\n\\nMeanwhile I looked for an apartment to buy. Now I could actually choose what\", doc_id='99b3c4c2-aeab-4f5a-9474-916f6c35c9fb', embedding=None, doc_hash='363de309e331985d57cbd2a87662a4b6036a44807432524f2dbfd50a7e6ba7e5', extra_info=None, node_info={'start': 36125, 'end': 40040, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: 'eb14dc48-f3a1-461d-9a49-0d78331dd083', <DocumentRelationship.NEXT: '3'>: '8a9e2472-230f-437f-b720-1494878d5933'}), score=None), NodeWithScore(node=Node(text='I looked for an apartment to buy. Now I could actually choose what neighborhood to live in. 
Where, I asked myself and various real estate agents, is the Cambridge of New York? Aided by occasional visits to actual Cambridge, I gradually realized there wasn\\'t one. Huh.\\n\\nAround this time, in the spring of 2000, I had an idea. It was clear from our experience with Viaweb that web apps were the future. Why not build a web app for making web apps? Why not let people edit code on our server through the browser, and then host the resulting applications for them? [9] You could run all sorts of services on the servers that these applications could use just by making an API call: making and receiving phone calls, manipulating images, taking credit card payments, etc.\\n\\nI got so excited about this idea that I couldn\\'t think about anything else. It seemed obvious that this was the future. I didn\\'t particularly want to start another company, but it was clear that this idea would have to be embodied as one, so I decided to move to Cambridge and start it. I hoped to lure Robert into working on it with me, but there I ran into a hitch. Robert was now a postdoc at MIT, and though he\\'d made a lot of money the last time I\\'d lured him into working on one of my schemes, it had also been a huge time sink. So while he agreed that it sounded like a plausible idea, he firmly refused to work on it.\\n\\nHmph. Well, I\\'d do it myself then. I recruited Dan Giffin, who had worked for Viaweb, and two undergrads who wanted summer jobs, and we got to work trying to build what it\\'s now clear is about twenty companies and several open source projects worth of software. The language for defining applications would of course be a dialect of Lisp. But I wasn\\'t so naive as to assume I could spring an overt Lisp on a general audience; we\\'d hide the parentheses, like Dylan did.\\n\\nBy then there was a name for the kind of company Viaweb was, an \"application service provider,\" or ASP. 
This name didn\\'t last long before it was replaced by \"software as a service,\" but it was current for long enough that I named this new company after it: it was going to be called Aspra.\\n\\nI started working on the application builder, Dan worked on network infrastructure, and the two undergrads worked on the first two services (images and phone calls). But about halfway through the summer I realized I really didn\\'t want to run a company — especially not a big one, which it was looking like this would have to be. I\\'d only started Viaweb because I needed the money. Now that I didn\\'t need money anymore, why was I doing this? If this vision had to be realized as a company, then screw the vision. I\\'d build a subset that could be done as an open source project.\\n\\nMuch to my surprise, the time I spent working on this stuff was not wasted after all. After we started Y Combinator, I would often encounter startups working on parts of this new architecture, and it was very useful to have spent so much time thinking about it and even trying to write some of it.\\n\\nThe subset I would build as an open source project was the new Lisp, whose parentheses I now wouldn\\'t even have to hide. A lot of Lisp hackers dream of building a new Lisp, partly because one of the distinctive features of the language is that it has dialects, and partly, I think, because we have in our minds a Platonic form of Lisp that all existing dialects fall short of. I certainly did. So at the end of the summer Dan and I switched to working on this new dialect of Lisp, which I called Arc, in a house I bought in Cambridge.\\n\\nThe following spring, lightning struck. I was invited to give a talk at a Lisp conference, so I gave one about how we\\'d used Lisp at Viaweb. Afterward I put a postscript file of this talk online, on paulgraham.com, which I\\'d created years before using Viaweb but had never used for anything. In one day it got 30,000 page views. What on earth had happened? 
The referring urls showed', doc_id='8a9e2472-230f-437f-b720-1494878d5933', embedding=None, doc_hash='1a72edc63a9582c55f626fed3dbb1fadbfb1e7e7111e93b6c789c79c66db1e11', extra_info=None, node_info={'start': 40067, 'end': 44018, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: '99b3c4c2-aeab-4f5a-9474-916f6c35c9fb', <DocumentRelationship.NEXT: '3'>: '2d1c3f1b-5fa5-46c9-be20-87ad2dfc9060'}), score=None), NodeWithScore(node=Node(text=\"page views. What on earth had happened? The referring urls showed that someone had posted it on Slashdot. [10]\\n\\nWow, I thought, there's an audience. If I write something and put it on the web, anyone can read it. That may seem obvious now, but it was surprising then. In the print era there was a narrow channel to readers, guarded by fierce monsters known as editors. The only way to get an audience for anything you wrote was to get it published as a book, or in a newspaper or magazine. Now anyone could publish anything.\\n\\nThis had been possible in principle since 1993, but not many people had realized it yet. I had been intimately involved with building the infrastructure of the web for most of that time, and a writer as well, and it had taken me 8 years to realize it. Even then it took me several years to understand the implications. It meant there would be a whole new generation of essays. [11]\\n\\nIn the print era, the channel for publishing essays had been vanishingly small. Except for a few officially anointed thinkers who went to the right parties in New York, the only people allowed to publish essays were specialists writing about their specialties. There were so many essays that had never been written, because there had been no way to publish them. Now they could be, and I was going to write them. 
[12]\\n\\nI've worked on several different things, but to the extent there was a turning point where I figured out what to work on, it was when I started publishing essays online. From then on I knew that whatever else I did, I'd always write essays too.\\n\\nI knew that online essays would be a marginal medium at first. Socially they'd seem more like rants posted by nutjobs on their GeoCities sites than the genteel and beautifully typeset compositions published in The New Yorker. But by this point I knew enough to find that encouraging instead of discouraging.\\n\\nOne of the most conspicuous patterns I've noticed in my life is how well it has worked, for me at least, to work on things that weren't prestigious. Still life has always been the least prestigious form of painting. Viaweb and Y Combinator both seemed lame when we started them. I still get the glassy eye from strangers when they ask what I'm writing, and I explain that it's an essay I'm going to publish on my web site. Even Lisp, though prestigious intellectually in something like the way Latin is, also seems about as hip.\\n\\nIt's not that unprestigious types of work are good per se. But when you find yourself drawn to some kind of work despite its current lack of prestige, it's a sign both that there's something real to be discovered there, and that you have the right kind of motives. Impure motives are a big danger for the ambitious. If anything is going to lead you astray, it will be the desire to impress people. So while working on things that aren't prestigious doesn't guarantee you're on the right track, it at least guarantees you're not on the most common type of wrong one.\\n\\nOver the next several years I wrote lots of essays about all kinds of different topics. O'Reilly reprinted a collection of them as a book, called Hackers & Painters after one of the essays in it. I also worked on spam filters, and did some more painting. 
I used to have dinners for a group of friends every thursday night, which taught me how to cook for groups. And I bought another building in Cambridge, a former candy factory (and later, twas said, porn studio), to use as an office.\\n\\nOne night in October 2003 there was a big party at my house. It was a clever idea of my friend Maria Daniels, who was one of the thursday diners. Three separate hosts would all invite their friends to one party. So for every guest, two thirds of the other guests would be people they didn't know but would probably like. One of the guests was someone I didn't know but would turn out to like a lot: a woman called Jessica Livingston. A couple days later I asked her out.\\n\\nJessica was in charge of marketing at a Boston investment\", doc_id='2d1c3f1b-5fa5-46c9-be20-87ad2dfc9060', embedding=None, doc_hash='597ba7da683f715e046031bf0824aa5d9ff24b64aa2c0902a3ad6ee1e48ea312', extra_info=None, node_info={'start': 44018, 'end': 48016, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: '8a9e2472-230f-437f-b720-1494878d5933', <DocumentRelationship.NEXT: '3'>: '42f8cdbc-3613-409a-85b6-bb1e22c85fef'}), score=None), NodeWithScore(node=Node(text='I asked her out.\\n\\nJessica was in charge of marketing at a Boston investment bank. This bank thought it understood startups, but over the next year, as she met friends of mine from the startup world, she was surprised how different reality was. And how colorful their stories were. So she decided to compile a book of interviews with startup founders.\\n\\nWhen the bank had financial problems and she had to fire half her staff, she started looking for a new job. In early 2005 she interviewed for a marketing job at a Boston VC firm. It took them weeks to make up their minds, and during this time I started telling her about all the things that needed to be fixed about venture capital. 
They should make a larger number of smaller investments instead of a handful of giant ones, they should be funding younger, more technical founders instead of MBAs, they should let the founders remain as CEO, and so on.\\n\\nOne of my tricks for writing essays had always been to give talks. The prospect of having to stand up in front of a group of people and tell them something that won\\'t waste their time is a great spur to the imagination. When the Harvard Computer Society, the undergrad computer club, asked me to give a talk, I decided I would tell them how to start a startup. Maybe they\\'d be able to avoid the worst of the mistakes we\\'d made.\\n\\nSo I gave this talk, in the course of which I told them that the best sources of seed funding were successful startup founders, because then they\\'d be sources of advice too. Whereupon it seemed they were all looking expectantly at me. Horrified at the prospect of having my inbox flooded by business plans (if I\\'d only known), I blurted out \"But not me!\" and went on with the talk. But afterward it occurred to me that I should really stop procrastinating about angel investing. I\\'d been meaning to since Yahoo bought us, and now it was 7 years later and I still hadn\\'t done one angel investment.\\n\\nMeanwhile I had been scheming with Robert and Trevor about projects we could work on together. I missed working with them, and it seemed like there had to be something we could collaborate on.\\n\\nAs Jessica and I were walking home from dinner on March 11, at the corner of Garden and Walker streets, these three threads converged. Screw the VCs who were taking so long to make up their minds. We\\'d start our own investment firm and actually implement the ideas we\\'d been talking about. I\\'d fund it, and Jessica could quit her job and work for it, and we\\'d get Robert and Trevor as partners too. [13]\\n\\nOnce again, ignorance worked in our favor. 
We had no idea how to be angel investors, and in Boston in 2005 there were no Ron Conways to learn from. So we just made what seemed like the obvious choices, and some of the things we did turned out to be novel.\\n\\nThere are multiple components to Y Combinator, and we didn\\'t figure them all out at once. The part we got first was to be an angel firm. In those days, those two words didn\\'t go together. There were VC firms, which were organized companies with people whose job it was to make investments, but they only did big, million dollar investments. And there were angels, who did smaller investments, but these were individuals who were usually focused on other things and made investments on the side. And neither of them helped founders enough in the beginning. We knew how helpless founders were in some respects, because we remembered how helpless we\\'d been. For example, one thing Julian had done for us that seemed to us like magic was to get us set up as a company. We were fine writing fairly difficult software, but actually getting incorporated, with bylaws and stock and all that stuff, how on earth did you do that? Our plan was not only to make seed investments, but to do for startups everything Julian had done for us.\\n\\nYC was not organized as a fund. It was cheap enough to run that we funded it with our own money. That went right by 99% of readers, but professional investors are thinking \"Wow, that means they got all the returns.\" But once again, this was not due to any particular insight on our part. 
We didn\\'t know how VC firms', doc_id='42f8cdbc-3613-409a-85b6-bb1e22c85fef', embedding=None, doc_hash='345510864fca229d2e5a22bf719f4ee408018c13c348307d3d72bc151fe61050', extra_info=None, node_info={'start': 48009, 'end': 52140, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: '2d1c3f1b-5fa5-46c9-be20-87ad2dfc9060', <DocumentRelationship.NEXT: '3'>: 'f9e74e9d-cdd9-43c4-8742-76c38200305f'}), score=None), NodeWithScore(node=Node(text='not due to any particular insight on our part. We didn\\'t know how VC firms were organized. It never occurred to us to try to raise a fund, and if it had, we wouldn\\'t have known where to start. [14]\\n\\nThe most distinctive thing about YC is the batch model: to fund a bunch of startups all at once, twice a year, and then to spend three months focusing intensively on trying to help them. That part we discovered by accident, not merely implicitly but explicitly due to our ignorance about investing. We needed to get experience as investors. What better way, we thought, than to fund a whole bunch of startups at once? We knew undergrads got temporary jobs at tech companies during the summer. Why not organize a summer program where they\\'d start startups instead? We wouldn\\'t feel guilty for being in a sense fake investors, because they would in a similar sense be fake founders. So while we probably wouldn\\'t make much money out of it, we\\'d at least get to practice being investors on them, and they for their part would probably have a more interesting summer than they would working at Microsoft.\\n\\nWe\\'d use the building I owned in Cambridge as our headquarters. 
We\\'d all have dinner there once a week — on tuesdays, since I was already cooking for the thursday diners on thursdays — and after dinner we\\'d bring in experts on startups to give talks.\\n\\nWe knew undergrads were deciding then about summer jobs, so in a matter of days we cooked up something we called the Summer Founders Program, and I posted an announcement on my site, inviting undergrads to apply. I had never imagined that writing essays would be a way to get \"deal flow,\" as investors call it, but it turned out to be the perfect source. [15] We got 225 applications for the Summer Founders Program, and we were surprised to find that a lot of them were from people who\\'d already graduated, or were about to that spring. Already this SFP thing was starting to feel more serious than we\\'d intended.\\n\\nWe invited about 20 of the 225 groups to interview in person, and from those we picked 8 to fund. They were an impressive group. That first batch included reddit, Justin Kan and Emmett Shear, who went on to found Twitch, Aaron Swartz, who had already helped write the RSS spec and would a few years later become a martyr for open access, and Sam Altman, who would later become the second president of YC. I don\\'t think it was entirely luck that the first batch was so good. You had to be pretty bold to sign up for a weird thing like the Summer Founders Program instead of a summer job at a legit place like Microsoft or Goldman Sachs.\\n\\nThe deal for startups was based on a combination of the deal we did with Julian ($10k for 10%) and what Robert said MIT grad students got for the summer ($6k). We invested $6k per founder, which in the typical two-founder case was $12k, in return for 6%. That had to be fair, because it was twice as good as the deal we ourselves had taken. Plus that first summer, which was really hot, Jessica brought the founders free air conditioners. [16]\\n\\nFairly quickly I realized that we had stumbled upon the way to scale startup funding. 
Funding startups in batches was more convenient for us, because it meant we could do things for a lot of startups at once, but being part of a batch was better for the startups too. It solved one of the biggest problems faced by founders: the isolation. Now you not only had colleagues, but colleagues who understood the problems you were facing and could tell you how they were solving them.\\n\\nAs YC grew, we started to notice other advantages of scale. The alumni became a tight community, dedicated to helping one another, and especially the current batch, whose shoes they remembered being in. We also noticed that the startups were becoming one another\\'s customers. We used to refer jokingly to the \"YC GDP,\" but as YC grows this becomes less and less of a joke. Now lots of startups get their initial set of customers almost entirely from among their batchmates.\\n\\nI had not originally', doc_id='f9e74e9d-cdd9-43c4-8742-76c38200305f', embedding=None, doc_hash='03e6a7d2e66c082507e29d8cfe9ec36fa77fe0b1cf352ad5ce85ad91f71a3622', extra_info=None, node_info={'start': 52144, 'end': 56170, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: '42f8cdbc-3613-409a-85b6-bb1e22c85fef', <DocumentRelationship.NEXT: '3'>: 'ab7ed037-4269-4593-b5ff-0ce3d9213cbd'}), score=None), NodeWithScore(node=Node(text='of customers almost entirely from among their batchmates.\\n\\nI had not originally intended YC to be a full-time job. I was going to do three things: hack, write essays, and work on YC. As YC grew, and I grew more excited about it, it started to take up a lot more than a third of my attention. But for the first few years I was still able to work on other things.\\n\\nIn the summer of 2006, Robert and I started working on a new version of Arc. This one was reasonably fast, because it was compiled into Scheme. To test this new Arc, I wrote Hacker News in it. 
It was originally meant to be a news aggregator for startup founders and was called Startup News, but after a few months I got tired of reading about nothing but startups. Plus it wasn\\'t startup founders we wanted to reach. It was future startup founders. So I changed the name to Hacker News and the topic to whatever engaged one\\'s intellectual curiosity.\\n\\nHN was no doubt good for YC, but it was also by far the biggest source of stress for me. If all I\\'d had to do was select and help founders, life would have been so easy. And that implies that HN was a mistake. Surely the biggest source of stress in one\\'s work should at least be something close to the core of the work. Whereas I was like someone who was in pain while running a marathon not from the exertion of running, but because I had a blister from an ill-fitting shoe. When I was dealing with some urgent problem during YC, there was about a 60% chance it had to do with HN, and a 40% chance it had do with everything else combined. [17]\\n\\nAs well as HN, I wrote all of YC\\'s internal software in Arc. But while I continued to work a good deal in Arc, I gradually stopped working on Arc, partly because I didn\\'t have time to, and partly because it was a lot less attractive to mess around with the language now that we had all this infrastructure depending on it. So now my three projects were reduced to two: writing essays and working on YC.\\n\\nYC was different from other kinds of work I\\'ve done. Instead of deciding for myself what to work on, the problems came to me. Every 6 months there was a new batch of startups, and their problems, whatever they were, became our problems. It was very engaging work, because their problems were quite varied, and the good founders were very effective. If you were trying to learn the most you could about startups in the shortest possible time, you couldn\\'t have picked a better way to do it.\\n\\nThere were parts of the job I didn\\'t like. 
Disputes between cofounders, figuring out when people were lying to us, fighting with people who maltreated the startups, and so on. But I worked hard even at the parts I didn\\'t like. I was haunted by something Kevin Hale once said about companies: \"No one works harder than the boss.\" He meant it both descriptively and prescriptively, and it was the second part that scared me. I wanted YC to be good, so if how hard I worked set the upper bound on how hard everyone else worked, I\\'d better work very hard.\\n\\nOne day in 2010, when he was visiting California for interviews, Robert Morris did something astonishing: he offered me unsolicited advice. I can only remember him doing that once before. One day at Viaweb, when I was bent over double from a kidney stone, he suggested that it would be a good idea for him to take me to the hospital. That was what it took for Rtm to offer unsolicited advice. So I remember his exact words very clearly. \"You know,\" he said, \"you should make sure Y Combinator isn\\'t the last cool thing you do.\"\\n\\nAt the time I didn\\'t understand what he meant, but gradually it dawned on me that he was saying I should quit. This seemed strange advice, because YC was doing great. But if there was one thing rarer than Rtm offering advice, it was Rtm being wrong. So this set me thinking. It was true that on my current trajectory, YC would be the last thing I did, because it was only taking up more of my attention. 
It had already eaten Arc, and was in the process of eating', doc_id='ab7ed037-4269-4593-b5ff-0ce3d9213cbd', embedding=None, doc_hash='28da476588fa6a7c04e3fc8d0c8490de5a6aa3f4b46ada11723bd524402b1d33', extra_info=None, node_info={'start': 56162, 'end': 60161, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: 'f9e74e9d-cdd9-43c4-8742-76c38200305f', <DocumentRelationship.NEXT: '3'>: 'cfd50785-c54a-4e07-b474-561541968da9'}), score=None), NodeWithScore(node=Node(text=\"of my attention. It had already eaten Arc, and was in the process of eating essays too. Either YC was my life's work or I'd have to leave eventually. And it wasn't, so I would.\\n\\nIn the summer of 2012 my mother had a stroke, and the cause turned out to be a blood clot caused by colon cancer. The stroke destroyed her balance, and she was put in a nursing home, but she really wanted to get out of it and back to her house, and my sister and I were determined to help her do it. I used to fly up to Oregon to visit her regularly, and I had a lot of time to think on those flights. On one of them I realized I was ready to hand YC over to someone else.\\n\\nI asked Jessica if she wanted to be president, but she didn't, so we decided we'd try to recruit Sam Altman. We talked to Robert and Trevor and we agreed to make it a complete changing of the guard. Up till that point YC had been controlled by the original LLC we four had started. But we wanted YC to last for a long time, and to do that it couldn't be controlled by the founders. So if Sam said yes, we'd let him reorganize YC. Robert and I would retire, and Jessica and Trevor would become ordinary partners.\\n\\nWhen we asked Sam if he wanted to be president of YC, initially he said no. He wanted to start a startup to make nuclear reactors. But I kept at it, and in October 2013 he finally agreed. 
We decided he'd take over starting with the winter 2014 batch. For the rest of 2013 I left running YC more and more to Sam, partly so he could learn the job, and partly because I was focused on my mother, whose cancer had returned.\\n\\nShe died on January 15, 2014. We knew this was coming, but it was still hard when it did.\\n\\nI kept working on YC till March, to help get that batch of startups through Demo Day, then I checked out pretty completely. (I still talk to alumni and to new startups working on things I'm interested in, but that only takes a few hours a week.)\\n\\nWhat should I do next? Rtm's advice hadn't included anything about that. I wanted to do something completely different, so I decided I'd paint. I wanted to see how good I could get if I really focused on it. So the day after I stopped working on YC, I started painting. I was rusty and it took a while to get back into shape, but it was at least completely engaging. [18]\\n\\nI spent most of the rest of 2014 painting. I'd never been able to work so uninterruptedly before, and I got to be better than I had been. Not good enough, but better. Then in November, right in the middle of a painting, I ran out of steam. Up till that point I'd always been curious to see how the painting I was working on would turn out, but suddenly finishing this one seemed like a chore. So I stopped working on it and cleaned my brushes and haven't painted since. So far anyway.\\n\\nI realize that sounds rather wimpy. But attention is a zero sum game. If you can choose what to work on, and you choose a project that's not the best one (or at least a good one) for you, then it's getting in the way of another project that is. And at 50 there was some opportunity cost to screwing around.\\n\\nI started writing essays again, and wrote a bunch of new ones over the next few months. I even wrote a couple that weren't about startups. 
Then in March 2015 I started working on Lisp again.\\n\\nThe distinctive thing about Lisp is that its core is a language defined by writing an interpreter in itself. It wasn't originally intended as a programming language in the ordinary sense. It was meant to be a formal model of computation, an alternative to the Turing machine. If you want to write an interpreter for a language in itself, what's the minimum set of predefined operators you need? The Lisp that John McCarthy invented, or more accurately discovered, is an answer to that question. [19]\\n\\nMcCarthy didn't realize this Lisp could even be used to program computers\", doc_id='cfd50785-c54a-4e07-b474-561541968da9', embedding=None, doc_hash='b6524989f50c19316fcc4135d476deedc62b79ab141d9f650743a6fe5f3558c9', extra_info=None, node_info={'start': 60170, 'end': 64027, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: 'ab7ed037-4269-4593-b5ff-0ce3d9213cbd', <DocumentRelationship.NEXT: '3'>: '14d14357-e5cf-4015-8a65-2cc9fd3fb5c4'}), score=None), NodeWithScore(node=Node(text='didn\\'t realize this Lisp could even be used to program computers till his grad student Steve Russell suggested it. Russell translated McCarthy\\'s interpreter into IBM 704 machine language, and from that point Lisp started also to be a programming language in the ordinary sense. But its origins as a model of computation gave it a power and elegance that other languages couldn\\'t match. It was this that attracted me in college, though I didn\\'t understand why at the time.\\n\\nMcCarthy\\'s 1960 Lisp did nothing more than interpret Lisp expressions. It was missing a lot of things you\\'d want in a programming language. So these had to be added, and when they were, they weren\\'t defined using McCarthy\\'s original axiomatic approach. That wouldn\\'t have been feasible at the time. 
McCarthy tested his interpreter by hand-simulating the execution of programs. But it was already getting close to the limit of interpreters you could test that way — indeed, there was a bug in it that McCarthy had overlooked. To test a more complicated interpreter, you\\'d have had to run it, and computers then weren\\'t powerful enough.\\n\\nNow they are, though. Now you could continue using McCarthy\\'s axiomatic approach till you\\'d defined a complete programming language. And as long as every change you made to McCarthy\\'s Lisp was a discoveredness-preserving transformation, you could, in principle, end up with a complete language that had this quality. Harder to do than to talk about, of course, but if it was possible in principle, why not try? So I decided to take a shot at it. It took 4 years, from March 26, 2015 to October 12, 2019. It was fortunate that I had a precisely defined goal, or it would have been hard to keep at it for so long.\\n\\nI wrote this new Lisp, called Bel, in itself in Arc. That may sound like a contradiction, but it\\'s an indication of the sort of trickery I had to engage in to make this work. By means of an egregious collection of hacks I managed to make something close enough to an interpreter written in itself that could actually run. Not fast, but fast enough to test.\\n\\nI had to ban myself from writing essays during most of this time, or I\\'d never have finished. In late 2015 I spent 3 months writing essays, and when I went back to working on Bel I could barely understand the code. Not so much because it was badly written as because the problem is so convoluted. When you\\'re working on an interpreter written in itself, it\\'s hard to keep track of what\\'s happening at what level, and errors can be practically encrypted by the time you get them.\\n\\nSo I said no more essays till Bel was done. But I told few people about Bel while I was working on it. 
So for years it must have seemed that I was doing nothing, when in fact I was working harder than I\\'d ever worked on anything. Occasionally after wrestling for hours with some gruesome bug I\\'d check Twitter or HN and see someone asking \"Does Paul Graham still code?\"\\n\\nWorking on Bel was hard but satisfying. I worked on it so intensively that at any given time I had a decent chunk of the code in my head and could write more there. I remember taking the boys to the coast on a sunny day in 2015 and figuring out how to deal with some problem involving continuations while I watched them play in the tide pools. It felt like I was doing life right. I remember that because I was slightly dismayed at how novel it felt. The good news is that I had more moments like this over the next few years.\\n\\nIn the summer of 2016 we moved to England. We wanted our kids to see what it was like living in another country, and since I was a British citizen by birth, that seemed the obvious choice. We only meant to stay for a year, but we liked it so much that we still live there. So most of Bel was written in England.\\n\\nIn the fall of 2019, Bel was finally finished. 
Like McCarthy\\'s original Lisp, it\\'s a spec rather than an implementation, although like McCarthy\\'s Lisp it\\'s a spec expressed as code.\\n\\nNow that I could write essays again, I wrote a bunch about topics I\\'d had stacked', doc_id='14d14357-e5cf-4015-8a65-2cc9fd3fb5c4', embedding=None, doc_hash='fdbb519247f837aba04548ab0aec6383f33f41fbbabe6aacfe83b2f52ad699c5', extra_info=None, node_info={'start': 64035, 'end': 68071, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: 'cfd50785-c54a-4e07-b474-561541968da9', <DocumentRelationship.NEXT: '3'>: '81530055-3701-4064-8170-c2c3d42f05b7'}), score=None), NodeWithScore(node=Node(text=\"I could write essays again, I wrote a bunch about topics I'd had stacked up. I kept writing essays through 2020, but I also started to think about other things I could work on. How should I choose what to do? Well, how had I chosen what to work on in the past? I wrote an essay for myself to answer that question, and I was surprised how long and messy the answer turned out to be. If this surprised me, who'd lived it, then I thought perhaps it would be interesting to other people, and encouraging to those with similarly messy lives. So I wrote a more detailed version for others to read, and this is the last sentence of it.\\n\\n\\n\\n\\n\\n\\n\\n\\n\\nNotes\\n\\n[1] My experience skipped a step in the evolution of computers: time-sharing machines with interactive OSes. I went straight from batch processing to microcomputers, which made microcomputers seem all the more exciting.\\n\\n[2] Italian words for abstract concepts can nearly always be predicted from their English cognates (except for occasional traps like polluzione). It's the everyday words that differ. 
So if you string together a lot of abstract concepts with a few simple verbs, you can make a little Italian go a long way.\\n\\n[3] I lived at Piazza San Felice 4, so my walk to the Accademia went straight down the spine of old Florence: past the Pitti, across the bridge, past Orsanmichele, between the Duomo and the Baptistery, and then up Via Ricasoli to Piazza San Marco. I saw Florence at street level in every possible condition, from empty dark winter evenings to sweltering summer days when the streets were packed with tourists.\\n\\n[4] You can of course paint people like still lives if you want to, and they're willing. That sort of portrait is arguably the apex of still life painting, though the long sitting does tend to produce pained expressions in the sitters.\\n\\n[5] Interleaf was one of many companies that had smart people and built impressive technology, and yet got crushed by Moore's Law. In the 1990s the exponential growth in the power of commodity (i.e. Intel) processors rolled up high-end, special-purpose hardware and software companies like a bulldozer.\\n\\n[6] The signature style seekers at RISD weren't specifically mercenary. In the art world, money and coolness are tightly coupled. Anything expensive comes to be seen as cool, and anything seen as cool will soon become equally expensive.\\n\\n[7] Technically the apartment wasn't rent-controlled but rent-stabilized, but this is a refinement only New Yorkers would know or care about. The point is that it was really cheap, less than half market price.\\n\\n[8] Most software you can launch as soon as it's done. But when the software is an online store builder and you're hosting the stores, if you don't have any users yet, that fact will be painfully obvious. 
So before we could launch publicly we had to launch privately, in the sense of recruiting an initial set of users and making sure they had decent-looking stores.\\n\\n[9] We'd had a code editor in Viaweb for users to define their own page styles. They didn't know it, but they were editing Lisp expressions underneath. But this wasn't an app editor, because the code ran when the merchants' sites were generated, not when shoppers visited them.\\n\\n[10] This was the first instance of what is now a familiar experience, and so was what happened next, when I read the comments and found they were full of angry people. How could I claim that Lisp was better than other languages? Weren't they all Turing complete? People who see the responses to essays I write sometimes tell me how sorry they feel for me, but I'm not exaggerating when I reply that it has always been like this, since the very beginning. It comes with the territory. An essay must tell readers things they don't already know, and some people dislike being told such things.\\n\\n[11] People put plenty of stuff on the internet in the 90s of course, but putting something online is not the same as publishing it online. Publishing online means you\", doc_id='81530055-3701-4064-8170-c2c3d42f05b7', embedding=None, doc_hash='7ded19f889627d27a2c132b3d5c4f9dac587cd2407c5624d421e292f2b3ab1d7', extra_info=None, node_info={'start': 68067, 'end': 72048, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: '14d14357-e5cf-4015-8a65-2cc9fd3fb5c4', <DocumentRelationship.NEXT: '3'>: 'c851c700-5226-42a9-87da-e89e548e381e'}), score=None), NodeWithScore(node=Node(text='putting something online is not the same as publishing it online. 
Publishing online means you treat the online version as the (or at least a) primary version.\\n\\n[12] There is a general lesson here that our experience with Y Combinator also teaches: Customs continue to constrain you long after the restrictions that caused them have disappeared. Customary VC practice had once, like the customs about publishing essays, been based on real constraints. Startups had once been much more expensive to start, and proportionally rare. Now they could be cheap and common, but the VCs\\' customs still reflected the old world, just as customs about writing essays still reflected the constraints of the print era.\\n\\nWhich in turn implies that people who are independent-minded (i.e. less influenced by custom) will have an advantage in fields affected by rapid change (where customs are more likely to be obsolete).\\n\\nHere\\'s an interesting point, though: you can\\'t always predict which fields will be affected by rapid change. Obviously software and venture capital will be, but who would have predicted that essay writing would be?\\n\\n[13] Y Combinator was not the original name. At first we were called Cambridge Seed. But we didn\\'t want a regional name, in case someone copied us in Silicon Valley, so we renamed ourselves after one of the coolest tricks in the lambda calculus, the Y combinator.\\n\\nI picked orange as our color partly because it\\'s the warmest, and partly because no VC used it. In 2005 all the VCs used staid colors like maroon, navy blue, and forest green, because they were trying to appeal to LPs, not founders. The YC logo itself is an inside joke: the Viaweb logo had been a white V on a red circle, so I made the YC logo a white Y on an orange square.\\n\\n[14] YC did become a fund for a couple years starting in 2009, because it was getting so big I could no longer afford to fund it personally. 
But after Heroku got bought we had enough money to go back to being self-funded.\\n\\n[15] I\\'ve never liked the term \"deal flow,\" because it implies that the number of new startups at any given time is fixed. This is not only false, but it\\'s the purpose of YC to falsify it, by causing startups to be founded that would not otherwise have existed.\\n\\n[16] She reports that they were all different shapes and sizes, because there was a run on air conditioners and she had to get whatever she could, but that they were all heavier than she could carry now.\\n\\n[17] Another problem with HN was a bizarre edge case that occurs when you both write essays and run a forum. When you run a forum, you\\'re assumed to see if not every conversation, at least every conversation involving you. And when you write essays, people post highly imaginative misinterpretations of them on forums. Individually these two phenomena are tedious but bearable, but the combination is disastrous. You actually have to respond to the misinterpretations, because the assumption that you\\'re present in the conversation means that not responding to any sufficiently upvoted misinterpretation reads as a tacit admission that it\\'s correct. But that in turn encourages more; anyone who wants to pick a fight with you senses that now is their chance.\\n\\n[18] The worst thing about leaving YC was not working with Jessica anymore. We\\'d been working on YC almost the whole time we\\'d known each other, and we\\'d neither tried nor wanted to separate it from our personal lives, so leaving was like pulling up a deeply rooted tree.\\n\\n[19] One way to get more precise about the concept of invented vs discovered is to talk about space aliens. Any sufficiently advanced alien civilization would certainly know about the Pythagorean theorem, for example. 
I believe, though with less certainty, that they would also know about the Lisp in McCarthy\\'s 1960 paper.\\n\\nBut if so there\\'s no reason to suppose that this is the limit of the language that might be known to them. Presumably aliens need numbers and errors and I/O too. So it seems likely there exists at least one path out of McCarthy\\'s', doc_id='c851c700-5226-42a9-87da-e89e548e381e', embedding=None, doc_hash='8c5bdf9883547bcebedcb406fa77ba918defaa7bbedece21114163c578ac0729', extra_info=None, node_info={'start': 72029, 'end': 76071, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: '81530055-3701-4064-8170-c2c3d42f05b7', <DocumentRelationship.NEXT: '3'>: '4972fc4f-6990-4659-a20c-98a8147373d8'}), score=None), NodeWithScore(node=Node(text=\"So it seems likely there exists at least one path out of McCarthy's Lisp along which discoveredness is preserved.\\n\\n\\n\\nThanks to Trevor Blackwell, John Collison, Patrick Collison, Daniel Gackle, Ralph Hazell, Jessica Livingston, Robert Morris, and Harj Taggar for reading drafts of this.\\n\\n\\n\\n\", doc_id='4972fc4f-6990-4659-a20c-98a8147373d8', embedding=None, doc_hash='9d65b6bc997db43cee91e2c4b7380e4efc1059937154b60dc2e45b8aa489e59e', extra_info=None, node_info={'start': 76083, 'end': 76372, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: 'c851c700-5226-42a9-87da-e89e548e381e'}), score=None)], extra_info={'a0e4f5a8-f852-4807-96f2-76721dc1e57d': None, '1992e045-1032-432c-ac01-42947c0727e9': None, 'e06dfa02-faa5-4617-9d58-00ae88433c58': None, '0fc849a1-2421-414e-9f83-f39e3ac47e41': None, '8e0bca16-b087-489b-983a-5beaaf393f64': None, '855d5459-1cfe-465e-8c94-f9a1d047f821': None, '3b199ba9-d04b-473a-8c73-39c293638957': None, '669c0218-b2c1-428b-808c-f5408e52dcdf': None, 
'eb14dc48-f3a1-461d-9a49-0d78331dd083': None, '99b3c4c2-aeab-4f5a-9474-916f6c35c9fb': None, '8a9e2472-230f-437f-b720-1494878d5933': None, '2d1c3f1b-5fa5-46c9-be20-87ad2dfc9060': None, '42f8cdbc-3613-409a-85b6-bb1e22c85fef': None, 'f9e74e9d-cdd9-43c4-8742-76c38200305f': None, 'ab7ed037-4269-4593-b5ff-0ce3d9213cbd': None, 'cfd50785-c54a-4e07-b474-561541968da9': None, '14d14357-e5cf-4015-8a65-2cc9fd3fb5c4': None, '81530055-3701-4064-8170-c2c3d42f05b7': None, 'c851c700-5226-42a9-87da-e89e548e381e': None, '4972fc4f-6990-4659-a20c-98a8147373d8': None})"
-      ]
-     },
-     "execution_count": 14,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "query_engine.query(\"What is the summary of the document?\")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 15,
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Selecting query engine 1: Useful for retrieving specific context from Paul Graham essay on What I Worked On..\n",
-      "> [retrieve] Total LLM token usage: 0 tokens\n",
-      "> [retrieve] Total embedding token usage: 9 tokens\n",
-      "> [get_response] Total LLM token usage: 1924 tokens\n",
-      "> [get_response] Total embedding token usage: 0 tokens\n"
-     ]
-    },
-    {
-     "data": {
-      "text/plain": [
-       "Response(response=\"\\nAfter RICS, Paul Graham decided to focus on Y Combinator and help get the startups through Demo Day. He also started writing essays again and wrote a few that weren't about startups. In November 2014, he ran out of steam while painting and stopped working on it. He then started working on Lisp again in March 2015.\", source_nodes=[NodeWithScore(node=Node(text=\"of my attention. It had already eaten Arc, and was in the process of eating essays too. Either YC was my life's work or I'd have to leave eventually. And it wasn't, so I would.\\n\\nIn the summer of 2012 my mother had a stroke, and the cause turned out to be a blood clot caused by colon cancer. The stroke destroyed her balance, and she was put in a nursing home, but she really wanted to get out of it and back to her house, and my sister and I were determined to help her do it. I used to fly up to Oregon to visit her regularly, and I had a lot of time to think on those flights. On one of them I realized I was ready to hand YC over to someone else.\\n\\nI asked Jessica if she wanted to be president, but she didn't, so we decided we'd try to recruit Sam Altman. We talked to Robert and Trevor and we agreed to make it a complete changing of the guard. Up till that point YC had been controlled by the original LLC we four had started. But we wanted YC to last for a long time, and to do that it couldn't be controlled by the founders. So if Sam said yes, we'd let him reorganize YC. Robert and I would retire, and Jessica and Trevor would become ordinary partners.\\n\\nWhen we asked Sam if he wanted to be president of YC, initially he said no. He wanted to start a startup to make nuclear reactors. But I kept at it, and in October 2013 he finally agreed. We decided he'd take over starting with the winter 2014 batch. 
For the rest of 2013 I left running YC more and more to Sam, partly so he could learn the job, and partly because I was focused on my mother, whose cancer had returned.\\n\\nShe died on January 15, 2014. We knew this was coming, but it was still hard when it did.\\n\\nI kept working on YC till March, to help get that batch of startups through Demo Day, then I checked out pretty completely. (I still talk to alumni and to new startups working on things I'm interested in, but that only takes a few hours a week.)\\n\\nWhat should I do next? Rtm's advice hadn't included anything about that. I wanted to do something completely different, so I decided I'd paint. I wanted to see how good I could get if I really focused on it. So the day after I stopped working on YC, I started painting. I was rusty and it took a while to get back into shape, but it was at least completely engaging. [18]\\n\\nI spent most of the rest of 2014 painting. I'd never been able to work so uninterruptedly before, and I got to be better than I had been. Not good enough, but better. Then in November, right in the middle of a painting, I ran out of steam. Up till that point I'd always been curious to see how the painting I was working on would turn out, but suddenly finishing this one seemed like a chore. So I stopped working on it and cleaned my brushes and haven't painted since. So far anyway.\\n\\nI realize that sounds rather wimpy. But attention is a zero sum game. If you can choose what to work on, and you choose a project that's not the best one (or at least a good one) for you, then it's getting in the way of another project that is. And at 50 there was some opportunity cost to screwing around.\\n\\nI started writing essays again, and wrote a bunch of new ones over the next few months. I even wrote a couple that weren't about startups. 
Then in March 2015 I started working on Lisp again.\\n\\nThe distinctive thing about Lisp is that its core is a language defined by writing an interpreter in itself. It wasn't originally intended as a programming language in the ordinary sense. It was meant to be a formal model of computation, an alternative to the Turing machine. If you want to write an interpreter for a language in itself, what's the minimum set of predefined operators you need? The Lisp that John McCarthy invented, or more accurately discovered, is an answer to that question. [19]\\n\\nMcCarthy didn't realize this Lisp could even be used to program computers\", doc_id='cfd50785-c54a-4e07-b474-561541968da9', embedding=None, doc_hash='b6524989f50c19316fcc4135d476deedc62b79ab141d9f650743a6fe5f3558c9', extra_info=None, node_info={'start': 60170, 'end': 64027, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: 'ab7ed037-4269-4593-b5ff-0ce3d9213cbd', <DocumentRelationship.NEXT: '3'>: '14d14357-e5cf-4015-8a65-2cc9fd3fb5c4'}), score=0.7906051406335054), NodeWithScore(node=Node(text='of customers almost entirely from among their batchmates.\\n\\nI had not originally intended YC to be a full-time job. I was going to do three things: hack, write essays, and work on YC. As YC grew, and I grew more excited about it, it started to take up a lot more than a third of my attention. But for the first few years I was still able to work on other things.\\n\\nIn the summer of 2006, Robert and I started working on a new version of Arc. This one was reasonably fast, because it was compiled into Scheme. To test this new Arc, I wrote Hacker News in it. It was originally meant to be a news aggregator for startup founders and was called Startup News, but after a few months I got tired of reading about nothing but startups. Plus it wasn\\'t startup founders we wanted to reach. It was future startup founders. 
So I changed the name to Hacker News and the topic to whatever engaged one\\'s intellectual curiosity.\\n\\nHN was no doubt good for YC, but it was also by far the biggest source of stress for me. If all I\\'d had to do was select and help founders, life would have been so easy. And that implies that HN was a mistake. Surely the biggest source of stress in one\\'s work should at least be something close to the core of the work. Whereas I was like someone who was in pain while running a marathon not from the exertion of running, but because I had a blister from an ill-fitting shoe. When I was dealing with some urgent problem during YC, there was about a 60% chance it had to do with HN, and a 40% chance it had do with everything else combined. [17]\\n\\nAs well as HN, I wrote all of YC\\'s internal software in Arc. But while I continued to work a good deal in Arc, I gradually stopped working on Arc, partly because I didn\\'t have time to, and partly because it was a lot less attractive to mess around with the language now that we had all this infrastructure depending on it. So now my three projects were reduced to two: writing essays and working on YC.\\n\\nYC was different from other kinds of work I\\'ve done. Instead of deciding for myself what to work on, the problems came to me. Every 6 months there was a new batch of startups, and their problems, whatever they were, became our problems. It was very engaging work, because their problems were quite varied, and the good founders were very effective. If you were trying to learn the most you could about startups in the shortest possible time, you couldn\\'t have picked a better way to do it.\\n\\nThere were parts of the job I didn\\'t like. Disputes between cofounders, figuring out when people were lying to us, fighting with people who maltreated the startups, and so on. But I worked hard even at the parts I didn\\'t like. 
I was haunted by something Kevin Hale once said about companies: \"No one works harder than the boss.\" He meant it both descriptively and prescriptively, and it was the second part that scared me. I wanted YC to be good, so if how hard I worked set the upper bound on how hard everyone else worked, I\\'d better work very hard.\\n\\nOne day in 2010, when he was visiting California for interviews, Robert Morris did something astonishing: he offered me unsolicited advice. I can only remember him doing that once before. One day at Viaweb, when I was bent over double from a kidney stone, he suggested that it would be a good idea for him to take me to the hospital. That was what it took for Rtm to offer unsolicited advice. So I remember his exact words very clearly. \"You know,\" he said, \"you should make sure Y Combinator isn\\'t the last cool thing you do.\"\\n\\nAt the time I didn\\'t understand what he meant, but gradually it dawned on me that he was saying I should quit. This seemed strange advice, because YC was doing great. But if there was one thing rarer than Rtm offering advice, it was Rtm being wrong. So this set me thinking. It was true that on my current trajectory, YC would be the last thing I did, because it was only taking up more of my attention. It had already eaten Arc, and was in the process of eating', doc_id='ab7ed037-4269-4593-b5ff-0ce3d9213cbd', embedding=None, doc_hash='28da476588fa6a7c04e3fc8d0c8490de5a6aa3f4b46ada11723bd524402b1d33', extra_info=None, node_info={'start': 56162, 'end': 60161, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: 'af3dc326-313b-40f5-87ad-007838e7a370', <DocumentRelationship.PREVIOUS: '2'>: 'f9e74e9d-cdd9-43c4-8742-76c38200305f', <DocumentRelationship.NEXT: '3'>: 'cfd50785-c54a-4e07-b474-561541968da9'}), score=0.7863763143524524)], extra_info={'cfd50785-c54a-4e07-b474-561541968da9': None, 'ab7ed037-4269-4593-b5ff-0ce3d9213cbd': None})"
-      ]
-     },
-     "execution_count": 15,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "query_engine.query(\"What did Paul Graham do after RICS?\")"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "#### PydanticMultiSelector\n",
-    "\n",
-    "In case you are expecting queries to be routed to multiple indexes, you should use a multi selector. The multi selector sends to query to multiple sub-indexes, and then aggregates all responses using a list index to form a complete answer."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 11,
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "> [build_index_from_nodes] Total LLM token usage: 0 tokens\n",
-      "> [build_index_from_nodes] Total embedding token usage: 0 tokens\n"
-     ]
-    }
-   ],
-   "source": [
-    "from llama_index import SimpleKeywordTableIndex\n",
-    "\n",
-    "keyword_index = SimpleKeywordTableIndex(nodes, storage_context=storage_context)\n",
-    "\n",
-    "keyword_tool = QueryEngineTool.from_defaults(\n",
-    "    query_engine=vector_query_engine,\n",
-    "    description=\"Useful for retrieving specific context using keywords from Paul Graham essay on What I Worked On.\",\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 12,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "query_engine = RouterQueryEngine(\n",
-    "    selector=PydanticMultiSelector.from_defaults(),\n",
-    "    query_engine_tools=[\n",
-    "        list_tool,\n",
-    "        vector_tool,\n",
-    "        keyword_tool,\n",
-    "    ],\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 13,
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Selecting query engine 1: Retrieving specific context from Paul Graham essay on What I Worked On..\n",
-      "> [retrieve] Total LLM token usage: 0 tokens\n",
-      "> [retrieve] Total embedding token usage: 18 tokens\n",
-      "> [get_response] Total LLM token usage: 1995 tokens\n",
-      "> [get_response] Total embedding token usage: 0 tokens\n",
-      "Selecting query engine 2: Retrieving specific context using keywords from Paul Graham essay on What I Worked On..\n",
-      "> [retrieve] Total LLM token usage: 0 tokens\n",
-      "> [retrieve] Total embedding token usage: 0 tokens\n",
-      "> [get_response] Total LLM token usage: 2055 tokens\n",
-      "> [get_response] Total embedding token usage: 0 tokens\n",
-      "> [build_index_from_nodes] Total LLM token usage: 0 tokens\n",
-      "> [build_index_from_nodes] Total embedding token usage: 0 tokens\n",
-      "> [get_response] Total LLM token usage: 658 tokens\n",
-      "> [get_response] Total embedding token usage: 0 tokens\n",
-      "> [get_response] Total LLM token usage: 658 tokens\n",
-      "> [get_response] Total embedding token usage: 0 tokens\n"
-     ]
-    },
-    {
-     "data": {
-      "text/plain": [
-       "Response(response=\"\\nNotable events and people from the author's time at Interleaf and YC include: \\n\\nInterleaf: \\n- Founding of Interleaf in 1989\\n- Acquisition of Interleaf by Lernout & Hauspie in 1999\\n- The author's work on Lisp, which led to the development of the Lisp programming language. \\n- The author's work on Arc, which led to the development of the Hacker News website. \\n\\nYC: \\n- Founding of YC in 2005\\n- Launch of Hacker News in 2006\\n- Recruitment of Sam Altman as President in 2013\\n- The author's work with Robert Morris, Trevor Blackwell, and Jessica Livingston to create Y Combinator. \\n- The author's work with Sam Altman to reorganize YC and make it a lasting organization. \\n- The author's work with YC startups to help them succeed. \\n- The author's work on Hacker News, which became a major source of stress. \\n- The author's work on internal software for YC, written in Arc. \\n- The author's work with Kevin Hale, who offered the author unsolicited advice. 
\\n- The author's mother's stroke and death in 2012 and 2014 respectively\\n- Author's retirement from YC in 2014\\n- Author's decision to take up painting in 2014\\n- Author's return to writing essays and Lisp in 2015\", source_nodes=[NodeWithScore(node=Node(text=\"\\nNotable events and people from the author's time at Interleaf and YC include: \\n\\nInterleaf: \\n- Founding of Interleaf in 1989\\n- Acquisition of Interleaf by Lernout & Hauspie in 1999\\n\\nYC: \\n- Founding of YC in 2005\\n- Launch of Hacker News in 2006\\n- Recruitment of Sam Altman as President in 2013\\n- Author's mother's stroke and death in 2012 and 2014 respectively\\n- Author's retirement from YC in 2014\\n- Author's decision to take up painting in 2014\\n- Author's return to writing essays and Lisp in 2015\", doc_id='cd546791-d1e2-420a-9e9c-fde68d2d51dd', embedding=None, doc_hash='0e61517dfdb144c42c1251f3ed80d58fa2c3859a03f9d7a9ae92d513036690c5', extra_info=None, node_info={'start': 0, 'end': 498, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: '4183ef8b-b14b-4c73-9754-864d64842c1b'}), score=None), NodeWithScore(node=Node(text=\"\\nNotable events and people from the author's time at Interleaf and YC include: \\n\\nInterleaf: \\n- The author's work on Lisp, which led to the development of the Lisp programming language. \\n- The author's work on Arc, which led to the development of the Hacker News website. \\n\\nYC: \\n- The author's work with Robert Morris, Trevor Blackwell, and Jessica Livingston to create Y Combinator. \\n- The author's work with Sam Altman to reorganize YC and make it a lasting organization. \\n- The author's work with YC startups to help them succeed. \\n- The author's work on Hacker News, which became a major source of stress. \\n- The author's work on internal software for YC, written in Arc. 
\\n- The author's work with Kevin Hale, who offered the author unsolicited advice.\", doc_id='cee04688-dbe7-4749-809e-5a3723e61ac7', embedding=None, doc_hash='246f0f5349eab9d4639f1584170456843b8bd47fcf2862c88437e976309e3a57', extra_info=None, node_info={'start': 0, 'end': 755, '_node_type': <NodeType.TEXT: '1'>}, relationships={<DocumentRelationship.SOURCE: '1'>: '283de7d5-81ed-4dcc-bc5e-98fe4494c19c'}), score=None)], extra_info={'cd546791-d1e2-420a-9e9c-fde68d2d51dd': None, 'cee04688-dbe7-4749-809e-5a3723e61ac7': None})"
-      ]
-     },
-     "execution_count": 13,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "# This query could use either a keyword or vector query engine, so it will combine responses from both\n",
-    "query_engine.query(\n",
-    "    \"What were noteable events and people from the authors time at Interleaf and YC?\"\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": []
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "Python 3 (ipykernel)",
-   "language": "python",
-   "name": "python3"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 3
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython3",
-   "version": "3.10.10"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 4
+    "nbformat": 4,
+    "nbformat_minor": 4
 }
diff --git a/docs/examples/query_engine/pdf_tables/recursive_retriever.ipynb b/docs/examples/query_engine/pdf_tables/recursive_retriever.ipynb
index 36947af44ffba19bc9cb9360bb779921a9701654..3eecd5bf3d75261d85a6fd62c060b6683e4d6485 100644
--- a/docs/examples/query_engine/pdf_tables/recursive_retriever.ipynb
+++ b/docs/examples/query_engine/pdf_tables/recursive_retriever.ipynb
@@ -1,1093 +1,1093 @@
 {
- "cells": [
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "a0d0efc8-3e03-4f66-8f6d-a907b6b6d7c1",
-   "metadata": {},
-   "source": [
-    "# Recursive Retriever + Query Engine Demo \n",
-    "\n",
-    "In this demo, we walk through a use case of showcasing our \"RecursiveRetriever\" module over hierarchical data.\n",
-    "\n",
-    "The concept of recursive retrieval is that we not only explore the directly most relevant nodes, but also explore\n",
-    "node relationships to additional retrievers/query engines and execute them. For instance, a node may represent a concise summary of a structured table,\n",
-    "and link to a SQL/Pandas query engine over that structured table. Then if the node is retrieved, we want to also query the underlying query engine for the answer.\n",
-    "\n",
-    "This can be especially useful for documents with hierarchical relationships. In this example, we walk through a Wikipedia article about billionaires (in PDF form), which contains both text and a variety of embedded structured tables. We first create a Pandas query engine over each table, but also represent each table by an `IndexNode` (stores a link to the query engine); this Node is stored along with other Nodes in a vector store. \n",
-    "\n",
-    "During query-time, if an `IndexNode` is fetched, then the underlying query engine/retriever will be queried. \n",
-    "\n",
-    "**Notes about Setup**\n",
-    "\n",
-    "We use `camelot` to extract text-based tables from PDFs."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 1,
-   "id": "199ec60c-ea45-46d3-ba21-66db6e16726f",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "import camelot\n",
-    "from llama_index import Document, ListIndex\n",
-    "\n",
-    "# https://en.wikipedia.org/wiki/The_World%27s_Billionaires\n",
-    "from llama_index import VectorStoreIndex, ServiceContext, LLMPredictor\n",
-    "from llama_index.query_engine import PandasQueryEngine, RetrieverQueryEngine\n",
-    "from llama_index.retrievers import RecursiveRetriever\n",
-    "from llama_index.schema import IndexNode\n",
-    "from llama_index.llms import OpenAI\n",
-    "\n",
-    "from llama_hub.file.pymu_pdf.base import PyMuPDFReader\n",
-    "from pathlib import Path\n",
-    "from typing import List"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "c41f1a9c-939c-4a89-866e-557f43fc330b",
-   "metadata": {},
-   "source": [
-    "## Load in Document (and Tables)\n",
-    "\n",
-    "We use our `PyMuPDFReader` to read in the main text of the document.\n",
-    "\n",
-    "We also use `camelot` to extract some structured tables from the document"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 2,
-   "id": "4435fcb9-bf67-4d76-9d38-f5cd19086fae",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "file_path = \"billionaires_page.pdf\""
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 3,
-   "id": "e5e78b24-42e0-4d65-980d-13f8c738012f",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "# initialize PDF reader\n",
-    "reader = PyMuPDFReader()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 4,
-   "id": "09976ddc-fe47-4eb6-b577-d5c1c9185974",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "docs = reader.load(Path(file_path))"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 5,
-   "id": "5f086ec9-17e3-40cb-b92d-4e121322ff51",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "# use camelot to parse tables\n",
-    "def get_tables(path: str, pages: List[int]):\n",
-    "    table_dfs = []\n",
-    "    for page in pages:\n",
-    "        table_list = camelot.read_pdf(path, pages=str(page))\n",
-    "        table_df = table_list[0].df\n",
-    "        table_df = (\n",
-    "            table_df.rename(columns=table_df.iloc[0])\n",
-    "            .drop(table_df.index[0])\n",
-    "            .reset_index(drop=True)\n",
-    "        )\n",
-    "        table_dfs.append(table_df)\n",
-    "    return table_dfs"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 6,
-   "id": "f6653cb1-d0f1-4408-94d3-31827e1a3115",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "table_dfs = get_tables(file_path, pages=[3, 25])"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 7,
-   "id": "24e90368-b982-4d05-91b3-0d9dae39eebc",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/html": [
-       "<div>\n",
-       "<style scoped>\n",
-       "    .dataframe tbody tr th:only-of-type {\n",
-       "        vertical-align: middle;\n",
-       "    }\n",
-       "\n",
-       "    .dataframe tbody tr th {\n",
-       "        vertical-align: top;\n",
-       "    }\n",
-       "\n",
-       "    .dataframe thead th {\n",
-       "        text-align: right;\n",
-       "    }\n",
-       "</style>\n",
-       "<table border=\"1\" class=\"dataframe\">\n",
-       "  <thead>\n",
-       "    <tr style=\"text-align: right;\">\n",
-       "      <th></th>\n",
-       "      <th>No.</th>\n",
-       "      <th>Name</th>\n",
-       "      <th>Net worth\\n(USD)</th>\n",
-       "      <th>Age</th>\n",
-       "      <th>Nationality</th>\n",
-       "      <th>Primary source(s) of wealth</th>\n",
-       "    </tr>\n",
-       "  </thead>\n",
-       "  <tbody>\n",
-       "    <tr>\n",
-       "      <th>0</th>\n",
-       "      <td>1</td>\n",
-       "      <td>Bernard Arnault &amp;\\nfamily</td>\n",
-       "      <td>$211 billion</td>\n",
-       "      <td>74</td>\n",
-       "      <td>France</td>\n",
-       "      <td>LVMH</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>1</th>\n",
-       "      <td>2</td>\n",
-       "      <td>Elon Musk</td>\n",
-       "      <td>$180 billion</td>\n",
-       "      <td>51</td>\n",
-       "      <td>United\\nStates</td>\n",
-       "      <td>Tesla, SpaceX, X Corp.</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>2</th>\n",
-       "      <td>3</td>\n",
-       "      <td>Jeff Bezos</td>\n",
-       "      <td>$114 billion</td>\n",
-       "      <td>59</td>\n",
-       "      <td>United\\nStates</td>\n",
-       "      <td>Amazon</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>3</th>\n",
-       "      <td>4</td>\n",
-       "      <td>Larry Ellison</td>\n",
-       "      <td>$107 billion</td>\n",
-       "      <td>78</td>\n",
-       "      <td>United\\nStates</td>\n",
-       "      <td>Oracle Corporation</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>4</th>\n",
-       "      <td>5</td>\n",
-       "      <td>Warren Buffett</td>\n",
-       "      <td>$106 billion</td>\n",
-       "      <td>92</td>\n",
-       "      <td>United\\nStates</td>\n",
-       "      <td>Berkshire Hathaway</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>5</th>\n",
-       "      <td>6</td>\n",
-       "      <td>Bill Gates</td>\n",
-       "      <td>$104 billion</td>\n",
-       "      <td>67</td>\n",
-       "      <td>United\\nStates</td>\n",
-       "      <td>Microsoft</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>6</th>\n",
-       "      <td>7</td>\n",
-       "      <td>Michael Bloomberg</td>\n",
-       "      <td>$94.5 billion</td>\n",
-       "      <td>81</td>\n",
-       "      <td>United\\nStates</td>\n",
-       "      <td>Bloomberg L.P.</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>7</th>\n",
-       "      <td>8</td>\n",
-       "      <td>Carlos Slim &amp; family</td>\n",
-       "      <td>$93 billion</td>\n",
-       "      <td>83</td>\n",
-       "      <td>Mexico</td>\n",
-       "      <td>Telmex, América Móvil, Grupo\\nCarso</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>8</th>\n",
-       "      <td>9</td>\n",
-       "      <td>Mukesh Ambani</td>\n",
-       "      <td>$83.4 billion</td>\n",
-       "      <td>65</td>\n",
-       "      <td>India</td>\n",
-       "      <td>Reliance Industries</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>9</th>\n",
-       "      <td>10</td>\n",
-       "      <td>Steve Ballmer</td>\n",
-       "      <td>$80.7 billion</td>\n",
-       "      <td>67</td>\n",
-       "      <td>United\\nStates</td>\n",
-       "      <td>Microsoft</td>\n",
-       "    </tr>\n",
-       "  </tbody>\n",
-       "</table>\n",
-       "</div>"
-      ],
-      "text/plain": [
-       "  No.                       Name Net worth\\n(USD) Age     Nationality  \\\n",
-       "0   1  Bernard Arnault &\\nfamily     $211 billion  74          France   \n",
-       "1   2                  Elon Musk     $180 billion  51  United\\nStates   \n",
-       "2   3                 Jeff Bezos     $114 billion  59  United\\nStates   \n",
-       "3   4              Larry Ellison     $107 billion  78  United\\nStates   \n",
-       "4   5             Warren Buffett     $106 billion  92  United\\nStates   \n",
-       "5   6                 Bill Gates     $104 billion  67  United\\nStates   \n",
-       "6   7          Michael Bloomberg    $94.5 billion  81  United\\nStates   \n",
-       "7   8       Carlos Slim & family      $93 billion  83          Mexico   \n",
-       "8   9              Mukesh Ambani    $83.4 billion  65           India   \n",
-       "9  10              Steve Ballmer    $80.7 billion  67  United\\nStates   \n",
-       "\n",
-       "           Primary source(s) of wealth  \n",
-       "0                                 LVMH  \n",
-       "1               Tesla, SpaceX, X Corp.  \n",
-       "2                               Amazon  \n",
-       "3                   Oracle Corporation  \n",
-       "4                   Berkshire Hathaway  \n",
-       "5                            Microsoft  \n",
-       "6                       Bloomberg L.P.  \n",
-       "7  Telmex, América Móvil, Grupo\\nCarso  \n",
-       "8                  Reliance Industries  \n",
-       "9                            Microsoft  "
-      ]
-     },
-     "execution_count": 7,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "# shows list of top billionaires in 2023\n",
-    "table_dfs[0]"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 8,
-   "id": "0ba98e98-bfb6-4c54-8fa6-b0d7abf381c8",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/html": [
-       "<div>\n",
-       "<style scoped>\n",
-       "    .dataframe tbody tr th:only-of-type {\n",
-       "        vertical-align: middle;\n",
-       "    }\n",
-       "\n",
-       "    .dataframe tbody tr th {\n",
-       "        vertical-align: top;\n",
-       "    }\n",
-       "\n",
-       "    .dataframe thead th {\n",
-       "        text-align: right;\n",
-       "    }\n",
-       "</style>\n",
-       "<table border=\"1\" class=\"dataframe\">\n",
-       "  <thead>\n",
-       "    <tr style=\"text-align: right;\">\n",
-       "      <th></th>\n",
-       "      <th>Year</th>\n",
-       "      <th>Number of billionaires</th>\n",
-       "      <th>Group's combined net worth</th>\n",
-       "    </tr>\n",
-       "  </thead>\n",
-       "  <tbody>\n",
-       "    <tr>\n",
-       "      <th>0</th>\n",
-       "      <td>2023[2]</td>\n",
-       "      <td>2,640</td>\n",
-       "      <td>$12.2 trillion</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>1</th>\n",
-       "      <td>2022[6]</td>\n",
-       "      <td>2,668</td>\n",
-       "      <td>$12.7 trillion</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>2</th>\n",
-       "      <td>2021[11]</td>\n",
-       "      <td>2,755</td>\n",
-       "      <td>$13.1 trillion</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>3</th>\n",
-       "      <td>2020</td>\n",
-       "      <td>2,095</td>\n",
-       "      <td>$8.0 trillion</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>4</th>\n",
-       "      <td>2019</td>\n",
-       "      <td>2,153</td>\n",
-       "      <td>$8.7 trillion</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>5</th>\n",
-       "      <td>2018</td>\n",
-       "      <td>2,208</td>\n",
-       "      <td>$9.1 trillion</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>6</th>\n",
-       "      <td>2017</td>\n",
-       "      <td>2,043</td>\n",
-       "      <td>$7.7 trillion</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>7</th>\n",
-       "      <td>2016</td>\n",
-       "      <td>1,810</td>\n",
-       "      <td>$6.5 trillion</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>8</th>\n",
-       "      <td>2015[18]</td>\n",
-       "      <td>1,826</td>\n",
-       "      <td>$7.1 trillion</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>9</th>\n",
-       "      <td>2014[67]</td>\n",
-       "      <td>1,645</td>\n",
-       "      <td>$6.4 trillion</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>10</th>\n",
-       "      <td>2013[68]</td>\n",
-       "      <td>1,426</td>\n",
-       "      <td>$5.4 trillion</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>11</th>\n",
-       "      <td>2012</td>\n",
-       "      <td>1,226</td>\n",
-       "      <td>$4.6 trillion</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>12</th>\n",
-       "      <td>2011</td>\n",
-       "      <td>1,210</td>\n",
-       "      <td>$4.5 trillion</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>13</th>\n",
-       "      <td>2010</td>\n",
-       "      <td>1,011</td>\n",
-       "      <td>$3.6 trillion</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>14</th>\n",
-       "      <td>2009</td>\n",
-       "      <td>793</td>\n",
-       "      <td>$2.4 trillion</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>15</th>\n",
-       "      <td>2008</td>\n",
-       "      <td>1,125</td>\n",
-       "      <td>$4.4 trillion</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>16</th>\n",
-       "      <td>2007</td>\n",
-       "      <td>946</td>\n",
-       "      <td>$3.5 trillion</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>17</th>\n",
-       "      <td>2006</td>\n",
-       "      <td>793</td>\n",
-       "      <td>$2.6 trillion</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>18</th>\n",
-       "      <td>2005</td>\n",
-       "      <td>691</td>\n",
-       "      <td>$2.2 trillion</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>19</th>\n",
-       "      <td>2004</td>\n",
-       "      <td>587</td>\n",
-       "      <td>$1.9 trillion</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>20</th>\n",
-       "      <td>2003</td>\n",
-       "      <td>476</td>\n",
-       "      <td>$1.4 trillion</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>21</th>\n",
-       "      <td>2002</td>\n",
-       "      <td>497</td>\n",
-       "      <td>$1.5 trillion</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>22</th>\n",
-       "      <td>2001</td>\n",
-       "      <td>538</td>\n",
-       "      <td>$1.8 trillion</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>23</th>\n",
-       "      <td>2000</td>\n",
-       "      <td>470</td>\n",
-       "      <td>$898 billion</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>24</th>\n",
-       "      <td>Sources: Forbes.[18][67][66][68]</td>\n",
-       "      <td></td>\n",
-       "      <td></td>\n",
-       "    </tr>\n",
-       "  </tbody>\n",
-       "</table>\n",
-       "</div>"
-      ],
-      "text/plain": [
-       "                                Year Number of billionaires  \\\n",
-       "0                            2023[2]                  2,640   \n",
-       "1                            2022[6]                  2,668   \n",
-       "2                           2021[11]                  2,755   \n",
-       "3                               2020                  2,095   \n",
-       "4                               2019                  2,153   \n",
-       "5                               2018                  2,208   \n",
-       "6                               2017                  2,043   \n",
-       "7                               2016                  1,810   \n",
-       "8                           2015[18]                  1,826   \n",
-       "9                           2014[67]                  1,645   \n",
-       "10                          2013[68]                  1,426   \n",
-       "11                              2012                  1,226   \n",
-       "12                              2011                  1,210   \n",
-       "13                              2010                  1,011   \n",
-       "14                              2009                    793   \n",
-       "15                              2008                  1,125   \n",
-       "16                              2007                    946   \n",
-       "17                              2006                    793   \n",
-       "18                              2005                    691   \n",
-       "19                              2004                    587   \n",
-       "20                              2003                    476   \n",
-       "21                              2002                    497   \n",
-       "22                              2001                    538   \n",
-       "23                              2000                    470   \n",
-       "24  Sources: Forbes.[18][67][66][68]                          \n",
-       "\n",
-       "   Group's combined net worth  \n",
-       "0              $12.2 trillion  \n",
-       "1              $12.7 trillion  \n",
-       "2              $13.1 trillion  \n",
-       "3               $8.0 trillion  \n",
-       "4               $8.7 trillion  \n",
-       "5               $9.1 trillion  \n",
-       "6               $7.7 trillion  \n",
-       "7               $6.5 trillion  \n",
-       "8               $7.1 trillion  \n",
-       "9               $6.4 trillion  \n",
-       "10              $5.4 trillion  \n",
-       "11              $4.6 trillion  \n",
-       "12              $4.5 trillion  \n",
-       "13              $3.6 trillion  \n",
-       "14              $2.4 trillion  \n",
-       "15              $4.4 trillion  \n",
-       "16              $3.5 trillion  \n",
-       "17              $2.6 trillion  \n",
-       "18              $2.2 trillion  \n",
-       "19              $1.9 trillion  \n",
-       "20              $1.4 trillion  \n",
-       "21              $1.5 trillion  \n",
-       "22              $1.8 trillion  \n",
-       "23               $898 billion  \n",
-       "24                             "
-      ]
-     },
-     "execution_count": 8,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "# shows list of top billionaires\n",
-    "table_dfs[1]"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "02967be4-be85-4046-b4e2-e5dd8e65628a",
-   "metadata": {},
-   "source": [
-    "## Create Pandas Query Engines\n",
-    "\n",
-    "We create a pandas query engine over each structured table.\n",
-    "\n",
-    "These can be executed on their own to answer queries about each table."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 9,
-   "id": "2f38bbf0-502a-44f7-b33b-443fcd90583a",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "# define query engines over these tables\n",
-    "df_query_engines = [PandasQueryEngine(table_df) for table_df in table_dfs]"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 10,
-   "id": "9fbc43a0-5655-4c8a-80d4-71c7fe9d7275",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "df.iloc[1]['Net worth\\n(USD)']\n"
-     ]
+    "cells": [
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "a0d0efc8-3e03-4f66-8f6d-a907b6b6d7c1",
+            "metadata": {},
+            "source": [
+                "# Recursive Retriever + Query Engine Demo \n",
+                "\n",
+                "In this demo, we walk through a use case of showcasing our \"RecursiveRetriever\" module over hierarchical data.\n",
+                "\n",
+                "The concept of recursive retrieval is that we not only retrieve the most directly relevant nodes, but also follow\n",
+                "node relationships to additional retrievers/query engines and execute those. For instance, a node may represent a concise summary of a structured table,\n",
+                "and link to a SQL/Pandas query engine over that structured table. Then, if the node is retrieved, we also want to query the underlying query engine for the answer.\n",
+                "\n",
+                "This can be especially useful for documents with hierarchical relationships. In this example, we walk through a Wikipedia article about billionaires (in PDF form), which contains both text and a variety of embedded structured tables. We first create a Pandas query engine over each table, but also represent each table by an `IndexNode` (stores a link to the query engine); this Node is stored along with other Nodes in a vector store. \n",
+                "\n",
+                "During query-time, if an `IndexNode` is fetched, then the underlying query engine/retriever will be queried. \n",
+                "\n",
+                "**Notes about Setup**\n",
+                "\n",
+                "We use `camelot` to extract text-based tables from PDFs."
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 1,
+            "id": "199ec60c-ea45-46d3-ba21-66db6e16726f",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "import camelot\n",
+                "from llama_index import Document, SummaryIndex\n",
+                "\n",
+                "# https://en.wikipedia.org/wiki/The_World%27s_Billionaires\n",
+                "from llama_index import VectorStoreIndex, ServiceContext, LLMPredictor\n",
+                "from llama_index.query_engine import PandasQueryEngine, RetrieverQueryEngine\n",
+                "from llama_index.retrievers import RecursiveRetriever\n",
+                "from llama_index.schema import IndexNode\n",
+                "from llama_index.llms import OpenAI\n",
+                "\n",
+                "from llama_hub.file.pymu_pdf.base import PyMuPDFReader\n",
+                "from pathlib import Path\n",
+                "from typing import List"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "c41f1a9c-939c-4a89-866e-557f43fc330b",
+            "metadata": {},
+            "source": [
+                "## Load in Document (and Tables)\n",
+                "\n",
+                "We use our `PyMuPDFReader` to read in the main text of the document.\n",
+                "\n",
+                "We also use `camelot` to extract some structured tables from the document"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 2,
+            "id": "4435fcb9-bf67-4d76-9d38-f5cd19086fae",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "file_path = \"billionaires_page.pdf\""
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 3,
+            "id": "e5e78b24-42e0-4d65-980d-13f8c738012f",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "# initialize PDF reader\n",
+                "reader = PyMuPDFReader()"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 4,
+            "id": "09976ddc-fe47-4eb6-b577-d5c1c9185974",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "docs = reader.load(Path(file_path))"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 5,
+            "id": "5f086ec9-17e3-40cb-b92d-4e121322ff51",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "# use camelot to parse tables\n",
+                "def get_tables(path: str, pages: List[int]):\n",
+                "    table_dfs = []\n",
+                "    for page in pages:\n",
+                "        table_list = camelot.read_pdf(path, pages=str(page))\n",
+                "        table_df = table_list[0].df\n",
+                "        table_df = (\n",
+                "            table_df.rename(columns=table_df.iloc[0])\n",
+                "            .drop(table_df.index[0])\n",
+                "            .reset_index(drop=True)\n",
+                "        )\n",
+                "        table_dfs.append(table_df)\n",
+                "    return table_dfs"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 6,
+            "id": "f6653cb1-d0f1-4408-94d3-31827e1a3115",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "table_dfs = get_tables(file_path, pages=[3, 25])"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 7,
+            "id": "24e90368-b982-4d05-91b3-0d9dae39eebc",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [
+                {
+                    "data": {
+                        "text/html": [
+                            "<div>\n",
+                            "<style scoped>\n",
+                            "    .dataframe tbody tr th:only-of-type {\n",
+                            "        vertical-align: middle;\n",
+                            "    }\n",
+                            "\n",
+                            "    .dataframe tbody tr th {\n",
+                            "        vertical-align: top;\n",
+                            "    }\n",
+                            "\n",
+                            "    .dataframe thead th {\n",
+                            "        text-align: right;\n",
+                            "    }\n",
+                            "</style>\n",
+                            "<table border=\"1\" class=\"dataframe\">\n",
+                            "  <thead>\n",
+                            "    <tr style=\"text-align: right;\">\n",
+                            "      <th></th>\n",
+                            "      <th>No.</th>\n",
+                            "      <th>Name</th>\n",
+                            "      <th>Net worth\\n(USD)</th>\n",
+                            "      <th>Age</th>\n",
+                            "      <th>Nationality</th>\n",
+                            "      <th>Primary source(s) of wealth</th>\n",
+                            "    </tr>\n",
+                            "  </thead>\n",
+                            "  <tbody>\n",
+                            "    <tr>\n",
+                            "      <th>0</th>\n",
+                            "      <td>1</td>\n",
+                            "      <td>Bernard Arnault &amp;\\nfamily</td>\n",
+                            "      <td>$211 billion</td>\n",
+                            "      <td>74</td>\n",
+                            "      <td>France</td>\n",
+                            "      <td>LVMH</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>1</th>\n",
+                            "      <td>2</td>\n",
+                            "      <td>Elon Musk</td>\n",
+                            "      <td>$180 billion</td>\n",
+                            "      <td>51</td>\n",
+                            "      <td>United\\nStates</td>\n",
+                            "      <td>Tesla, SpaceX, X Corp.</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>2</th>\n",
+                            "      <td>3</td>\n",
+                            "      <td>Jeff Bezos</td>\n",
+                            "      <td>$114 billion</td>\n",
+                            "      <td>59</td>\n",
+                            "      <td>United\\nStates</td>\n",
+                            "      <td>Amazon</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>3</th>\n",
+                            "      <td>4</td>\n",
+                            "      <td>Larry Ellison</td>\n",
+                            "      <td>$107 billion</td>\n",
+                            "      <td>78</td>\n",
+                            "      <td>United\\nStates</td>\n",
+                            "      <td>Oracle Corporation</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>4</th>\n",
+                            "      <td>5</td>\n",
+                            "      <td>Warren Buffett</td>\n",
+                            "      <td>$106 billion</td>\n",
+                            "      <td>92</td>\n",
+                            "      <td>United\\nStates</td>\n",
+                            "      <td>Berkshire Hathaway</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>5</th>\n",
+                            "      <td>6</td>\n",
+                            "      <td>Bill Gates</td>\n",
+                            "      <td>$104 billion</td>\n",
+                            "      <td>67</td>\n",
+                            "      <td>United\\nStates</td>\n",
+                            "      <td>Microsoft</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>6</th>\n",
+                            "      <td>7</td>\n",
+                            "      <td>Michael Bloomberg</td>\n",
+                            "      <td>$94.5 billion</td>\n",
+                            "      <td>81</td>\n",
+                            "      <td>United\\nStates</td>\n",
+                            "      <td>Bloomberg L.P.</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>7</th>\n",
+                            "      <td>8</td>\n",
+                            "      <td>Carlos Slim &amp; family</td>\n",
+                            "      <td>$93 billion</td>\n",
+                            "      <td>83</td>\n",
+                            "      <td>Mexico</td>\n",
+                            "      <td>Telmex, América Móvil, Grupo\\nCarso</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>8</th>\n",
+                            "      <td>9</td>\n",
+                            "      <td>Mukesh Ambani</td>\n",
+                            "      <td>$83.4 billion</td>\n",
+                            "      <td>65</td>\n",
+                            "      <td>India</td>\n",
+                            "      <td>Reliance Industries</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>9</th>\n",
+                            "      <td>10</td>\n",
+                            "      <td>Steve Ballmer</td>\n",
+                            "      <td>$80.7 billion</td>\n",
+                            "      <td>67</td>\n",
+                            "      <td>United\\nStates</td>\n",
+                            "      <td>Microsoft</td>\n",
+                            "    </tr>\n",
+                            "  </tbody>\n",
+                            "</table>\n",
+                            "</div>"
+                        ],
+                        "text/plain": [
+                            "  No.                       Name Net worth\\n(USD) Age     Nationality  \\\n",
+                            "0   1  Bernard Arnault &\\nfamily     $211 billion  74          France   \n",
+                            "1   2                  Elon Musk     $180 billion  51  United\\nStates   \n",
+                            "2   3                 Jeff Bezos     $114 billion  59  United\\nStates   \n",
+                            "3   4              Larry Ellison     $107 billion  78  United\\nStates   \n",
+                            "4   5             Warren Buffett     $106 billion  92  United\\nStates   \n",
+                            "5   6                 Bill Gates     $104 billion  67  United\\nStates   \n",
+                            "6   7          Michael Bloomberg    $94.5 billion  81  United\\nStates   \n",
+                            "7   8       Carlos Slim & family      $93 billion  83          Mexico   \n",
+                            "8   9              Mukesh Ambani    $83.4 billion  65           India   \n",
+                            "9  10              Steve Ballmer    $80.7 billion  67  United\\nStates   \n",
+                            "\n",
+                            "           Primary source(s) of wealth  \n",
+                            "0                                 LVMH  \n",
+                            "1               Tesla, SpaceX, X Corp.  \n",
+                            "2                               Amazon  \n",
+                            "3                   Oracle Corporation  \n",
+                            "4                   Berkshire Hathaway  \n",
+                            "5                            Microsoft  \n",
+                            "6                       Bloomberg L.P.  \n",
+                            "7  Telmex, América Móvil, Grupo\\nCarso  \n",
+                            "8                  Reliance Industries  \n",
+                            "9                            Microsoft  "
+                        ]
+                    },
+                    "execution_count": 7,
+                    "metadata": {},
+                    "output_type": "execute_result"
+                }
+            ],
+            "source": [
+                "# shows list of top billionaires in 2023\n",
+                "table_dfs[0]"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 8,
+            "id": "0ba98e98-bfb6-4c54-8fa6-b0d7abf381c8",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [
+                {
+                    "data": {
+                        "text/html": [
+                            "<div>\n",
+                            "<style scoped>\n",
+                            "    .dataframe tbody tr th:only-of-type {\n",
+                            "        vertical-align: middle;\n",
+                            "    }\n",
+                            "\n",
+                            "    .dataframe tbody tr th {\n",
+                            "        vertical-align: top;\n",
+                            "    }\n",
+                            "\n",
+                            "    .dataframe thead th {\n",
+                            "        text-align: right;\n",
+                            "    }\n",
+                            "</style>\n",
+                            "<table border=\"1\" class=\"dataframe\">\n",
+                            "  <thead>\n",
+                            "    <tr style=\"text-align: right;\">\n",
+                            "      <th></th>\n",
+                            "      <th>Year</th>\n",
+                            "      <th>Number of billionaires</th>\n",
+                            "      <th>Group's combined net worth</th>\n",
+                            "    </tr>\n",
+                            "  </thead>\n",
+                            "  <tbody>\n",
+                            "    <tr>\n",
+                            "      <th>0</th>\n",
+                            "      <td>2023[2]</td>\n",
+                            "      <td>2,640</td>\n",
+                            "      <td>$12.2 trillion</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>1</th>\n",
+                            "      <td>2022[6]</td>\n",
+                            "      <td>2,668</td>\n",
+                            "      <td>$12.7 trillion</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>2</th>\n",
+                            "      <td>2021[11]</td>\n",
+                            "      <td>2,755</td>\n",
+                            "      <td>$13.1 trillion</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>3</th>\n",
+                            "      <td>2020</td>\n",
+                            "      <td>2,095</td>\n",
+                            "      <td>$8.0 trillion</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>4</th>\n",
+                            "      <td>2019</td>\n",
+                            "      <td>2,153</td>\n",
+                            "      <td>$8.7 trillion</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>5</th>\n",
+                            "      <td>2018</td>\n",
+                            "      <td>2,208</td>\n",
+                            "      <td>$9.1 trillion</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>6</th>\n",
+                            "      <td>2017</td>\n",
+                            "      <td>2,043</td>\n",
+                            "      <td>$7.7 trillion</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>7</th>\n",
+                            "      <td>2016</td>\n",
+                            "      <td>1,810</td>\n",
+                            "      <td>$6.5 trillion</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>8</th>\n",
+                            "      <td>2015[18]</td>\n",
+                            "      <td>1,826</td>\n",
+                            "      <td>$7.1 trillion</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>9</th>\n",
+                            "      <td>2014[67]</td>\n",
+                            "      <td>1,645</td>\n",
+                            "      <td>$6.4 trillion</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>10</th>\n",
+                            "      <td>2013[68]</td>\n",
+                            "      <td>1,426</td>\n",
+                            "      <td>$5.4 trillion</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>11</th>\n",
+                            "      <td>2012</td>\n",
+                            "      <td>1,226</td>\n",
+                            "      <td>$4.6 trillion</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>12</th>\n",
+                            "      <td>2011</td>\n",
+                            "      <td>1,210</td>\n",
+                            "      <td>$4.5 trillion</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>13</th>\n",
+                            "      <td>2010</td>\n",
+                            "      <td>1,011</td>\n",
+                            "      <td>$3.6 trillion</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>14</th>\n",
+                            "      <td>2009</td>\n",
+                            "      <td>793</td>\n",
+                            "      <td>$2.4 trillion</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>15</th>\n",
+                            "      <td>2008</td>\n",
+                            "      <td>1,125</td>\n",
+                            "      <td>$4.4 trillion</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>16</th>\n",
+                            "      <td>2007</td>\n",
+                            "      <td>946</td>\n",
+                            "      <td>$3.5 trillion</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>17</th>\n",
+                            "      <td>2006</td>\n",
+                            "      <td>793</td>\n",
+                            "      <td>$2.6 trillion</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>18</th>\n",
+                            "      <td>2005</td>\n",
+                            "      <td>691</td>\n",
+                            "      <td>$2.2 trillion</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>19</th>\n",
+                            "      <td>2004</td>\n",
+                            "      <td>587</td>\n",
+                            "      <td>$1.9 trillion</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>20</th>\n",
+                            "      <td>2003</td>\n",
+                            "      <td>476</td>\n",
+                            "      <td>$1.4 trillion</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>21</th>\n",
+                            "      <td>2002</td>\n",
+                            "      <td>497</td>\n",
+                            "      <td>$1.5 trillion</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>22</th>\n",
+                            "      <td>2001</td>\n",
+                            "      <td>538</td>\n",
+                            "      <td>$1.8 trillion</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>23</th>\n",
+                            "      <td>2000</td>\n",
+                            "      <td>470</td>\n",
+                            "      <td>$898 billion</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>24</th>\n",
+                            "      <td>Sources: Forbes.[18][67][66][68]</td>\n",
+                            "      <td></td>\n",
+                            "      <td></td>\n",
+                            "    </tr>\n",
+                            "  </tbody>\n",
+                            "</table>\n",
+                            "</div>"
+                        ],
+                        "text/plain": [
+                            "                                Year Number of billionaires  \\\n",
+                            "0                            2023[2]                  2,640   \n",
+                            "1                            2022[6]                  2,668   \n",
+                            "2                           2021[11]                  2,755   \n",
+                            "3                               2020                  2,095   \n",
+                            "4                               2019                  2,153   \n",
+                            "5                               2018                  2,208   \n",
+                            "6                               2017                  2,043   \n",
+                            "7                               2016                  1,810   \n",
+                            "8                           2015[18]                  1,826   \n",
+                            "9                           2014[67]                  1,645   \n",
+                            "10                          2013[68]                  1,426   \n",
+                            "11                              2012                  1,226   \n",
+                            "12                              2011                  1,210   \n",
+                            "13                              2010                  1,011   \n",
+                            "14                              2009                    793   \n",
+                            "15                              2008                  1,125   \n",
+                            "16                              2007                    946   \n",
+                            "17                              2006                    793   \n",
+                            "18                              2005                    691   \n",
+                            "19                              2004                    587   \n",
+                            "20                              2003                    476   \n",
+                            "21                              2002                    497   \n",
+                            "22                              2001                    538   \n",
+                            "23                              2000                    470   \n",
+                            "24  Sources: Forbes.[18][67][66][68]                          \n",
+                            "\n",
+                            "   Group's combined net worth  \n",
+                            "0              $12.2 trillion  \n",
+                            "1              $12.7 trillion  \n",
+                            "2              $13.1 trillion  \n",
+                            "3               $8.0 trillion  \n",
+                            "4               $8.7 trillion  \n",
+                            "5               $9.1 trillion  \n",
+                            "6               $7.7 trillion  \n",
+                            "7               $6.5 trillion  \n",
+                            "8               $7.1 trillion  \n",
+                            "9               $6.4 trillion  \n",
+                            "10              $5.4 trillion  \n",
+                            "11              $4.6 trillion  \n",
+                            "12              $4.5 trillion  \n",
+                            "13              $3.6 trillion  \n",
+                            "14              $2.4 trillion  \n",
+                            "15              $4.4 trillion  \n",
+                            "16              $3.5 trillion  \n",
+                            "17              $2.6 trillion  \n",
+                            "18              $2.2 trillion  \n",
+                            "19              $1.9 trillion  \n",
+                            "20              $1.4 trillion  \n",
+                            "21              $1.5 trillion  \n",
+                            "22              $1.8 trillion  \n",
+                            "23               $898 billion  \n",
+                            "24                             "
+                        ]
+                    },
+                    "execution_count": 8,
+                    "metadata": {},
+                    "output_type": "execute_result"
+                }
+            ],
+            "source": [
+                "# shows list of top billionaires\n",
+                "table_dfs[1]"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "02967be4-be85-4046-b4e2-e5dd8e65628a",
+            "metadata": {},
+            "source": [
+                "## Create Pandas Query Engines\n",
+                "\n",
+                "We create a pandas query engine over each structured table.\n",
+                "\n",
+                "These can be executed on their own to answer queries about each table."
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 9,
+            "id": "2f38bbf0-502a-44f7-b33b-443fcd90583a",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "# define query engines over these tables\n",
+                "df_query_engines = [PandasQueryEngine(table_df) for table_df in table_dfs]"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 10,
+            "id": "9fbc43a0-5655-4c8a-80d4-71c7fe9d7275",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "df.iloc[1]['Net worth\\n(USD)']\n"
+                    ]
+                },
+                {
+                    "data": {
+                        "text/plain": [
+                            "Response(response='$180\\xa0billion', source_nodes=[], metadata={'pandas_instruction_str': \"\\ndf.iloc[1]['Net worth\\\\n(USD)']\"})"
+                        ]
+                    },
+                    "execution_count": 10,
+                    "metadata": {},
+                    "output_type": "execute_result"
+                }
+            ],
+            "source": [
+                "df_query_engines[0].query(\n",
+                "    \"What's the net worth of the second richest billionaire in 2023?\"\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 41,
+            "id": "78dbe39d-eea0-4c7f-bea7-2c0f6f6591cd",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "df[df['Year'] == '2009']['Number of billionaires'].iloc[0]\n"
+                    ]
+                },
+                {
+                    "data": {
+                        "text/plain": [
+                            "Response(response='793', source_nodes=[], metadata={'pandas_instruction_str': \"\\ndf[df['Year'] == '2009']['Number of billionaires'].iloc[0]\"})"
+                        ]
+                    },
+                    "execution_count": 41,
+                    "metadata": {},
+                    "output_type": "execute_result"
+                }
+            ],
+            "source": [
+                "df_query_engines[1].query(\"How many billionaires were there in 2009?\")"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "e9bca53c-8766-42e4-96c7-145e7f14be34",
+            "metadata": {},
+            "source": [
+                "## Build Vector Index\n",
+                "\n",
+                "Build vector index over the chunked document as well as over the additional `IndexNode` objects linked to the tables."
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 42,
+            "id": "fffd5616-4256-46da-a765-10476a1ee1ee",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "llm = OpenAI(temperature=0, model=\"gpt-4\")\n",
+                "\n",
+                "service_context = ServiceContext.from_defaults(\n",
+                "    llm=llm,\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 43,
+            "id": "6a8b6605-823a-4607-be0e-99c67d5a90ff",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "doc_nodes = service_context.node_parser.get_nodes_from_documents(docs)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 61,
+            "id": "6a3d249c-2242-4158-88ea-d16b67815107",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "# define index nodes\n",
+                "summaries = [\n",
+                "    \"This node provides information about the world's richest billionaires in 2023\",\n",
+                "    \"This node provides information on the number of billionaires and their combined net worth from 2000 to 2023.\",\n",
+                "]\n",
+                "\n",
+                "df_nodes = [\n",
+                "    IndexNode(text=summary, index_id=f\"pandas{idx}\")\n",
+                "    for idx, summary in enumerate(summaries)\n",
+                "]\n",
+                "\n",
+                "df_id_query_engine_mapping = {\n",
+                "    f\"pandas{idx}\": df_query_engine\n",
+                "    for idx, df_query_engine in enumerate(df_query_engines)\n",
+                "}"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 62,
+            "id": "abf87458-cd3d-4ac4-b934-20ee8a9a820a",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "# construct top-level vector index + query engine\n",
+                "vector_index = VectorStoreIndex(doc_nodes + df_nodes)\n",
+                "vector_retriever = vector_index.as_retriever(similarity_top_k=1)"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "6ed917f6-e407-4ebb-9c15-36aedd207c6f",
+            "metadata": {},
+            "source": [
+                "## Use `RecursiveRetriever` in our `RetrieverQueryEngine`\n",
+                "\n",
+                "We define a `RecursiveRetriever` object to recursively retrieve/query nodes. We then put this in our `RetrieverQueryEngine` along with a `ResponseSynthesizer` to synthesize a response.\n",
+                "\n",
+                "We pass in mappings from id to retriever and id to query engine. We then pass in a root id representing the retriever we query first."
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 63,
+            "id": "9a7d6031-f7a0-45c2-9a84-813b4e3fcf28",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "# baseline vector index (that doesn't include the extra df nodes).\n",
+                "# used to benchmark\n",
+                "vector_index0 = VectorStoreIndex(doc_nodes)\n",
+                "vector_query_engine0 = vector_index0.as_query_engine()"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 64,
+            "id": "e38d1e90-7d83-46bb-99d7-0892aef4d3ca",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "from llama_index.retrievers import RecursiveRetriever\n",
+                "from llama_index.query_engine import RetrieverQueryEngine\n",
+                "from llama_index.response_synthesizers import get_response_synthesizer\n",
+                "\n",
+                "recursive_retriever = RecursiveRetriever(\n",
+                "    \"vector\",\n",
+                "    retriever_dict={\"vector\": vector_retriever},\n",
+                "    query_engine_dict=df_id_query_engine_mapping,\n",
+                "    verbose=True,\n",
+                ")\n",
+                "\n",
+                "response_synthesizer = get_response_synthesizer(\n",
+                "    # service_context=service_context,\n",
+                "    response_mode=\"compact\"\n",
+                ")\n",
+                "\n",
+                "query_engine = RetrieverQueryEngine.from_args(\n",
+                "    recursive_retriever, response_synthesizer=response_synthesizer\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 65,
+            "id": "6af6823d-bd07-4088-8422-bd9aa3224b08",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "\u001b[36;1m\u001b[1;3mRetrieving with query id None: What's the net worth of the second richest billionaire in 2023?\n",
+                        "\u001b[0m\u001b[38;5;200m\u001b[1;3mRetrieved node with id, entering: pandas0\n",
+                        "\u001b[0m\u001b[36;1m\u001b[1;3mRetrieving with query id pandas0: What's the net worth of the second richest billionaire in 2023?\n",
+                        "\u001b[0mdf.iloc[1]['Net worth\\n(USD)']\n",
+                        "\u001b[32;1m\u001b[1;3mGot response: $180 billion\n",
+                        "\u001b[0m"
+                    ]
+                }
+            ],
+            "source": [
+                "response = query_engine.query(\n",
+                "    \"What's the net worth of the second richest billionaire in 2023?\"\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 66,
+            "id": "6122ff8e-f84d-47cc-b363-44621e0623ab",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [
+                {
+                    "data": {
+                        "text/plain": [
+                            "\"Query: What's the net worth of the second richest billionaire in 2023?\\nResponse: $180\\xa0billion\""
+                        ]
+                    },
+                    "execution_count": 66,
+                    "metadata": {},
+                    "output_type": "execute_result"
+                }
+            ],
+            "source": [
+                "response.source_nodes[0].node.get_content()"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 67,
+            "id": "4096f30c-c6a3-4f74-b496-1240fdc08fd4",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [
+                {
+                    "data": {
+                        "text/plain": [
+                            "'\\n$180 billion'"
+                        ]
+                    },
+                    "execution_count": 67,
+                    "metadata": {},
+                    "output_type": "execute_result"
+                }
+            ],
+            "source": [
+                "str(response)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 68,
+            "id": "5cf133a7-2532-4179-af21-d495eb547083",
+            "metadata": {},
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "\u001b[36;1m\u001b[1;3mRetrieving with query id None: How many billionaires were there in 2009?\n",
+                        "\u001b[0m\u001b[38;5;200m\u001b[1;3mRetrieved node with id, entering: pandas1\n",
+                        "\u001b[0m\u001b[36;1m\u001b[1;3mRetrieving with query id pandas1: How many billionaires were there in 2009?\n",
+                        "\u001b[0mdf[df['Year'] == '2009']['Number of billionaires'].iloc[0]\n",
+                        "\u001b[32;1m\u001b[1;3mGot response: 793\n",
+                        "\u001b[0m"
+                    ]
+                }
+            ],
+            "source": [
+                "response = query_engine.query(\"How many billionaires were there in 2009?\")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 69,
+            "id": "489e4c59-a40c-47b8-a788-e80558ce7e3a",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [
+                {
+                    "data": {
+                        "text/plain": [
+                            "'\\n793'"
+                        ]
+                    },
+                    "execution_count": 69,
+                    "metadata": {},
+                    "output_type": "execute_result"
+                }
+            ],
+            "source": [
+                "str(response)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 35,
+            "id": "551781f3-8761-4385-966d-a0a6b0526ed6",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "response = vector_query_engine0.query(\n",
+                "    \"What's the net worth of the second richest billionaire in 2023?\"\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 36,
+            "id": "3656db61-32bc-49c6-9774-8439323358a5",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "7/1/23, 11:31 PM\n",
+                        "The World's Billionaires - Wikipedia\n",
+                        "https://en.wikipedia.org/wiki/The_World%27s_Billionaires\n",
+                        "3/33\n",
+                        "No.\n",
+                        "Name\n",
+                        "Net worth\n",
+                        "(USD)\n",
+                        "Age\n",
+                        "Nationality\n",
+                        "Primary source(s) of wealth\n",
+                        "1 \n",
+                        "Bernard Arnault &\n",
+                        "family\n",
+                        "$211 billion \n",
+                        "74\n",
+                        " France\n",
+                        "LVMH\n",
+                        "2 \n",
+                        "Elon Musk\n",
+                        "$180 billion \n",
+                        "51\n",
+                        " United\n",
+                        "States\n",
+                        "Tesla, SpaceX, X Corp.\n",
+                        "3 \n",
+                        "Jeff Bezos\n",
+                        "$114 billion \n",
+                        "59\n",
+                        " United\n",
+                        "States\n",
+                        "Amazon\n",
+                        "4 \n",
+                        "Larry Ellison\n",
+                        "$107 billion \n",
+                        "78\n",
+                        " United\n",
+                        "States\n",
+                        "Oracle Corporation\n",
+                        "5 \n",
+                        "Warren Buffett\n",
+                        "$106 billion \n",
+                        "92\n",
+                        " United\n",
+                        "States\n",
+                        "Berkshire Hathaway\n",
+                        "6 \n",
+                        "Bill Gates\n",
+                        "$104 billion \n",
+                        "67\n",
+                        " United\n",
+                        "States\n",
+                        "Microsoft\n",
+                        "7 \n",
+                        "Michael Bloomberg\n",
+                        "$94.5 billion \n",
+                        "81\n",
+                        " United\n",
+                        "States\n",
+                        "Bloomberg L.P.\n",
+                        "8 \n",
+                        "Carlos Slim & family\n",
+                        "$93 billion \n",
+                        "83\n",
+                        " Mexico\n",
+                        "Telmex, América Móvil, Grupo\n",
+                        "Carso\n",
+                        "9 \n",
+                        "Mukesh Ambani\n",
+                        "$83.4 billion \n",
+                        "65\n",
+                        " India\n",
+                        "Reliance Industries\n",
+                        "10 \n",
+                        "Steve Ballmer\n",
+                        "$80.7 billion \n",
+                        "67\n",
+                        " United\n",
+                        "States\n",
+                        "Microsoft\n",
+                        "In the 36th annual Forbes list of the world's billionaires, the list included 2,668 billionaires with a\n",
+                        "total net wealth of $12.7 trillion, down 97 members from 2021.[6]\n",
+                        "2022\n"
+                    ]
+                }
+            ],
+            "source": [
+                "print(response.source_nodes[1].node.get_content())"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 37,
+            "id": "89b47692-95f0-4077-9439-d8af237e5c16",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "\n",
+                        "The net worth of the second richest billionaire in 2023 is $211 billion.\n"
+                    ]
+                }
+            ],
+            "source": [
+                "print(str(response))"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 38,
+            "id": "1fbe211d-d59b-4f87-897c-0fa03e42641e",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [
+                {
+                    "data": {
+                        "text/plain": [
+                            "\"7/1/23, 11:31 PM\\nThe World's Billionaires - Wikipedia\\nhttps://en.wikipedia.org/wiki/The_World%27s_Billionaires\\n3/33\\nNo.\\nName\\nNet worth\\n(USD)\\nAge\\nNationality\\nPrimary source(s) of wealth\\n1 \\nBernard Arnault &\\nfamily\\n$211\\xa0billion\\xa0\\n74\\n\\xa0France\\nLVMH\\n2 \\nElon Musk\\n$180\\xa0billion\\xa0\\n51\\n\\xa0United\\nStates\\nTesla, SpaceX, X Corp.\\n3 \\nJeff Bezos\\n$114\\xa0billion\\xa0\\n59\\n\\xa0United\\nStates\\nAmazon\\n4 \\nLarry Ellison\\n$107\\xa0billion\\xa0\\n78\\n\\xa0United\\nStates\\nOracle Corporation\\n5 \\nWarren Buffett\\n$106\\xa0billion\\xa0\\n92\\n\\xa0United\\nStates\\nBerkshire Hathaway\\n6 \\nBill Gates\\n$104\\xa0billion\\xa0\\n67\\n\\xa0United\\nStates\\nMicrosoft\\n7 \\nMichael Bloomberg\\n$94.5\\xa0billion\\xa0\\n81\\n\\xa0United\\nStates\\nBloomberg L.P.\\n8 \\nCarlos Slim & family\\n$93\\xa0billion\\xa0\\n83\\n\\xa0Mexico\\nTelmex, América Móvil, Grupo\\nCarso\\n9 \\nMukesh Ambani\\n$83.4\\xa0billion \\n65\\n\\xa0India\\nReliance Industries\\n10 \\nSteve Ballmer\\n$80.7\\xa0billion\\xa0\\n67\\n\\xa0United\\nStates\\nMicrosoft\\nIn the 36th annual Forbes list of the world's billionaires, the list included 2,668 billionaires with a\\ntotal net wealth of $12.7 trillion, down 97 members from 2021.[6]\\n2022\""
+                        ]
+                    },
+                    "execution_count": 38,
+                    "metadata": {},
+                    "output_type": "execute_result"
+                }
+            ],
+            "source": [
+                "response.source_nodes[1].node.get_content()"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "960c81f6-b7a6-43ac-aa4b-9ef20a5400b1",
+            "metadata": {},
+            "outputs": [],
+            "source": []
+        }
+    ],
+    "metadata": {
+        "kernelspec": {
+            "display_name": "llama_index_v2",
+            "language": "python",
+            "name": "llama_index_v2"
+        },
+        "language_info": {
+            "codemirror_mode": {
+                "name": "ipython",
+                "version": 3
+            },
+            "file_extension": ".py",
+            "mimetype": "text/x-python",
+            "name": "python",
+            "nbconvert_exporter": "python",
+            "pygments_lexer": "ipython3",
+            "version": "3.10.10"
+        }
     },
-    {
-     "data": {
-      "text/plain": [
-       "Response(response='$180\\xa0billion', source_nodes=[], metadata={'pandas_instruction_str': \"\\ndf.iloc[1]['Net worth\\\\n(USD)']\"})"
-      ]
-     },
-     "execution_count": 10,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "df_query_engines[0].query(\n",
-    "    \"What's the net worth of the second richest billionaire in 2023?\"\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 41,
-   "id": "78dbe39d-eea0-4c7f-bea7-2c0f6f6591cd",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "df[df['Year'] == '2009']['Number of billionaires'].iloc[0]\n"
-     ]
-    },
-    {
-     "data": {
-      "text/plain": [
-       "Response(response='793', source_nodes=[], metadata={'pandas_instruction_str': \"\\ndf[df['Year'] == '2009']['Number of billionaires'].iloc[0]\"})"
-      ]
-     },
-     "execution_count": 41,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "df_query_engines[1].query(\"How many billionaires were there in 2009?\")"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "e9bca53c-8766-42e4-96c7-145e7f14be34",
-   "metadata": {},
-   "source": [
-    "## Build Vector Index\n",
-    "\n",
-    "Build vector index over the chunked document as well as over the additional `IndexNode` objects linked to the tables."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 42,
-   "id": "fffd5616-4256-46da-a765-10476a1ee1ee",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "llm = OpenAI(temperature=0, model=\"gpt-4\")\n",
-    "\n",
-    "service_context = ServiceContext.from_defaults(\n",
-    "    llm=llm,\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 43,
-   "id": "6a8b6605-823a-4607-be0e-99c67d5a90ff",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "doc_nodes = service_context.node_parser.get_nodes_from_documents(docs)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 61,
-   "id": "6a3d249c-2242-4158-88ea-d16b67815107",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "# define index nodes\n",
-    "summaries = [\n",
-    "    \"This node provides information about the world's richest billionaires in 2023\",\n",
-    "    \"This node provides information on the number of billionaires and their combined net worth from 2000 to 2023.\",\n",
-    "]\n",
-    "\n",
-    "df_nodes = [\n",
-    "    IndexNode(text=summary, index_id=f\"pandas{idx}\")\n",
-    "    for idx, summary in enumerate(summaries)\n",
-    "]\n",
-    "\n",
-    "df_id_query_engine_mapping = {\n",
-    "    f\"pandas{idx}\": df_query_engine\n",
-    "    for idx, df_query_engine in enumerate(df_query_engines)\n",
-    "}"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 62,
-   "id": "abf87458-cd3d-4ac4-b934-20ee8a9a820a",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "# construct top-level vector index + query engine\n",
-    "vector_index = VectorStoreIndex(doc_nodes + df_nodes)\n",
-    "vector_retriever = vector_index.as_retriever(similarity_top_k=1)"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "6ed917f6-e407-4ebb-9c15-36aedd207c6f",
-   "metadata": {},
-   "source": [
-    "## Use `RecursiveRetriever` in our `RetrieverQueryEngine`\n",
-    "\n",
-    "We define a `RecursiveRetriever` object to recursively retrieve/query nodes. We then put this in our `RetrieverQueryEngine` along with a `ResponseSynthesizer` to synthesize a response.\n",
-    "\n",
-    "We pass in mappings from id to retriever and id to query engine. We then pass in a root id representing the retriever we query first."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 63,
-   "id": "9a7d6031-f7a0-45c2-9a84-813b4e3fcf28",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "# baseline vector index (that doesn't include the extra df nodes).\n",
-    "# used to benchmark\n",
-    "vector_index0 = VectorStoreIndex(doc_nodes)\n",
-    "vector_query_engine0 = vector_index0.as_query_engine()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 64,
-   "id": "e38d1e90-7d83-46bb-99d7-0892aef4d3ca",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "from llama_index.retrievers import RecursiveRetriever\n",
-    "from llama_index.query_engine import RetrieverQueryEngine\n",
-    "from llama_index.response_synthesizers import get_response_synthesizer\n",
-    "\n",
-    "recursive_retriever = RecursiveRetriever(\n",
-    "    \"vector\",\n",
-    "    retriever_dict={\"vector\": vector_retriever},\n",
-    "    query_engine_dict=df_id_query_engine_mapping,\n",
-    "    verbose=True,\n",
-    ")\n",
-    "\n",
-    "response_synthesizer = get_response_synthesizer(\n",
-    "    # service_context=service_context,\n",
-    "    response_mode=\"compact\"\n",
-    ")\n",
-    "\n",
-    "query_engine = RetrieverQueryEngine.from_args(\n",
-    "    recursive_retriever, response_synthesizer=response_synthesizer\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 65,
-   "id": "6af6823d-bd07-4088-8422-bd9aa3224b08",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "\u001b[36;1m\u001b[1;3mRetrieving with query id None: What's the net worth of the second richest billionaire in 2023?\n",
-      "\u001b[0m\u001b[38;5;200m\u001b[1;3mRetrieved node with id, entering: pandas0\n",
-      "\u001b[0m\u001b[36;1m\u001b[1;3mRetrieving with query id pandas0: What's the net worth of the second richest billionaire in 2023?\n",
-      "\u001b[0mdf.iloc[1]['Net worth\\n(USD)']\n",
-      "\u001b[32;1m\u001b[1;3mGot response: $180 billion\n",
-      "\u001b[0m"
-     ]
-    }
-   ],
-   "source": [
-    "response = query_engine.query(\n",
-    "    \"What's the net worth of the second richest billionaire in 2023?\"\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 66,
-   "id": "6122ff8e-f84d-47cc-b363-44621e0623ab",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "\"Query: What's the net worth of the second richest billionaire in 2023?\\nResponse: $180\\xa0billion\""
-      ]
-     },
-     "execution_count": 66,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "response.source_nodes[0].node.get_content()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 67,
-   "id": "4096f30c-c6a3-4f74-b496-1240fdc08fd4",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "'\\n$180 billion'"
-      ]
-     },
-     "execution_count": 67,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "str(response)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 68,
-   "id": "5cf133a7-2532-4179-af21-d495eb547083",
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "\u001b[36;1m\u001b[1;3mRetrieving with query id None: How many billionaires were there in 2009?\n",
-      "\u001b[0m\u001b[38;5;200m\u001b[1;3mRetrieved node with id, entering: pandas1\n",
-      "\u001b[0m\u001b[36;1m\u001b[1;3mRetrieving with query id pandas1: How many billionaires were there in 2009?\n",
-      "\u001b[0mdf[df['Year'] == '2009']['Number of billionaires'].iloc[0]\n",
-      "\u001b[32;1m\u001b[1;3mGot response: 793\n",
-      "\u001b[0m"
-     ]
-    }
-   ],
-   "source": [
-    "response = query_engine.query(\"How many billionaires were there in 2009?\")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 69,
-   "id": "489e4c59-a40c-47b8-a788-e80558ce7e3a",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "'\\n793'"
-      ]
-     },
-     "execution_count": 69,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "str(response)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 35,
-   "id": "551781f3-8761-4385-966d-a0a6b0526ed6",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "response = vector_query_engine0.query(\n",
-    "    \"What's the net worth of the second richest billionaire in 2023?\"\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 36,
-   "id": "3656db61-32bc-49c6-9774-8439323358a5",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "7/1/23, 11:31 PM\n",
-      "The World's Billionaires - Wikipedia\n",
-      "https://en.wikipedia.org/wiki/The_World%27s_Billionaires\n",
-      "3/33\n",
-      "No.\n",
-      "Name\n",
-      "Net worth\n",
-      "(USD)\n",
-      "Age\n",
-      "Nationality\n",
-      "Primary source(s) of wealth\n",
-      "1 \n",
-      "Bernard Arnault &\n",
-      "family\n",
-      "$211 billion \n",
-      "74\n",
-      " France\n",
-      "LVMH\n",
-      "2 \n",
-      "Elon Musk\n",
-      "$180 billion \n",
-      "51\n",
-      " United\n",
-      "States\n",
-      "Tesla, SpaceX, X Corp.\n",
-      "3 \n",
-      "Jeff Bezos\n",
-      "$114 billion \n",
-      "59\n",
-      " United\n",
-      "States\n",
-      "Amazon\n",
-      "4 \n",
-      "Larry Ellison\n",
-      "$107 billion \n",
-      "78\n",
-      " United\n",
-      "States\n",
-      "Oracle Corporation\n",
-      "5 \n",
-      "Warren Buffett\n",
-      "$106 billion \n",
-      "92\n",
-      " United\n",
-      "States\n",
-      "Berkshire Hathaway\n",
-      "6 \n",
-      "Bill Gates\n",
-      "$104 billion \n",
-      "67\n",
-      " United\n",
-      "States\n",
-      "Microsoft\n",
-      "7 \n",
-      "Michael Bloomberg\n",
-      "$94.5 billion \n",
-      "81\n",
-      " United\n",
-      "States\n",
-      "Bloomberg L.P.\n",
-      "8 \n",
-      "Carlos Slim & family\n",
-      "$93 billion \n",
-      "83\n",
-      " Mexico\n",
-      "Telmex, América Móvil, Grupo\n",
-      "Carso\n",
-      "9 \n",
-      "Mukesh Ambani\n",
-      "$83.4 billion \n",
-      "65\n",
-      " India\n",
-      "Reliance Industries\n",
-      "10 \n",
-      "Steve Ballmer\n",
-      "$80.7 billion \n",
-      "67\n",
-      " United\n",
-      "States\n",
-      "Microsoft\n",
-      "In the 36th annual Forbes list of the world's billionaires, the list included 2,668 billionaires with a\n",
-      "total net wealth of $12.7 trillion, down 97 members from 2021.[6]\n",
-      "2022\n"
-     ]
-    }
-   ],
-   "source": [
-    "print(response.source_nodes[1].node.get_content())"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 37,
-   "id": "89b47692-95f0-4077-9439-d8af237e5c16",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "\n",
-      "The net worth of the second richest billionaire in 2023 is $211 billion.\n"
-     ]
-    }
-   ],
-   "source": [
-    "print(str(response))"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 38,
-   "id": "1fbe211d-d59b-4f87-897c-0fa03e42641e",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "\"7/1/23, 11:31 PM\\nThe World's Billionaires - Wikipedia\\nhttps://en.wikipedia.org/wiki/The_World%27s_Billionaires\\n3/33\\nNo.\\nName\\nNet worth\\n(USD)\\nAge\\nNationality\\nPrimary source(s) of wealth\\n1 \\nBernard Arnault &\\nfamily\\n$211\\xa0billion\\xa0\\n74\\n\\xa0France\\nLVMH\\n2 \\nElon Musk\\n$180\\xa0billion\\xa0\\n51\\n\\xa0United\\nStates\\nTesla, SpaceX, X Corp.\\n3 \\nJeff Bezos\\n$114\\xa0billion\\xa0\\n59\\n\\xa0United\\nStates\\nAmazon\\n4 \\nLarry Ellison\\n$107\\xa0billion\\xa0\\n78\\n\\xa0United\\nStates\\nOracle Corporation\\n5 \\nWarren Buffett\\n$106\\xa0billion\\xa0\\n92\\n\\xa0United\\nStates\\nBerkshire Hathaway\\n6 \\nBill Gates\\n$104\\xa0billion\\xa0\\n67\\n\\xa0United\\nStates\\nMicrosoft\\n7 \\nMichael Bloomberg\\n$94.5\\xa0billion\\xa0\\n81\\n\\xa0United\\nStates\\nBloomberg L.P.\\n8 \\nCarlos Slim & family\\n$93\\xa0billion\\xa0\\n83\\n\\xa0Mexico\\nTelmex, América Móvil, Grupo\\nCarso\\n9 \\nMukesh Ambani\\n$83.4\\xa0billion \\n65\\n\\xa0India\\nReliance Industries\\n10 \\nSteve Ballmer\\n$80.7\\xa0billion\\xa0\\n67\\n\\xa0United\\nStates\\nMicrosoft\\nIn the 36th annual Forbes list of the world's billionaires, the list included 2,668 billionaires with a\\ntotal net wealth of $12.7 trillion, down 97 members from 2021.[6]\\n2022\""
-      ]
-     },
-     "execution_count": 38,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "response.source_nodes[1].node.get_content()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "960c81f6-b7a6-43ac-aa4b-9ef20a5400b1",
-   "metadata": {},
-   "outputs": [],
-   "source": []
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "llama_index_v2",
-   "language": "python",
-   "name": "llama_index_v2"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 3
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython3",
-   "version": "3.10.10"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 5
-}
+    "nbformat": 4,
+    "nbformat_minor": 5
+}
\ No newline at end of file
diff --git a/docs/examples/query_engine/recursive_retriever_agents.ipynb b/docs/examples/query_engine/recursive_retriever_agents.ipynb
index 7e1bb70c219879df5c94bee8c8afa7e4586917c8..ebb4953ce34de4f7428e9032c68e1705c34579c1 100644
--- a/docs/examples/query_engine/recursive_retriever_agents.ipynb
+++ b/docs/examples/query_engine/recursive_retriever_agents.ipynb
@@ -37,7 +37,7 @@
    "source": [
     "from llama_index import (\n",
     "    VectorStoreIndex,\n",
-    "    ListIndex,\n",
+    "    SummaryIndex,\n",
     "    SimpleKeywordTableIndex,\n",
     "    SimpleDirectoryReader,\n",
     "    ServiceContext,\n",
@@ -169,7 +169,7 @@
     "        city_docs[wiki_title], service_context=service_context\n",
     "    )\n",
     "    # build list index\n",
-    "    list_index = ListIndex.from_documents(\n",
+    "    list_index = SummaryIndex.from_documents(\n",
     "        city_docs[wiki_title], service_context=service_context\n",
     "    )\n",
     "    # define query engines\n",
diff --git a/docs/examples/retrievers/auto_vs_recursive_retriever.ipynb b/docs/examples/retrievers/auto_vs_recursive_retriever.ipynb
index 5502bee6c230b0a062d4ceb61ca48b316cc6e243..d8a718e392c88684fcf3ee2c64973bca151c5584 100644
--- a/docs/examples/retrievers/auto_vs_recursive_retriever.ipynb
+++ b/docs/examples/retrievers/auto_vs_recursive_retriever.ipynb
@@ -1,1306 +1,1306 @@
 {
- "cells": [
-  {
-   "cell_type": "markdown",
-   "id": "5fa01d8c-261f-4da2-8698-5eae785e2f81",
-   "metadata": {},
-   "source": [
-    "# Comparing Methods for Structured Retrieval (Auto-Retrieval vs. Recursive Retrieval)\n",
-    "\n",
-    "In a naive RAG system, the set of input documents are then chunked, embedded, and dumped to a vector database collection. Retrieval would just fetch the top-k documents by embedding similarity.\n",
-    "\n",
-    "This can fail if the set of documents is large - it can be hard to disambiguate raw chunks, and you're not guaranteed to filter for the set of documents that contain relevant context.\n",
-    "\n",
-    "In this guide we explore **structured retrieval** - more advanced query algorithms that take advantage of structure within your documents for higher-precision retrieval. We compare the following two methods:\n",
-    "\n",
-    "- **Metadata Filters + Auto-Retrieval**: Tag each document with the right set of metadata. During query-time, use auto-retrieval to infer metadata filters along with passing through the query string for semantic search.\n",
-    "- **Store Document Hierarchies (summaries -> raw chunks) + Recursive Retrieval**: Embed document summaries and map that to the set of raw chunks for each document. During query-time, do recursive retrieval to first fetch summaries before fetching documents."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 77,
-   "id": "15c61ad0-25e3-4a07-a5af-d604f36b84aa",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "import nest_asyncio\n",
-    "\n",
-    "nest_asyncio.apply()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 1,
-   "id": "d38e8cbf-53da-4fed-8770-860e9e83d329",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "import logging\n",
-    "import sys\n",
-    "from llama_index import SimpleDirectoryReader, ListIndex, ServiceContext\n",
-    "\n",
-    "logging.basicConfig(stream=sys.stdout, level=logging.INFO)\n",
-    "logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 58,
-   "id": "9107ed7c-9727-40c9-ae43-862a8c67b7d0",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "wiki_titles = [\"Michael Jordan\", \"Elon Musk\", \"Richard Branson\", \"Rihanna\"]\n",
-    "wiki_metadatas = {\n",
-    "    \"Michael Jordan\": {\n",
-    "        \"category\": \"Sports\",\n",
-    "        \"country\": \"United States\",\n",
-    "    },\n",
-    "    \"Elon Musk\": {\n",
-    "        \"category\": \"Business\",\n",
-    "        \"country\": \"United States\",\n",
-    "    },\n",
-    "    \"Richard Branson\": {\n",
-    "        \"category\": \"Business\",\n",
-    "        \"country\": \"UK\",\n",
-    "    },\n",
-    "    \"Rihanna\": {\n",
-    "        \"category\": \"Music\",\n",
-    "        \"country\": \"Barbados\",\n",
-    "    },\n",
-    "}"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 59,
-   "id": "ec14903c-9e13-4a41-a8e0-f8bf9e0b5c89",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "from pathlib import Path\n",
-    "\n",
-    "import requests\n",
-    "\n",
-    "for title in wiki_titles:\n",
-    "    response = requests.get(\n",
-    "        \"https://en.wikipedia.org/w/api.php\",\n",
-    "        params={\n",
-    "            \"action\": \"query\",\n",
-    "            \"format\": \"json\",\n",
-    "            \"titles\": title,\n",
-    "            \"prop\": \"extracts\",\n",
-    "            # 'exintro': True,\n",
-    "            \"explaintext\": True,\n",
-    "        },\n",
-    "    ).json()\n",
-    "    page = next(iter(response[\"query\"][\"pages\"].values()))\n",
-    "    wiki_text = page[\"extract\"]\n",
-    "\n",
-    "    data_path = Path(\"data\")\n",
-    "    if not data_path.exists():\n",
-    "        Path.mkdir(data_path)\n",
-    "\n",
-    "    with open(data_path / f\"{title}.txt\", \"w\") as fp:\n",
-    "        fp.write(wiki_text)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 60,
-   "id": "cee3a660-dfff-4865-8fe8-75862a2b4c78",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "# Load all wiki documents\n",
-    "docs_dict = {}\n",
-    "for wiki_title in wiki_titles:\n",
-    "    doc = SimpleDirectoryReader(input_files=[f\"data/{wiki_title}.txt\"]).load_data()[0]\n",
-    "\n",
-    "    doc.metadata.update(wiki_metadatas[wiki_title])\n",
-    "    docs_dict[wiki_title] = doc"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 61,
-   "id": "438b87ce-729d-4fad-9464-b9fa30e069b0",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "from llama_index.llms import OpenAI\n",
-    "from llama_index.callbacks import LlamaDebugHandler, CallbackManager\n",
-    "\n",
-    "\n",
-    "llm = OpenAI(\"gpt-4\")\n",
-    "callback_manager = CallbackManager([LlamaDebugHandler()])\n",
-    "service_context = ServiceContext.from_defaults(\n",
-    "    llm=llm, callback_manager=callback_manager, chunk_size=256\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "id": "c07f3302-f0c1-485e-990e-ac6b1dda2577",
-   "metadata": {},
-   "source": [
-    "## Metadata Filters + Auto-Retrieval\n",
-    "\n",
-    "In this approach, we tag each Document with metadata (category, country), and store in a Weaviate vector db.\n",
-    "\n",
-    "During retrieval-time, we then perform \"auto-retrieval\" to infer the relevant set of metadata filters."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 105,
-   "id": "c3a35007-9bd0-42cc-be52-ba0316f80635",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "/Users/jerryliu/Programming/gpt_index/.venv/lib/python3.10/site-packages/requests/sessions.py:806: ResourceWarning: unclosed <ssl.SSLSocket fd=77, family=AddressFamily.AF_INET, type=SocketKind.SOCK_STREAM, proto=0, laddr=('192.168.1.78', 63780), raddr=('34.111.207.94', 443)>\n",
-      "  self.adapters[prefix] = adapter\n",
-      "ResourceWarning: Enable tracemalloc to get the object allocation traceback\n"
-     ]
-    }
-   ],
-   "source": [
-    "## Setup Weaviate\n",
-    "import weaviate\n",
-    "\n",
-    "# cloud\n",
-    "resource_owner_config = weaviate.AuthClientPassword(\n",
-    "    username=\"username\",\n",
-    "    password=\"password\",\n",
-    ")\n",
-    "client = weaviate.Client(\n",
-    "    \"https://llamaindex-test-ul4sgpxc.weaviate.network\",\n",
-    "    auth_client_secret=resource_owner_config,\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 106,
-   "id": "7051c353-ba65-4a47-bf9a-f5b97d7212ce",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "from llama_index import VectorStoreIndex, SimpleDirectoryReader\n",
-    "from llama_index.vector_stores import WeaviateVectorStore\n",
-    "from IPython.display import Markdown, display"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 107,
-   "id": "0c2dcf5d-f539-4e83-8017-0a79ef398132",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "# drop items from collection first\n",
-    "client.schema.delete_class(\"LlamaIndex\")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 108,
-   "id": "f2f0e889-1f16-4660-863c-dca8ef63fbe0",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "from llama_index.storage.storage_context import StorageContext\n",
-    "\n",
-    "# If you want to load the index later, be sure to give it a name!\n",
-    "vector_store = WeaviateVectorStore(weaviate_client=client, index_name=\"LlamaIndex\")\n",
-    "storage_context = StorageContext.from_defaults(vector_store=vector_store)\n",
-    "\n",
-    "# NOTE: you may also choose to define a index_name manually.\n",
-    "# index_name = \"test_prefix\"\n",
-    "# vector_store = WeaviateVectorStore(weaviate_client=client, index_name=index_name)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 109,
-   "id": "d01b32c2-5625-4b6f-87eb-198b5f3f37da",
-   "metadata": {
-    "scrolled": true,
-    "tags": []
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "{'class': 'LlamaIndex',\n",
-       " 'description': 'Class for LlamaIndex',\n",
-       " 'invertedIndexConfig': {'bm25': {'b': 0.75, 'k1': 1.2},\n",
-       "  'cleanupIntervalSeconds': 60,\n",
-       "  'stopwords': {'additions': None, 'preset': 'en', 'removals': None}},\n",
-       " 'multiTenancyConfig': {'enabled': False},\n",
-       " 'properties': [{'dataType': ['text'],\n",
-       "   'description': 'Text property',\n",
-       "   'indexFilterable': True,\n",
-       "   'indexSearchable': True,\n",
-       "   'name': 'text',\n",
-       "   'tokenization': 'whitespace'},\n",
-       "  {'dataType': ['text'],\n",
-       "   'description': 'The ref_doc_id of the Node',\n",
-       "   'indexFilterable': True,\n",
-       "   'indexSearchable': True,\n",
-       "   'name': 'ref_doc_id',\n",
-       "   'tokenization': 'whitespace'},\n",
-       "  {'dataType': ['text'],\n",
-       "   'description': 'node_info (in JSON)',\n",
-       "   'indexFilterable': True,\n",
-       "   'indexSearchable': True,\n",
-       "   'name': 'node_info',\n",
-       "   'tokenization': 'whitespace'},\n",
-       "  {'dataType': ['text'],\n",
-       "   'description': 'The relationships of the node (in JSON)',\n",
-       "   'indexFilterable': True,\n",
-       "   'indexSearchable': True,\n",
-       "   'name': 'relationships',\n",
-       "   'tokenization': 'whitespace'}],\n",
-       " 'replicationConfig': {'factor': 1},\n",
-       " 'shardingConfig': {'virtualPerPhysical': 128,\n",
-       "  'desiredCount': 1,\n",
-       "  'actualCount': 1,\n",
-       "  'desiredVirtualCount': 128,\n",
-       "  'actualVirtualCount': 128,\n",
-       "  'key': '_id',\n",
-       "  'strategy': 'hash',\n",
-       "  'function': 'murmur3'},\n",
-       " 'vectorIndexConfig': {'skip': False,\n",
-       "  'cleanupIntervalSeconds': 300,\n",
-       "  'maxConnections': 64,\n",
-       "  'efConstruction': 128,\n",
-       "  'ef': -1,\n",
-       "  'dynamicEfMin': 100,\n",
-       "  'dynamicEfMax': 500,\n",
-       "  'dynamicEfFactor': 8,\n",
-       "  'vectorCacheMaxObjects': 1000000000000,\n",
-       "  'flatSearchCutoff': 40000,\n",
-       "  'distance': 'cosine',\n",
-       "  'pq': {'enabled': False,\n",
-       "   'bitCompression': False,\n",
-       "   'segments': 0,\n",
-       "   'centroids': 256,\n",
-       "   'trainingLimit': 100000,\n",
-       "   'encoder': {'type': 'kmeans', 'distribution': 'log-normal'}}},\n",
-       " 'vectorIndexType': 'hnsw',\n",
-       " 'vectorizer': 'none'}"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    }
-   ],
-   "source": [
-    "# validate that the schema was created\n",
-    "class_schema = client.schema.get(\"LlamaIndex\")\n",
-    "display(class_schema)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 110,
-   "id": "3fce23e0-0632-4c72-97d3-e2c845e32555",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "Exception in thread TokenRefresh:\n",
-      "Traceback (most recent call last):\n",
-      "  File \"/Users/jerryliu/Programming/gpt_index/.venv/lib/python3.10/site-packages/urllib3/connectionpool.py\", line 703, in urlopen\n",
-      "    httplib_response = self._make_request(\n",
-      "  File \"/Users/jerryliu/Programming/gpt_index/.venv/lib/python3.10/site-packages/urllib3/connectionpool.py\", line 449, in _make_request\n",
-      "    six.raise_from(e, None)\n",
-      "  File \"<string>\", line 3, in raise_from\n",
-      "  File \"/Users/jerryliu/Programming/gpt_index/.venv/lib/python3.10/site-packages/urllib3/connectionpool.py\", line 444, in _make_request\n",
-      "    httplib_response = conn.getresponse()\n",
-      "  File \"/Library/Frameworks/Python.framework/Versions/3.10/lib/python3.10/http/client.py\", line 1374, in getresponse\n",
-      "    response.begin()\n",
-      "  File \"/Library/Frameworks/Python.framework/Versions/3.10/lib/python3.10/http/client.py\", line 318, in begin\n",
-      "    version, status, reason = self._read_status()\n",
-      "  File \"/Library/Frameworks/Python.framework/Versions/3.10/lib/python3.10/http/client.py\", line 287, in _read_status\n",
-      "    raise RemoteDisconnected(\"Remote end closed connection without\"\n",
-      "http.client.RemoteDisconnected: Remote end closed connection without response\n",
-      "\n",
-      "During handling of the above exception, another exception occurred:\n",
-      "\n",
-      "Traceback (most recent call last):\n",
-      "  File \"/Users/jerryliu/Programming/gpt_index/.venv/lib/python3.10/site-packages/requests/adapters.py\", line 486, in send\n",
-      "    resp = conn.urlopen(\n",
-      "  File \"/Users/jerryliu/Programming/gpt_index/.venv/lib/python3.10/site-packages/urllib3/connectionpool.py\", line 787, in urlopen\n",
-      "    retries = retries.increment(\n",
-      "  File \"/Users/jerryliu/Programming/gpt_index/.venv/lib/python3.10/site-packages/urllib3/util/retry.py\", line 550, in increment\n",
-      "    raise six.reraise(type(error), error, _stacktrace)\n",
-      "  File \"/Users/jerryliu/Programming/gpt_index/.venv/lib/python3.10/site-packages/urllib3/packages/six.py\", line 769, in reraise\n",
-      "    raise value.with_traceback(tb)\n",
-      "  File \"/Users/jerryliu/Programming/gpt_index/.venv/lib/python3.10/site-packages/urllib3/connectionpool.py\", line 703, in urlopen\n",
-      "    httplib_response = self._make_request(\n",
-      "  File \"/Users/jerryliu/Programming/gpt_index/.venv/lib/python3.10/site-packages/urllib3/connectionpool.py\", line 449, in _make_request\n",
-      "    six.raise_from(e, None)\n",
-      "  File \"<string>\", line 3, in raise_from\n",
-      "  File \"/Users/jerryliu/Programming/gpt_index/.venv/lib/python3.10/site-packages/urllib3/connectionpool.py\", line 444, in _make_request\n",
-      "    httplib_response = conn.getresponse()\n",
-      "  File \"/Library/Frameworks/Python.framework/Versions/3.10/lib/python3.10/http/client.py\", line 1374, in getresponse\n",
-      "    response.begin()\n",
-      "  File \"/Library/Frameworks/Python.framework/Versions/3.10/lib/python3.10/http/client.py\", line 318, in begin\n",
-      "    version, status, reason = self._read_status()\n",
-      "  File \"/Library/Frameworks/Python.framework/Versions/3.10/lib/python3.10/http/client.py\", line 287, in _read_status\n",
-      "    raise RemoteDisconnected(\"Remote end closed connection without\"\n",
-      "urllib3.exceptions.ProtocolError: ('Connection aborted.', RemoteDisconnected('Remote end closed connection without response'))\n",
-      "\n",
-      "During handling of the above exception, another exception occurred:\n",
-      "\n",
-      "Traceback (most recent call last):\n",
-      "  File \"/Library/Frameworks/Python.framework/Versions/3.10/lib/python3.10/threading.py\", line 1016, in _bootstrap_inner\n",
-      "    self.run()\n",
-      "  File \"/Library/Frameworks/Python.framework/Versions/3.10/lib/python3.10/threading.py\", line 953, in run\n",
-      "    self._target(*self._args, **self._kwargs)\n",
-      "  File \"/Users/jerryliu/Programming/gpt_index/.venv/lib/python3.10/site-packages/weaviate/connect/connection.py\", line 276, in periodic_refresh_token\n",
-      "    self._session.token = self._session.refresh_token(\n",
-      "  File \"/Users/jerryliu/Programming/gpt_index/.venv/lib/python3.10/site-packages/authlib/oauth2/client.py\", line 252, in refresh_token\n",
-      "    return self._refresh_token(\n",
-      "  File \"/Users/jerryliu/Programming/gpt_index/.venv/lib/python3.10/site-packages/authlib/oauth2/client.py\", line 368, in _refresh_token\n",
-      "    resp = self._http_post(url, body=body, auth=auth, headers=headers, **kwargs)\n",
-      "  File \"/Users/jerryliu/Programming/gpt_index/.venv/lib/python3.10/site-packages/authlib/oauth2/client.py\", line 425, in _http_post\n",
-      "    return self.session.post(\n",
-      "  File \"/Users/jerryliu/Programming/gpt_index/.venv/lib/python3.10/site-packages/requests/sessions.py\", line 637, in post\n",
-      "    return self.request(\"POST\", url, data=data, json=json, **kwargs)\n",
-      "  File \"/Users/jerryliu/Programming/gpt_index/.venv/lib/python3.10/site-packages/authlib/integrations/requests_client/oauth2_session.py\", line 109, in request\n",
-      "    return super(OAuth2Session, self).request(\n",
-      "  File \"/Users/jerryliu/Programming/gpt_index/.venv/lib/python3.10/site-packages/requests/sessions.py\", line 589, in request\n",
-      "    resp = self.send(prep, **send_kwargs)\n",
-      "  File \"/Users/jerryliu/Programming/gpt_index/.venv/lib/python3.10/site-packages/requests/sessions.py\", line 703, in send\n",
-      "    r = adapter.send(request, **kwargs)\n",
-      "  File \"/Users/jerryliu/Programming/gpt_index/.venv/lib/python3.10/site-packages/requests/adapters.py\", line 501, in send\n",
-      "    raise ConnectionError(err, request=request)\n",
-      "requests.exceptions.ConnectionError: ('Connection aborted.', RemoteDisconnected('Remote end closed connection without response'))\n",
-      "sys:1: ResourceWarning: Unclosed socket <zmq.Socket(zmq.PUSH) at 0x2c4e7ebc0>\n",
-      "ResourceWarning: Enable tracemalloc to get the object allocation traceback\n"
-     ]
-    }
-   ],
-   "source": [
-    "index = VectorStoreIndex(\n",
-    "    [], storage_context=storage_context, service_context=service_context\n",
-    ")\n",
-    "\n",
-    "# add documents to index\n",
-    "for wiki_title in wiki_titles:\n",
-    "    index.insert(docs_dict[wiki_title])"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 68,
-   "id": "40f0717a-c868-4570-a8c6-57e3f50ce819",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "from llama_index.indices.vector_store.retrievers import VectorIndexAutoRetriever\n",
-    "from llama_index.vector_stores.types import MetadataInfo, VectorStoreInfo\n",
-    "\n",
-    "\n",
-    "vector_store_info = VectorStoreInfo(\n",
-    "    content_info=\"brief biography of celebrities\",\n",
-    "    metadata_info=[\n",
-    "        MetadataInfo(\n",
-    "            name=\"category\",\n",
-    "            type=\"str\",\n",
-    "            description=\"Category of the celebrity, one of [Sports, Entertainment, Business, Music]\",\n",
-    "        ),\n",
-    "        MetadataInfo(\n",
-    "            name=\"country\",\n",
-    "            type=\"str\",\n",
-    "            description=\"Country of the celebrity, one of [United States, Barbados, Portugal]\",\n",
-    "        ),\n",
-    "    ],\n",
-    ")\n",
-    "retriever = VectorIndexAutoRetriever(\n",
-    "    index,\n",
-    "    vector_store_info=vector_store_info,\n",
-    "    service_context=service_context,\n",
-    "    max_top_k=10000,\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 69,
-   "id": "249a8a74-5bde-4b42-9b43-62764484158b",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "INFO:llama_index.indices.vector_store.retrievers.auto_retriever.auto_retriever:Using query str: celebrity\n",
-      "Using query str: celebrity\n",
-      "INFO:llama_index.indices.vector_store.retrievers.auto_retriever.auto_retriever:Using filters: {'country': 'United States'}\n",
-      "Using filters: {'country': 'United States'}\n",
-      "INFO:llama_index.indices.vector_store.retrievers.auto_retriever.auto_retriever:Using top_k: 10000\n",
-      "Using top_k: 10000\n"
-     ]
-    }
-   ],
-   "source": [
-    "# NOTE: the \"set top-k to 10000\" is a hack to return all data.\n",
-    "# Right now auto-retrieval will always return a fixed top-k, there's a TODO to allow it to be None\n",
-    "# to fetch all data.\n",
-    "# So it's theoretically possible to have the LLM infer a None top-k value.\n",
-    "nodes = retriever.retrieve(\n",
-    "    \"Tell me about a celebrity from the United States, set top k to 10000\"\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 70,
-   "id": "56408d2f-f532-4010-bf81-7a8487433f9e",
-   "metadata": {
-    "scrolled": true,
-    "tags": []
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Number of nodes: 124\n",
-      "The Super Bowl commercial inspired the 1996 live action/animated film Space Jam, which starred Jordan and Bugs in a fictional story set during the former's first retirement from basketball.They have subsequently appeared together in several commercials for MCI.Jordan also made an appearance in the music video for Michael Jackson's \"Jam\" (1992).Since 2008, Jordan's yearly income from the endorsements is estimated to be over $40 million.In addition, when Jordan's power at the ticket gates was at its highest point, the Bulls regularly sold out both their home and road games.Due to this, Jordan set records in player salary by signing annual contracts worth in excess of US$30 million per season.An academic study found that Jordan's first NBA comeback resulted in an increase in the market capitalization of his client firms of more than $1 billion.Most of Jordan's endorsement deals, including his first deal with Nike, were engineered by his agent, David Falk.Jordan has described Falk as \"the best at what he does\" and that \"marketing-wise, he's great.He's the one who came up with the concept of 'Air Jordan'.\"\n",
-      "Musk blamed the estrangement of his daughter on what the Financial Times characterized as \"the supposed takeover of elite schools and universities by neo-Marxists.\"In 2008, Musk began dating English actress Talulah Riley.They married two years later at Dornoch Cathedral in Scotland.In 2012, the couple divorced, before remarrying the following year.After briefly filing for divorce in 2014, Musk finalized a second divorce from Riley in 2016.Musk then dated Amber Heard for several months in 2017; he had reportedly been pursuing her since 2012.Johnny Depp later accused Musk of having an affair with Heard while she was still married to Depp.Musk and Heard both denied the affair.In 2018, Musk and Canadian musician Grimes revealed that they were dating.Grimes gave birth to their son in May 2020.According to Musk and Grimes, his name was \"X Æ A-12\" (); however, the name would have violated California regulations as it contained characters that are not in the modern English alphabet, and was then changed to \"X Æ A-Xii\".This drew more confusion, as Æ is not a letter in the modern English alphabet.\n",
-      "=== Film and television ===\n",
-      "Jordan played himself in the 1996 comedy film Space Jam.The film received mixed reviews, but it was a box office success, making $230 million worldwide, and earned more than $1 billion through merchandise sales.In 2000, Jordan was the subject of an IMAX documentary about his career with the Chicago Bulls, especially the 1998 NBA playoffs, titled Michael Jordan to the Max.Two decades later, the same period of Jordan's life was covered in much greater and more personal detail by the Emmy Award-winning The Last Dance, a 10-part TV documentary which debuted on ESPN in April and May 2020.The Last Dance relied heavily on about 500 hours of candid film of Jordan's and his teammates' off-court activities which an NBA Entertainment crew had shot over the course of the 1997–98 NBA season for use in a documentary.The project was delayed for many years because Jordan had not yet given his permission for the footage to be used.\n",
-      "He was interviewed at three homes associated with the production and did not want cameras in his home or on his plane, as according to director Jason Hehir \"there are certain aspects of his life that he wants to keep private\".Jordan granted rapper Travis Scott permission to film a music video for his single \"Franchise\" at his home in Highland Park, Illinois.Jordan appeared in the 2022 miniseries The Captain, which follows the life and career of Derek Jeter.\n",
-      "\n",
-      "\n",
-      "=== Books ===\n",
-      "Jordan has authored several books focusing on his life, basketball career, and world view.\n",
-      "\n",
-      "Rare Air: Michael on Michael, with Mark Vancil and Walter Iooss (Harper San Francisco, 1993).\n",
-      "I Can't Accept Not Trying: Michael Jordan on the Pursuit of Excellence, with Mark Vancil and Sandro Miller (Harper San Francisco, 1994).\n",
-      "For the Love of the Game: My Story, with Mark Vancil (Crown Publishers, 1998).\n",
-      "Driven from Within, with Mark Vancil (Atria Books, 2005).\n",
-      "\"In April 2023, the government of the U.S. Virgin Islands sought to subpoena Musk for documents in a lawsuit alleging that JPMorgan Chase profited from Jeffrey Epstein's sex trafficking operation.In May, a judge granted the U.S. Virgin Islands' request to serve Musk electronically through Tesla after the U.S. territory had difficulty locating him.The efforts to subpoena Musk for documents do not implicate him in any wrongdoing and do not seek to have Musk testify under oath.\n",
-      "\n",
-      "\n",
-      "== Public perception ==\n",
-      "\n",
-      "Though Musk's ventures were influential within their own industries in the 2000s, he only became a public figure in the early 2010s.He has often been described as an eccentric who makes spontaneous and controversial statements, contrary to other billionaires who prefer reclusiveness to protect their businesses.Celebrated by fans and hated by critics, Musk was described by Vance as having become very polarizing because of his \"part philosopher, part troll\" role on Twitter.With Steve Jobs and Donald Trump, Musk served as inspiration for the characterization of Tony Stark in the Marvel film Iron Man (2008).Musk had a cameo appearance in the film's 2010 sequel, Iron Man 2.\n",
-      "Knafel claimed Jordan promised her $5 million for remaining silent and agreeing not to file a paternity suit after Knafel learned she was pregnant in 1991; a DNA test showed Jordan was not the father of the child.Jordan proposed to his longtime girlfriend, Cuban-American model Yvette Prieto, on Christmas 2011, and they were married on April 27, 2013, at Bethesda-by-the-Sea Episcopal Church.It was announced on November 30, 2013, that the two were expecting their first child together.On February 11, 2014, Prieto gave birth to identical twin daughters named Victoria and Ysabel.In 2019, Jordan became a grandfather when his daughter Jasmine gave birth to a son, whose father is professional basketball player Rakeem Christmas.\n",
-      "\n",
-      "\n",
-      "== Media figure and business interests ==\n",
-      "\n",
-      "\n",
-      "=== Endorsements ===\n",
-      "Jordan is one of the most marketed sports figures in history.He has been a major spokesman for such brands as Nike, Coca-Cola, Chevrolet, Gatorade, McDonald's, Ball Park Franks, Rayovac, Wheaties, Hanes, and MCI.\n",
-      "=== Business ventures ===\n",
-      "In June 2010, Jordan was ranked by Forbes as the 20th-most-powerful celebrity in the world, with $55 million earned between June 2009 and June 2010.According to Forbes, Jordan Brand generates $1 billion in sales for Nike.In June 2014, Jordan was named the first NBA player to become a billionaire, after he increased his stake in the Charlotte Hornets from 80% to 89.5%.On January 20, 2015, Jordan was honored with the Charlotte Business Journal's Business Person of the Year for 2014.In 2017, he became a part owner of the Miami Marlins of Major League Baseball.Forbes designated Jordan as the athlete with the highest career earnings in 2017.From his Jordan Brand income and endorsements, Jordan's 2015 income was an estimated $110 million, the most of any retired athlete.As of 2023, his net worth is estimated at $2 billion by Forbes, making him the fifth-richest African-American, behind Robert F. Smith, David Steward, Oprah Winfrey, and Rihanna.Jordan co-owns an automotive group which bears his name.\n",
-      "He reportedly hosted large, ticketed house parties to help pay for tuition, and wrote a business plan for an electronic book-scanning service similar to Google Books.In 1994, Musk held two internships in Silicon Valley: one at energy storage startup Pinnacle Research Institute, which investigated electrolytic ultracapacitors for energy storage, and another at Palo Alto–based startup Rocket Science Games.In 1995, he was accepted to a PhD program in materials science at Stanford University.However, Musk decided to join the Internet boom, dropping out two days after being accepted and applied for a job at Netscape, to which he reportedly never received a response.\n",
-      "\n",
-      "\n",
-      "== Business career ==\n",
-      "He starred as himself in the live-action/animation hybrid film Space Jam (1996) and was the central focus of the Emmy-winning documentary series The Last Dance (2020).He became part-owner and head of basketball operations for the Charlotte Hornets (then named the Bobcats) in 2006 and bought a controlling interest in 2010, before selling his majority stake in 2023, and he is also the owner of 23XI Racing in the NASCAR Cup Series.In 2016, he became the first billionaire player in NBA history.That year, President Barack Obama awarded him the Presidential Medal of Freedom.As of 2023, his net worth is estimated at $2 billion.\n",
-      "\n",
-      "\n",
-      "== Early life ==\n",
-      "Michael Jeffrey Jordan was born at Cumberland Hospital in the Fort Greene neighborhood of New York City's Brooklyn borough on February 17, 1963, to bank employee Deloris (née Peoples) and equipment supervisor James R. Jordan Sr.He has two older brothers, James R. Jordan Jr. and fellow basketball player Larry Jordan, as well as an older sister named Deloris and a younger sister named Roslyn.\n",
-      "The New York Post revealed that Musk's ex-wife Talulah Riley had encouraged Musk to purchase Twitter, specifically citing the Bee's ban.Following the acquisition, he made reinstatement of accounts like the Bee an immediate priority.The Independent reported that Musk has \"appealed to far-right activists and influencers and unleashed a wave of hate speech and abuse aimed at LGBT+ people\" since taking control of Twitter.On December 18, Musk posted a poll to his Twitter account asking users to decide whether he should step down as the head of Twitter, with 57.5% out of the more than 17.5 million votes supporting that decision.Musk then announced that he would resign as CEO \"as soon as I find someone foolish enough to take the job\".On May 11, 2023, Musk announced that he would be stepping down from the CEO position and instead moving to \"exec chair & CTO, overseeing product, software & sysops\" and announced the new CEO, former NBCUniversal executive Linda Yaccarino.\n",
-      "Musk has made cameos and appearances in other films such as Machete Kills (2013), Why Him?(2016), and Men in Black: International (2019).Television series in which he has appeared include The Simpsons (\"The Musk Who Fell to Earth\", 2015), The Big Bang Theory (\"The Platonic Permutation\", 2015), South Park (\"Members Only\", 2016), Young Sheldon (\"A Patch, a Modem, and a Zantac®\", 2017), Rick and Morty (\"One Crew over the Crewcoo's Morty\", 2019), and Saturday Night Live (2021).He contributed interviews to the documentaries Racing Extinction (2015) and the Werner Herzog-directed Lo and Behold (2016).Musk was elected a Fellow of the Royal Society (FRS) in 2018.In 2015, he received an honorary doctorate in engineering and technology from Yale University and IEEE Honorary Membership.\n",
-      "In March 2019, Musk was later one of the 187 people who received various honors conferred by the King of Thailand for involvement in the rescue effort.Soon after the rescue, Vernon Unsworth, a British recreational caver who had been exploring the cave for the previous six years and played a key advisory role in the operation, criticized the submarine on CNN as amounting to nothing more than a public relations effort with no chance of success, maintaining that Musk \"had no conception of what the cave passage was like\" and \"can stick his submarine where it hurts\".Musk asserted on Twitter that the device would have worked and referred to Unsworth as a \"pedo guy\".He deleted the tweets, and apologized, and he deleted his responses to critical tweets from Cher Scarlett, a software engineer, which had caused his followers to harass her.In an email to BuzzFeed News, Musk later called Unsworth a \"child rapist\" and said that he had married a child.In September, Unsworth filed a defamation suit in the District Court for the Central District of California.\n",
-      "== See also ==\n",
-      "Forbes' list of the world's highest-paid athletes\n",
-      "List of athletes who came out of retirement\n",
-      "List of NBA teams by single season win percentage\n",
-      "Michael Jordan's Restaurant\n",
-      "Michael Jordan: Chaos in the Windy City\n",
-      "Michael Jordan in Flight\n",
-      "NBA 2K11\n",
-      "NBA 2K12\n",
-      "\n",
-      "\n",
-      "== Notes ==\n",
-      "\n",
-      "\n",
-      "== References ==\n",
-      "\n",
-      "\n",
-      "== Sources ==\n",
-      "Condor, Bob (1998).Michael Jordan's 50 Greatest Games.Carol Publishing Group.ISBN 978-0-8065-2030-8.Halberstam, David (2000).Playing for Keeps: Michael Jordan and the World He Made.Broadway Books.ISBN 978-0-7679-0444-5.Jordan, Michael (1998).For the Love of the Game: My Story.New York City: Crown Publishers.ISBN 978-0-609-60206-5.Kotler, Philip; Rein, Irving J.; Shields, Ben (2006).The Elusive Fan: Reinventing Sports in a Crowded Marketplace.The McGraw-Hill Companies.ISBN 978-0-07-149114-3.\n",
-      "23 retired by the North Carolina Tar HeelsHigh schoolMcDonald's All-American – 1981\n",
-      "Parade All-American First Team – 1981Halls of FameTwo-time Naismith Memorial Basketball Hall of Fame inductee:\n",
-      "Class of 2009 – individual\n",
-      "Class of 2010 – as a member of the \"Dream Team\"\n",
-      "United States Olympic Hall of Fame – Class of 2009 (as a member of the \"Dream Team\")\n",
-      "North Carolina Sports Hall of Fame – Class of 2010\n",
-      "Two-time FIBA Hall of Fame inductee:\n",
-      "Class of 2015 – individual\n",
-      "Class of 2017 – as a member of the \"Dream Team\"MediaThree-time Associated Press Athlete of the Year – 1991, 1992, 1993\n",
-      "Sports Illustrated Sportsperson of the Year – 1991\n",
-      "Ranked No.1 by Slam magazine's \"Top 50 Players of All-Time\"\n",
-      "Ranked No.1 by ESPN SportsCentury's \"Top North American Athletes of the 20th Century\"\n",
-      "10-time ESPY Award winner (in various categories)\n",
-      "1997 Marca Leyenda winnerNational2016 Presidential Medal of FreedomState/localStatue inside the United Center\n",
-      "Section of Madison Street in Chicago renamed Michael Jordan Drive – 1994\n",
-      "=== Music ===\n",
-      "In 2019, Musk, through Emo G Records, released a rap track, \"RIP Harambe\", on SoundCloud. The track, which refers to the killing of Harambe the gorilla and the subsequent Internet sensationalism surrounding the event, was performed by Yung Jake, written by Yung Jake and Caroline Polachek, and produced by BloodPop. The following year, Musk released an EDM track, \"Don't Doubt Ur Vibe\", featuring his own lyrics and vocals. While Guardian critic Alexi Petridis described it as \"indistinguishable... from umpteen competent but unthrilling bits of bedroom electronica posted elsewhere on Soundcloud\", TechCrunch said it was \"not a bad representation of the genre\".\n",
-      "Also in July 2022, The Wall Street Journal reported that Musk allegedly had an affair with Nicole Shanahan, the wife of Google co-founder Sergey Brin, in 2021, leading to their divorce the following year.Musk denied the report.\n",
-      "\n",
-      "\n",
-      "=== Legal matters ===\n",
-      "\n",
-      "In May 2022, Business Insider cited an anonymous friend of an unnamed SpaceX contract flight attendant, alleging that Musk engaged in sexual misconduct in 2016.The source stated that in November 2018, Musk, SpaceX, and the former flight attendant entered into a severance agreement granting the attendant a $250,000 payment in exchange for a promise not to sue over the claims.Musk responded, \"If I were inclined to engage in sexual harassment, this is unlikely to be the first time in my entire 30-year career that it comes to light\".He accused the article from Business Insider of being a \"politically motivated hit piece\".After the release of the Business Insider article, Tesla's stock fell by more than 6%, decreasing Musk's net worth by $10 billion.Barron's wrote \"...some investors considered key-man risk – the danger that a company could be badly hurt by the loss of one individual.\n",
-      "=== Works cited ===\n",
-      "Belfiore, Michael (2007). Rocketeers. New York: HarperCollins. ISBN 9780061149023.\n",
-      "Berger, Eric (2021). Liftoff. William Morrow and Company. ISBN 9780062979971.\n",
-      "Jackson, Erik (2004). The PayPal Wars: Battles with eBay, the Media, the Mafia, and the Rest of Planet Earth. Los Angeles: World Ahead Publishing. ISBN 9780974670102.\n",
-      "Kidder, David; Hoffman, Reid (2013). The Startup Playbook: Secrets of the Fastest Growing Start-Ups from the founding Entrepreneurs. San Francisco: Chronicle Books. ISBN 9781452105048.\n",
-      "Vance, Ashlee (2017) [2015]. Elon Musk: Tesla, SpaceX, and the Quest for a Fantastic Future (2nd ed.). New York: Ecco. ISBN 9780062301253.\n",
-      "They had two sons, Jeffrey and Marcus, and a daughter, Jasmine.The Jordans filed for divorce on January 4, 2002, citing irreconcilable differences, but reconciled shortly thereafter.They again filed for divorce and were granted a final decree of dissolution of marriage on December 29, 2006, commenting that the decision was made \"mutually and amicably\".It is reported that Juanita received a $168 million settlement (equivalent to $244 million in 2022), making it the largest celebrity divorce settlement on public record at the time.In 1991, Jordan purchased a lot in Highland Park, Illinois, where he planned to build a 56,000-square-foot (5,200 m2) mansion.It was completed in 1995.He listed the mansion for sale in 2012.He also owns homes in North Carolina and Jupiter Island, Florida.On July 21, 2006, a judge in Cook County, Illinois, determined that Jordan did not owe his alleged former lover Karla Knafel $5 million in a breach of contract claim.Jordan had allegedly paid Knafel $250,000 to keep their relationship a secret.\n",
-      "2003\n",
-      "Three-time NBA All-Star Game MVP – 1988, 1996, 1998\n",
-      "10-time All-NBA First Team – 1987–1993, 1996–1998\n",
-      "One-time All-NBA Second Team – 1985\n",
-      "Nine-time NBA All-Defensive First Team – 1988–1993, 1996–1998\n",
-      "NBA All-Rookie First Team – 1985\n",
-      "Two-time NBA Slam Dunk Contest champion – 1987, 1988\n",
-      "Two-time IBM Award winner – 1985, 1989\n",
-      "Named one of the 50 Greatest Players in NBA History in 1996\n",
-      "Selected on the NBA 75th Anniversary Team in 2021\n",
-      "No.23 retired by the Chicago Bulls\n",
-      "No.\n",
-      "Michael Jeffrey Jordan (born February 17, 1963), also  known by his initials MJ, is an American former professional basketball player and businessman.The official National Basketball Association (NBA) website states: \"By acclamation, Michael Jordan is the greatest basketball player of all time.\"He played fifteen seasons in the NBA, winning six NBA championships with the Chicago Bulls.He was integral in popularizing the sport of basketball and the NBA around the world in the 1980s and 1990s, becoming a global cultural icon.Jordan played college basketball for three seasons under coach Dean Smith with the North Carolina Tar Heels.As a freshman, he was a member of the Tar Heels' national championship team in 1982.Jordan joined the Bulls in 1984 as the third overall draft pick and quickly emerged as a league star, entertaining crowds with his prolific scoring while gaining a reputation as one of the game's best defensive players.His leaping ability, demonstrated by performing slam dunks from the free-throw line in Slam Dunk Contests, earned him the nicknames \"Air Jordan\" and \"His Airness\".Jordan won his first NBA title with the Bulls in 1991 and followed that achievement with titles in 1992 and 1993, securing a three-peat.\n",
-      "== Personal life ==\n",
-      "From the early 2000s until late 2020, Musk resided in California, where both Tesla and SpaceX were founded. In 2020, he relocated to Texas, saying that California had become \"complacent\" about its economic success. While hosting Saturday Night Live in May 2021, Musk revealed that he has Asperger syndrome. Musk is also a practitioner of Brazilian jiu-jitsu.\n",
-      "\n",
-      "\n",
-      "=== Relationships and children ===\n",
-      "Musk met his first wife, Canadian author Justine Wilson, while attending Queen's University in Ontario, Canada; and they married in 2000.In 2002, their first child died of sudden infant death syndrome at the age of 10 weeks.After his death, the couple decided to use IVF to continue their family.They had twins in 2004 followed by triplets in 2006.The couple divorced in 2008 and shared custody of their children.In 2022, one of the twins officially changed her name to reflect her gender identity as a trans woman, and to use Wilson as her last name because she no longer wished to be associated with Musk.\n",
-      "In the September 1996 issue of Sport, which was the publication's 50th-anniversary issue, Jordan was named the greatest athlete of the past 50 years.Jordan's athletic leaping ability, highlighted in his back-to-back Slam Dunk Contest championships in 1987 and 1988, is credited by many people with having influenced a generation of young players.Several NBA players, including James and Dwyane Wade, have stated that they considered Jordan their role model while they were growing up.In addition, commentators have dubbed a number of next-generation players \"the next Michael Jordan\" upon their entry to the NBA, including Penny Hardaway, Grant Hill, Allen Iverson, Bryant, Vince Carter, James, and Wade.Some analysts, such as The Ringer's Dan Devine, drew parallels between Jordan's experiment at point guard in the 1988–89 season and the modern NBA; for Devine, it \"inadvertently foreshadowed the modern game's stylistic shift toward monster-usage primary playmakers\", such as Russell Westbrook, James Harden, Luka Dončić, and James.Don Nelson stated: \"I would've been playing him at point guard the day he showed up as a rookie.\n",
-      "In his defense, Musk argued that \"'pedo guy' was a common insult used in South Africa when I was growing up ... synonymous with 'creepy old man' and is used to insult a person's appearance and demeanor\".The defamation case began in December 2019, with Unsworth seeking $190 million in damages.During the trial Musk apologized to Unsworth again for the tweet.On December 6, the jury found in favor of Musk and ruled he was not liable.\n",
-      "Elon Reeve Musk ( EE-lon; born June 28, 1971) is a business magnate and investor.Musk is the founder, chairman, CEO and chief technology officer of SpaceX;  angel investor, CEO, product architect and former chairman of Tesla, Inc.; owner, chairman and CTO of X Corp.; founder of the Boring Company; co-founder of Neuralink and OpenAI; and president of the Musk Foundation.He is the wealthiest person in the world, with an estimated net worth of US$217 billion as of August 2023, according to the Bloomberg Billionaires Index, and $219 billion according to Forbes, primarily from his ownership stakes in both Tesla and SpaceX.Musk was born in Pretoria, South Africa, and briefly attended the University of Pretoria before immigrating to Canada at age 18, acquiring citizenship through his Canadian-born mother.Two years later, he matriculated at Queen's University in Kingston, Ontario.Musk later transferred to the University of Pennsylvania, and received bachelor's degrees in economics and physics there.He moved to California in 1995 to attend Stanford University.\n",
-      "He also endorsed Kanye West's 2020 presidential campaign.He said he voted for Joe Biden in the 2020 U.S. presidential election.In 2022, Musk said that he could \"no longer support\" the Democrats because they are the \"party of division & hate\", and wrote a tweet encouraging \"independent-minded voters\" to vote Republican in the 2022 U.S. elections, which was an outlier among social media executives who typically avoid partisan political advocacy.He has supported Republican Ron DeSantis for the 2024 U.S. presidential election, and Twitter hosted DeSantis's campaign announcement on a Twitter Spaces event As of May 2023, Musk was declining to endorse any specific candidate.Musk opposes a \"billionaire's tax\", and has argued on Twitter with more left-leaning Democratic politicians such as Bernie Sanders, Alexandria Ocasio-Cortez, and Elizabeth Warren.He has raised questions about the Black Lives Matter protests, partially based on the fact that the phrase \"Hands up, don't shoot\" was made up.\n",
-      "Two months later, Musk contracted COVID-19 and suggested his COVID-19 rapid antigen test results were dubious, after which the phrase \"Space Karen\" trended on Twitter, in reference to Musk.However, in December 2021, Musk revealed that he and his eligible children had received the vaccine.\n",
-      "\n",
-      "\n",
-      "=== Finance ===\n",
-      "Musk said that the U.S. government should not provide subsidies to companies, but impose a carbon tax to discourage poor behavior.The free market, in his view, would achieve the best solution, and producing environmentally unfriendly vehicles should have consequences.Tesla has received billions of dollars in subsidies.In addition, Tesla made large sums from government-initiated systems of zero-emissions credits offered in California and at the United States federal level, which facilitated initial consumer adoption of Tesla vehicles, as the tax credits given by governments enabled Tesla's battery electric vehicles to be price-competitive, in comparison with existing lower-priced internal combustion engine vehicles.\n",
-      "== Personal views and Twitter (later X) usage ==\n",
-      "\n",
-      "Since joining Twitter (now known as X) in 2009, Musk has been an active user and has over 100 million followers as of June 2022. He posts memes, promotes business interests, and comments on contemporary political and cultural issues. Musk's statements have provoked controversy, such as for mocking preferred gender pronouns, and comparing Canadian prime minister Justin Trudeau to Adolf Hitler. The New York Times describes his contributions to international relations as \"chaotic\", and critics of Musk argue that there is a lack of separation between his opinions and his business interests. As CEO of Twitter, Musk emerged as a source of misinformation, for example by suggesting online details about mass murderer Mauricio Garcia's apparent interest in Nazism could have been planted as part of a psyop. Allegations of him being transphobic appeared as well in response to actions taken by Twitter under his guidance. The Israel government and several media outlets accused Musk of antisemitism due to him spreading George Soros conspiracy theories, although some Israeli officials defended Musk.\n",
-      "\n",
-      "\n",
-      "=== Existential threats ===\n",
-      "Musk has been described as believing in longtermism, emphasizing the needs of future populations.\n",
-      "=== Tham Luang cave rescue and defamation case ===\n",
-      "\n",
-      "In July 2018, Musk arranged for his employees to build a mini-submarine to assist the rescue of children trapped in a flooded cavern in Thailand.Richard Stanton, leader of the international rescue diving team, urged Musk to facilitate the construction of the vehicle as a back-up, in case flooding worsened.Engineers at SpaceX and the Boring Company built the mini-submarine from a Falcon 9 liquid oxygen transfer tube in eight hours and personally delivered it to Thailand.By this time, however, eight of the 12 children, had already been rescued, the rescuers employing full face masks, oxygen, and anesthesia; consequently, Thai authorities declined to use the submarine.\n",
-      "==== First retirement and stint in Minor League Baseball (1993–1995) ====\n",
-      "\n",
-      "On October 6, 1993, Jordan announced his retirement, saying that he lost his desire to play basketball.Jordan later said that the murder of his father three months earlier helped shape his decision.James R. Jordan Sr. was murdered on July 23, 1993, at a highway rest area in Lumberton, North Carolina, by two teenagers, Daniel Green and Larry Martin Demery, who carjacked his Lexus bearing the license plate \"UNC 0023\".His body, dumped in a South Carolina swamp, was not discovered until August 3.Green and Demery were found after they made calls on James Jordan's cell phone, convicted at a trial, and sentenced to life in prison.Jordan was close to his father; as a child, he imitated the way his father stuck out his tongue while absorbed in work.He later adopted it as his own signature, often displaying it as he drove to the basket.In 1996, he founded a Chicago-area Boys & Girls Club and dedicated it to his father.\n",
-      "The child was eventually named X AE A-XII Musk, with \"X\" as a first name, \"AE A-XII\" as a middle name, and \"Musk\" as surname.In December 2021, Grimes and Musk had a second child, a daughter named Exa Dark Sideræl Musk (nicknamed \"Y\"), born via surrogacy.Despite the pregnancy, Musk confirmed reports that the couple were \"semi-separated\" in September 2021; in an interview with Time in December 2021, he said he was single.In March 2022, Grimes said of her relationship with Musk: \"I would probably refer to him as my boyfriend, but we're very fluid.\"Later that month, Grimes tweeted that she and Musk had broken up again but remained on good terms.In July 2022, Insider published court documents revealing that Musk had had twins with Shivon Zilis, director of operations and special projects at Neuralink, in November 2021.They were born weeks before Musk and Grimes had their second child via surrogate in December.The news \"raise[d] questions about workplace ethics\", given that Zilis directly reported to Musk.\n",
-      "The company has a Nissan dealership in Durham, North Carolina, acquired in 1990, and formerly had a Lincoln–Mercury dealership from 1995 until its closure in June 2009.The company also owned a Nissan franchise in Glen Burnie, Maryland.The restaurant industry is another business interest of Jordan's.Restaurants he has owned include a steakhouse in New York City's Grand Central Terminal, among others; that restaurant closed in 2018.Jordan is the majority investor in a golf course, Grove XXIII, under construction in Hobe Sound, Florida.In September 2020, Jordan became an investor and advisor for DraftKings.\n",
-      "\n",
-      "\n",
-      "=== Philanthropy ===\n",
-      "From 2001 to 2014, Jordan hosted an annual golf tournament, the Michael Jordan Celebrity Invitational, that raised money for various charities.In 2006, Jordan and his wife Juanita pledged $5 million to Chicago's Hales Franciscan High School.The Jordan Brand has made donations to Habitat for Humanity and a Louisiana branch of the Boys & Girls Clubs of America.The Make-A-Wish Foundation named Jordan its Chief Wish Ambassador in 2008.In 2013, he granted his 200th wish for the organization.\n",
-      "After Jordan received word of his acceptance into the Hall of Fame, he selected Class of 1996 member David Thompson to present him.As Jordan would later explain during his induction speech in September 2009, he was not a fan of the Tar Heels when growing up in North Carolina but greatly admired Thompson, who played for the rival NC State Wolfpack.In September, he was inducted into the Hall with several former Bulls teammates in attendance, including Scottie Pippen, Dennis Rodman, Charles Oakley, Ron Harper, Steve Kerr, and Toni Kukoč.Dean Smith and Doug Collins, two of Jordan's former coaches, were also among those present.His emotional reaction during his speech when he began to cry was captured by Associated Press photographer Stephan Savoia and would later go viral on social media as the \"Crying Jordan\" Internet meme.In 2016, President Barack Obama honored Jordan with the Presidential Medal of Freedom.In October 2021, Jordan was named to the NBA 75th Anniversary Team.In September 2022, Jordan's jersey in which he played the opening game of the 1998 NBA Finals was sold for $10.1 million, making it the most expensive game-worn sports memorabilia in history.\n",
-      "Awards for his contributions to the development of the Falcon rockets include the American Institute of Aeronautics and Astronautics George Low Transportation Award in 2008, the Fédération Aéronautique Internationale Gold Space Medal in 2010, and the Royal Aeronautical Society Gold Medal in 2012.Time has listed Musk as one of the most influential people in the world on four occasions in 2010, 2013, 2018, and 2021.Musk was selected as Time's \"Person of the Year\" for 2021.Time editor-in-chief Edward Felsenthal wrote that \"Person of the Year is a marker of influence, and few individuals have had more influence than Musk on life on Earth, and potentially life off Earth too\".In February 2022, Musk was elected as a member of the National Academy of Engineering.\n",
-      "\n",
-      "\n",
-      "== Notes and references ==\n",
-      "\n",
-      "\n",
-      "=== Notes ===\n",
-      "\n",
-      "\n",
-      "=== Citations ===\n",
-      "Kruger, Mitchell (2003).One Last Shot: The Story of Michael Jordan's Comeback.New York City: St. Martin's Paperbacks.ISBN 978-0-312-99223-1.Lazenby, Roland (2014).Michael Jordan: The Life.New York City: Little, Brown and Company.ISBN 978-0-316-19477-8.LaFeber, Walter (2002).Michael Jordan and the New Global Capitalism.W. W. Norton.ISBN 978-0-393-32369-6.Markovits, Andrei S.; Rensman, Lars (June 3, 2010).Gaming the World: How Sports are Reshaping Global Politics and Culture.Princeton University Press.ISBN 978-0-691-13751-3.Porter, David L. (2007).Michael Jordan: A Biography.Greenwood Publishing Group.ISBN 978-0-313-33767-3.The Sporting News Official NBA Register 1994–95 (1994).The Sporting News.ISBN 978-0-89204-501-3.\n",
-      "His mother, Maye Musk (née Haldeman), is a model and dietitian born in Saskatchewan, Canada, and raised in South Africa.His father, Errol Musk, is a South African electromechanical engineer, pilot, sailor, consultant, and property developer, who partly owned a Zambian emerald mine near Lake Tanganyika.Musk has a younger brother, Kimbal, and a younger sister, Tosca.Musk's family was wealthy during his youth.His father was elected to the Pretoria City Council as a representative of the anti-apartheid Progressive Party and has said that his children shared their father's dislike of apartheid.His maternal grandfather, Joshua Haldeman, was an American-born Canadian who took his family on record-breaking journeys to Africa and Australia in a single-engine Bellanca airplane.After his parents divorced in 1980, Musk chose to live primarily with his father.Musk later regretted his decision and became estranged from his father.He has a paternal half-sister and a half-brother.Maye Musk has said of her son that he \"was shy and awkward at school\" and \"didn't have many friends\".\n",
-      "He holds the NBA records for career regular season scoring average (30.1 points per game) and career playoff scoring average (33.4 points per game).In 1999, he was named the 20th century's greatest North American athlete by ESPN and was second to Babe Ruth on the Associated Press' list of athletes of the century.Jordan was twice inducted into the Naismith Memorial Basketball Hall of Fame, once in 2009 for his individual career, and again in 2010 as part of the 1992 United States men's Olympic basketball team (\"The Dream Team\").He became a member of the United States Olympic Hall of Fame in 2009, a member of the North Carolina Sports Hall of Fame in 2010, and an individual member of the FIBA Hall of Fame in 2015 and a \"Dream Team\" member in 2017.In 2021, he was named to the NBA 75th Anniversary Team.One of the most effectively marketed athletes of his generation, Jordan is known for his product endorsements.He fueled the success of Nike's Air Jordan sneakers, which were introduced in 1984 and remain popular today.\n",
-      "This included about $12.5 billion in loans against his Tesla stock and $21 billion in equity financing.Tesla's stock market value sank by over $100 billion the next day in reaction to the deal, causing Musk to lose around $30 billion of his net worth.He subsequently tweeted criticism of Twitter executive Vijaya Gadde's policies to his 86 million followers, which led to some of them engaging in sexist and racist harassment against her.Exactly a month after announcing the takeover, Musk stated that the deal was \"on hold\" following a report that 5% of Twitter's daily active users were spam accounts, causing Twitter shares to drop more than 10 percent.Although he initially affirmed his commitment to the acquisition, he sent notification of his termination of the deal in July; Twitter's Board of Directors responded that they were committed to holding him to the transaction.On July 12, 2022, Twitter formally sued Musk in the Chancery Court of Delaware for breaching a legally binding agreement to purchase Twitter.In October 2022, Musk reversed again, offering to purchase Twitter at $54.20 per share.\n",
-      "Coincidentally, Jordan and the Bulls met Barkley and his Phoenix Suns in the 1993 NBA Finals.The Bulls won their third NBA championship on a game-winning shot by John Paxson and a last-second block by Horace Grant, but Jordan was once again Chicago's leader.He averaged a Finals-record 41.0 ppg during the six-game series, and became the first player in NBA history to win three straight Finals MVP awards.He scored more than 30 points in every game of the series, including 40 or more points in four consecutive games.With his third Finals triumph, Jordan capped off a seven-year run where he attained seven scoring titles and three championships, but there were signs that Jordan was tiring of his massive celebrity and all of the non-basketball hassles in his life.\n",
-      "\n",
-      "\n",
-      "==== Gambling ====\n",
-      "During the Bulls' 1993 NBA playoffs, Jordan was seen gambling in Atlantic City, New Jersey, the night before Game 2 of the Eastern Conference Finals against the New York Knicks.\n",
-      "The previous year, he admitted that he had to cover $57,000 in gambling losses, and author Richard Esquinas wrote a book in 1993 claiming he had won $1.25 million from Jordan on the golf course.David Stern, the commissioner of the NBA, denied in 1995 and 2006 that Jordan's 1993 retirement was a secret suspension by the league for gambling, but the rumor spread widely.In 2005, Jordan discussed his gambling with Ed Bradley of 60 Minutes and admitted that he made reckless decisions.Jordan stated: \"Yeah, I've gotten myself into situations where I would not walk away and I've pushed the envelope.Is that compulsive?Yeah, it depends on how you look at it.If you're willing to jeopardize your livelihood and your family, then yeah.\"When Bradley asked him if his gambling ever got to the level where it jeopardized his livelihood or family, Jordan replied: \"No.\"In 2010, Ron Shelton, director of Jordan Rides the Bus, said that he began working on the documentary believing that the NBA had suspended him, but that research \"convinced [him it] was nonsense\".\n",
-      "The media, hoping to recreate a Magic–Bird rivalry, highlighted the similarities between \"Air\" Jordan and Clyde \"The Glide\" during the pre-Finals hype.In the first game, Jordan scored a Finals-record 35 points in the first half, including a record-setting six three-point field goals.After the sixth three-pointer, he jogged down the court shrugging as he looked courtside.Marv Albert, who broadcast the game, later stated that it was as if Jordan was saying: \"I can't believe I'm doing this.\"The Bulls went on to win Game 1 and defeat the Blazers in six games.Jordan was named Finals MVP for the second year in a row, and finished the series averaging 35.8 ppg, 4.8 rpg, and 6.5 apg, while shooting 52.6% from the floor.In the 1992–93 season, despite a 32.6 ppg, 6.7 rpg, and 5.5 apg campaign, including a second-place finish in Defensive Player of the Year voting, Jordan's streak of consecutive MVP seasons ended, as he lost the award to his friend Charles Barkley, which upset him.\n",
-      "While this resulted in saved costs for SpaceX's rocket, vertical integration has caused many usability problems for Tesla's software.Musk's handling of employees—whom he communicates with directly through mass emails—has been characterized as \"carrot and stick\", rewarding those \"who offer constructive criticism\" while also being known to impulsively threaten, swear at, and fire his employees.Musk said he expects his employees to work for long hours, sometimes for 80 hours per week.He has his new employees sign strict non-disclosure agreements and often fires in sprees, such as during the Model 3 \"production hell\" in 2018.In 2022, Musk revealed plans to fire 10 percent of Tesla's workforce, due to his concerns about the economy.That same month, he suspended remote work at SpaceX and Tesla and threatened to fire employees who do not work 40 hours per week in the office.Musk's leadership has been praised by some, who credit it with the success of Tesla and his other endeavors, and criticized by others, who see him as callous and his managerial decisions as \"show[ing] a lack of human understanding.\"The 2021 book Power Play contains anecdotes of Musk berating employees.\n",
-      "As a senior, he was selected to play in the 1981 McDonald's All-American Game and scored 30 points, after averaging 27 ppg, 12 rebounds (rpg), and six assists per game (apg) for the season.He was recruited by numerous college basketball programs, including Duke, North Carolina, South Carolina, Syracuse, and Virginia.In 1981, he accepted a basketball scholarship to the University of North Carolina at Chapel Hill, where he majored in cultural geography.\n",
-      "=== 2018 Joe Rogan podcast appearance ===\n",
-      "In 2018, Musk appeared on The Joe Rogan Experience podcast and discussed various topics for over two hours. During the interview, Musk sampled a puff from a cigar consisting, the host claimed, of tobacco laced with cannabis. Tesla stock dropped after the incident, which coincided with the confirmation of the departure of Tesla's vice president of worldwide finance earlier that day. Fortune wondered if the cannabis use could have ramifications for SpaceX contracts with the United States Air Force, though an Air Force spokesperson told The Verge that there was no investigation and that the Air Force was still determining the facts. In 2022, Musk claimed that he and other Space-X employees were subjected to random drug tests for about a year following the incident. In a 60 Minutes interview, Musk said of the incident: \"I do not smoke pot. As anybody who watched that podcast could tell, I have no idea how to smoke pot.\"\n",
-      "=== Private jet ===\n",
-      "\n",
-      "In 2003, Musk said his favorite plane he owned was an L-39 Albatros. He uses a private jet owned by Falcon Landing LLC, a SpaceX-linked company, and acquired a second jet in August 2020. His heavy use of the jet—it flew over 150,000 miles in 2018—and the consequent fossil fuel usage has received criticism.His flight usage is tracked on social media through ElonJet. The Twitter version of the account was blocked in December 2022, after Musk claimed that his son X AE A-XII had been harassed by a stalker after the account posted the airport at which his jet had landed. This led to Musk banning the ElonJet account on Twitter, as well as the accounts of journalists that posted stories regarding the incident, including Donie O'Sullivan, Keith Olbermann, and journalists from The New York Times, The Washington Post, CNN, and The Intercept. Musk equated the reporting to doxxing. The police do not believe there is a link between the account and alleged stalker. Musk later took a Twitter poll on whether the journalists' accounts should be reinstated, which resulted in reinstating the accounts.\n",
-      "\"Although Jordan was a well-rounded player, his \"Air Jordan\" image is also often credited with inadvertently decreasing the jump shooting skills, defense, and fundamentals of young players, a fact Jordan himself has lamented, saying: \"I think it was the exposure of Michael Jordan; the marketing of Michael Jordan.Everything was marketed towards the things that people wanted to see, which was scoring and dunking.That Michael Jordan still played defense and an all-around game, but it was never really publicized.\"During his heyday, Jordan did much to increase the status of the game; television ratings increased only during his time in the league.The popularity of the NBA in the U.S. declined after his last title.As late as 2022, NBA Finals television ratings had not returned to the level reached during his last championship-winning season.In August 2009, the Naismith Memorial Basketball Hall of Fame in Springfield, Massachusetts, opened a Michael Jordan exhibit that contained items from his college and NBA careers as well as from the 1992 \"Dream Team\"; the exhibit also has a batting baseball glove to signify Jordan's short career in the Minor League Baseball.\n",
-      "Jordan finished among the top three in regular season MVP voting 10 times.He was named one of the 50 Greatest Players in NBA History in 1996, and selected to the NBA 75th Anniversary Team in 2021.Jordan is one of only seven players in history to win an NCAA championship, an NBA championship, and an Olympic gold medal (doing so twice with the 1984 and 1992 U.S. men's basketball teams).Since 1976, the year of the ABA–NBA merger, Jordan and Pippen are the only two players to win six NBA Finals playing for one team.In the All-Star Game fan ballot, Jordan received the most votes nine times, more than any other player.Many of Jordan's contemporaries have said that Jordan is the greatest basketball player of all time.In 1999, an ESPN survey of journalists, athletes and other sports figures ranked Jordan the greatest North American athlete of the 20th century, above Babe Ruth and Muhammad Ali.Jordan placed second to Ruth in the Associated Press' December 1999 list of 20th century athletes.In addition, the Associated Press voted him the greatest basketball player of the 20th century.Jordan has also appeared on the front cover of Sports Illustrated a record 50 times.\n",
-      "James Jr. became command sergeant major of the 35th Signal Brigade of the U.S. Army's XVIII Airborne Corps and retired in 2006.In 1968, Jordan moved with his family to Wilmington, North Carolina.He attended Emsley A. Laney High School in Wilmington, where he highlighted his athletic career by playing basketball, baseball, and football.He tried out for the basketball varsity team during his sophomore year, but at a height of 5 feet 11 inches (1.80 m), he was deemed too short to play at that level.His taller friend Harvest Leroy Smith was the only sophomore to make the team.Motivated to prove his worth, Jordan became the star of Laney's junior varsity team and tallied some 40-point games.The following summer, he grew four inches (10 cm) and trained rigorously.Upon earning a spot on the varsity roster, he averaged more than 25 points per game (ppg) over his final two seasons of high school play.\n",
-      "23 retired by the Miami Heat\n",
-      "NBA MVP trophy renamed in Jordan's honor (\"Michael Jordan Trophy\") in 2022USA BasketballTwo-time Olympic gold medal winner – 1984, 1992\n",
-      "Tournament of the Americas gold medal winner – 1992\n",
-      "Pan American Games gold medal winner – 1983\n",
-      "Two-time USA Basketball Male Athlete of the Year – 1983, 1984NCAANCAA national championship – 1981–82\n",
-      "ACC Rookie of the Year – 1981–82\n",
-      "Two-time Consensus NCAA All-American First Team – 1982–83, 1983–84\n",
-      "ACC Men's Basketball Player of the Year – 1983–84\n",
-      "ACC Athlete of the Year – 1984\n",
-      "USBWA College Player of the Year – 1983–84\n",
-      "Naismith College Player of the Year – 1983–84\n",
-      "Adolph Rupp Trophy – 1983–84\n",
-      "John R. Wooden Award – 1983–84\n",
-      "Two-time Sporting News National Player of  the Year (1983, 1984)\n",
-      "No.\n",
-      "He spread misinformation about the virus, including promoting a widely discredited paper on the benefits of chloroquine and claiming that COVID-19 death statistics were inflated.In March 2020, Musk stated, \"The coronavirus panic is dumb.\"In an email to Tesla employees, Musk referred to COVID-19 as a \"specific form of the common cold\" and predicted that confirmed COVID-19 cases would not exceed 0.1% of the U.S. population.On March 19, 2020, Musk predicted that there would be \"probably close to zero new cases in [the U.S.] by end of April\".Politico labeled this statement one of \"the most audacious, confident, and spectacularly incorrect prognostications [of 2020]\".Musk also claimed falsely that children \"are essentially immune\" to COVID-19.Musk condemned COVID-19 lockdowns and initially refused to close the Tesla Fremont Factory in March 2020, defying the local shelter-in-place order.\n",
-      "Under Musk, Tesla has also constructed multiple lithium-ion battery and electric vehicle factories, named Gigafactories.Since its initial public offering in 2010, Tesla stock has risen significantly; it became the most valuable carmaker in summer 2020, and it entered the S&P 500 later that year.In October 2021, it reached a market capitalization of $1 trillion, the sixth company in U.S. history to do so.In November 2021, Musk proposed, on Twitter, to sell 10% of his Tesla stock, since \"much is made lately of unrealized gains being a means of tax avoidance\".After more than 3.5 million Twitter accounts supported the sale, Musk sold $6.9 billion of Tesla stock within a week, and a total of $16.4 billion by year end, reaching the 10% target.In February 2022, The Wall Street Journal reported that both Elon and Kimbal Musk were under investigation by the SEC for possible insider trading related to the sale.In 2022, Musk unveiled a robot developed by Tesla, Optimus.\n",
-      "During his rookie 1984–85 season with the Bulls, Jordan averaged 28.2 ppg on 51.5% shooting, and helped make a team that had won 35% of games in the previous three seasons playoff contenders.He quickly became a fan favorite even in opposing arenas.Roy S. Johnson of The New York Times described him as \"the phenomenal rookie of the Bulls\" in November, and Jordan appeared on the cover of Sports Illustrated with the heading \"A Star Is Born\" in December.The fans also voted in Jordan as an All-Star starter during his rookie season.Controversy arose before the 1985 NBA All-Star Game when word surfaced that several veteran players, led by Isiah Thomas, were upset by the amount of attention Jordan was receiving.This led to a so-called \"freeze-out\" on Jordan, where players refused to pass the ball to him throughout the game.The controversy left Jordan relatively unaffected when he returned to regular season play, and he would go on to be voted the NBA Rookie of the Year.\n",
-      "The acquisition was officially completed on October 27.Immediately after the acquisition, Musk fired several top Twitter executives including CEO Parag Agrawal; Musk became the CEO instead.He instituted a $7.99 monthly subscription for a \"blue check\", and laid off a significant portion of the company's staff.Musk lessened content moderation, and in December, Musk released internal documents relating to Twitter's moderation of Hunter Biden's laptop controversy in the leadup to the 2020 presidential election.The Southern Poverty Law Center noted that Twitter has verified numerous extremists, and a study of millions of tweets following the acquisition indicated that hate speech on the platform has become \"more visible\" under Musk's leadership.Within the first weeks of ownership, Musk made a series of decisions and changes that he quickly reversed, including the paid blue checkmark, creating an \"official\" label and forbidding linking to one's profiles on other social media platforms.Under Musk's management, Twitter experienced several large scale outages.In April 2022, The Washington Post reported that Musk privately claimed that supposed censorship on the platform, including the banning of accounts such as The Babylon Bee, had prompted him to begin the acquisition.\n",
-      "Musk also promoted a baseless theory relating to the attack of Speaker Nancy Pelosi's husband, but Musk deleted his tweet.Musk has praised China and has been described as having a close relationship with the Chinese government, allowing access to its markets for Tesla.After Gigafactory Shanghai produced its first batch of vehicles, Musk thanked the Chinese government and Chinese people while criticizing the United States and its people.: 207–208  In 2022, Musk wrote an article for China Cyberspace, the official publication of Cyberspace Administration of China, which enforces Internet censorship in China.His writing the article was described as conflicting with his advocacy for free speech.Musk later advocated for Taiwan to become a \"special administrative zone\" of China which drew cross-party criticism from Taiwanese lawmakers.In October 2022, Musk posted a Twitter poll and \"peace plan\" to resolve the Russian invasion of Ukraine.It was reported that Musk allegedly spoke with Russian President Vladimir Putin prior to the proposal, which Musk denied.\n",
-      "\n",
-      "\n",
-      "=== COVID-19 ===\n",
-      "Musk was criticized for his public comments and conduct related to the COVID-19 pandemic.\n",
-      "Jordan has had a long relationship with Gatorade, appearing in over 20 commercials for the company since 1991, including the \"Be Like Mike\" commercials in which a song was sung by children wishing to be like Jordan.Nike created a signature shoe for Jordan, called the Air Jordan, in 1984.One of Jordan's more popular commercials for the shoe involved Spike Lee playing the part of Mars Blackmon.In the commercials, Lee, as Blackmon, attempted to find the source of Jordan's abilities and became convinced that \"it's gotta be the shoes\".The hype and demand for the shoes even brought on a spate of \"shoe-jackings\", in which people were robbed of their sneakers at gunpoint.Subsequently, Nike spun off the Jordan line into its own division named the \"Jordan Brand\".The company features a list of athletes and celebrities as endorsers.The brand has also sponsored college sports programs such as those of North Carolina, UCLA, California, Oklahoma, Florida, Georgetown, and Marquette.Jordan also has been associated with the Looney Tunes cartoon characters.A Nike commercial shown during 1992's Super Bowl XXVI featured Jordan and Bugs Bunny playing basketball.\n",
-      "Accordingly, Musk has stated that artificial intelligence poses the greatest existential threat to humanity.He has warned of a \"Terminator-like\" AI apocalypse and suggested that the government should regulate its safe development.In 2015, Musk was a cosignatory, along with Stephen Hawking and hundreds of others, of the Open Letter on Artificial Intelligence, which called for the ban of autonomous weapons.Musk's AI stances have been called alarmist and sensationalist by critics such as computer scientist Yann LeCun and Meta CEO Mark Zuckerberg, and led the think tank Information Technology and Innovation Foundation to award Musk its Annual Luddite Award in 2016.Musk has described climate change as the greatest threat to humanity after AI, and has advocated for a carbon tax.Musk was a critic of President Donald Trump's stance on climate change, and resigned from two presidential business advisory councils following Trump's 2017 decision to withdraw the United States from the Paris Agreement.Musk has long promoted the colonization of Mars and argues that humanity should become a \"multiplanetary species\".He has suggested the use of nuclear weapons to terraform Mars.\n",
-      "In 2022, he acquired Twitter for $44 billion and subsequently merged the company into newly created X Corp. and rebranded the service as X the following year.In March 2023, he founded xAI, an artificial-intelligence company.Musk has expressed views that have made him a polarizing figure.He has been criticized for making unscientific and misleading statements, including that of spreading COVID-19 misinformation, and promoting conspiracy theories.His Twitter ownership has been similarly controversial, including letting off a large number of employees, an increase in hate speech on the platform and features such as Twitter Blue and the implementation of limits on the amount of viewable Tweets per day being criticized.In 2018, the U.S. Securities and Exchange Commission (SEC) sued him for falsely tweeting that he had secured funding for a private takeover of Tesla.To settle the case, Musk stepped down as the chairman of Tesla and paid a $20 million fine.\n",
-      "\n",
-      "\n",
-      "== Early life ==\n",
-      "\n",
-      "\n",
-      "=== Childhood and family ===\n",
-      "\n",
-      "Elon Reeve Musk was born on June 28, 1971, in Pretoria, one of South Africa's capital cities.Musk has British and Pennsylvania Dutch ancestry.\n",
-      "Jordan abruptly retired from basketball before the 1993–94 NBA season to play Minor League Baseball but returned to the Bulls in March 1995 and led them to three more championships in 1996, 1997, and 1998, as well as a then-record 72 regular season wins in the 1995–96 NBA season.He retired for the second time in January 1999 but returned for two more NBA seasons from 2001 to 2003 as a member of the Washington Wizards.During the course of his professional career, he was also selected to play for the United States national team, winning four gold medals—at the 1983 Pan American Games, 1984 Summer Olympics, 1992 Tournament of the Americas and 1992 Summer Olympics—while also being undefeated.Jordan's individual accolades and accomplishments include six NBA Finals Most Valuable Player (MVP) awards, ten NBA scoring titles (both all-time records), five NBA MVP awards, ten All-NBA First Team designations, nine All-Defensive First Team honors, fourteen NBA All-Star Game selections, three NBA All-Star Game MVP awards, three NBA steals titles, and the 1988 NBA Defensive Player of the Year Award.\n",
-      "In May 2020, he reopened the Tesla factory, defying the local stay-at-home order, and warned workers that they would be unpaid, and their unemployment benefits might be jeopardized, if they did not report to work.In December 2022, Musk called for prosecution of former National Institute of Allergy and Infectious Diseases director Anthony Fauci.In March 2020, Musk promised that Tesla would make ventilators for COVID-19 patients if there were a shortage.After figures like New York City mayor Bill de Blasio responded to Musk's offer, Musk offered to donate ventilators which Tesla would build or buy from a third party.However, Musk ended up buying and donating BiPAP and CPAP machines, which are devices that support respirations of someone able to breathe on their own, rather than the much more expensive and sought-after mechanical ventilator machines that are able to breathe for a patient entirely.In September 2020, Musk stated that he would not get the COVID-19 vaccine, because he and his children were \"not at risk for COVID\".\n",
-      "Broadcaster Al Michaels said that he was able to read baseball box scores on a 27-inch (69 cm) television clearly from about 50 feet (15 m) away.During the 2001 NBA Finals, Phil Jackson compared Jordan's dominance to Shaquille O'Neal, stating: \"Michael would get fouled on every play and still have to play through it and just clear himself for shots instead and would rise to that occasion.\"\n",
-      "\n",
-      "\n",
-      "== Legacy ==\n",
-      "Jordan's talent was clear from his first NBA season; by November 1984, he was being compared to Julius Erving.Larry Bird said that rookie Jordan was the best player he ever saw, and that he was \"one of a kind\", and comparable to Wayne Gretzky as an athlete.In his first game in Madison Square Garden against the New York Knicks, Jordan received a near minute-long standing ovation.After establishing the single game playoff record of 63 points against the Boston Celtics on April 20, 1986, Bird described him as \"God disguised as Michael Jordan\".Jordan led the NBA in scoring in 10 seasons (NBA record) and tied Wilt Chamberlain's record of seven consecutive scoring titles.\n",
-      "=== Twitter ===\n",
-      "\n",
-      "Musk expressed interest in buying Twitter as early as 2017, and had previously questioned the platform's commitment to freedom of speech.In January 2022, Musk started purchasing Twitter shares, reaching a 9.2% stake by April, making him the largest shareholder.When this was publicly disclosed, Twitter shares experienced the largest intraday price surge since the company's 2013 IPO.On April 4, Musk agreed to a deal that would appoint him to Twitter's board of directors and prohibit him from acquiring more than 14.9% of the company.However, on April 13, Musk made a $43 billion offer to buy Twitter, launching a takeover bid to buy 100% of Twitter's stock at $54.20 per share.In response, Twitter's board adopted a \"poison pill\" shareholder rights plan to make it more expensive for any single investor to own more than 15% of the company without board approval.Nevertheless, by the end of the month Musk had successfully concluded his bid for approximately $44 billion.\n",
-      "In his 1998 autobiography For the Love of the Game, Jordan wrote that he was preparing for retirement as early as the summer of 1992.The added exhaustion due to the \"Dream Team\" run in the 1992 Summer Olympics solidified Jordan's feelings about the game and his ever-growing celebrity status.Jordan's announcement sent shock waves throughout the NBA and appeared on the front pages of newspapers around the world.Jordan further surprised the sports world by signing a Minor League Baseball contract with the Chicago White Sox on February 7, 1994.He reported to spring training in Sarasota, Florida, and was assigned to the team's minor league system on March 31, 1994.Jordan said that this decision was made to pursue the dream of his late father, who always envisioned his son as a Major League Baseball player.The White Sox were owned by Bulls owner Jerry Reinsdorf, who continued to honor Jordan's basketball contract during the years he played baseball.In 1994, Jordan played for the Birmingham Barons, a Double-A minor league affiliate of the Chicago White Sox, batting .202 with three home runs, 51 runs batted in, 30 stolen bases, 114 strikeouts, 51 bases on balls, and 11 errors.\n",
-      "As of 2019, he has raised more than $5 million for the Make-A-Wish Foundation.In 2023, Jordan donated $10 million to the organization for his 60th birthday.In 2015, Jordan donated a settlement of undisclosed size from a lawsuit against supermarkets that had used his name without permission to 23 different Chicago charities.In 2017, Jordan funded two Novant Health Michael Jordan Family Clinics in Charlotte, North Carolina, by giving $7 million, the biggest donation he had made at the time.In 2018, after Hurricane Florence damaged parts of North Carolina, including his former hometown of Wilmington, Jordan donated $2 million to relief efforts.He gave $1 million to aid the Bahamas' recovery following Hurricane Dorian in 2019.On June 5, 2020, in the wake of the protests following the murder of George Floyd, Jordan and his brand announced in a joint statement that they would be donating $100 million over the next 10 years to organizations dedicated to \"ensuring racial equality, social justice and greater access to education\".In February 2021, Jordan funded two Novant Health Michael Jordan Family Clinics in New Hanover County, North Carolina, by giving $10 million.\n",
-      "Jordan was undefeated in the four tournaments he played for the United States national team, winning all 30 games he took part in.\n",
-      "\n",
-      "\n",
-      "== Player profile ==\n",
-      "Jordan was a shooting guard who could also play as a small forward, the position he would primarily play during his second return to professional basketball with the Washington Wizards, and as a point guard.Jordan was known throughout his career as a strong clutch performer.With the Bulls, he decided 25 games with field goals or free throws in the last 30 seconds, including two NBA Finals games and five other playoff contests.His competitiveness was visible in his prolific trash talk and well-known work ethic.Jordan often used perceived slights to fuel his performances.Sportswriter Wright Thompson described him as \"a killer, in the Darwinian sense of the word, immediately sensing and attacking someone's weakest spot\".As the Bulls organization built the franchise around Jordan, management had to trade away players who were not \"tough enough\" to compete with him in practice.To help improve his defense, he spent extra hours studying film of opponents.\n",
-      "== National team career ==\n",
-      "Jordan made his debut for the U.S. national basketball team at the 1983 Pan American Games in Caracas, Venezuela.He led the team in scoring with 17.3 ppg as the U.S., coached by Jack Hartman, won the gold medal in the competition.A year later, he won another gold medal in the 1984 Summer Olympics.The 1984 U.S. team was coached by Bob Knight and featured players such as Patrick Ewing, Sam Perkins, Chris Mullin, Steve Alford, and Wayman Tisdale.Jordan led the team in scoring, averaging 17.1 ppg for the tournament.In 1992, Jordan was a member of the star-studded squad that was dubbed the \"Dream Team\", which included Larry Bird and Magic Johnson.The team went on to win two gold medals: the first one in the 1992 Tournament of the Americas, and the second one in the 1992 Summer Olympics.He was the only player to start all eight games in the Olympics, averaged 14.9 ppg, and finished second on the team in scoring.\n",
-      "In 2020, SpaceX launched its first crewed flight, the Demo-2, becoming the first private company to place astronauts into orbit and dock a crewed spacecraft with the ISS.\n",
-      "\n",
-      "\n",
-      "==== Starlink ====\n",
-      "\n",
-      "In 2015, SpaceX began development of the Starlink constellation of low-Earth-orbit satellites to provide satellite Internet access, with the first two prototype satellites launched in February 2018.A second set of test satellites, and the first large deployment of a piece of the constellation, occurred in May 2019, when the first 60 operational satellites were launched.The total cost of the decade-long project to design, build, and deploy the constellation is estimated by SpaceX to be about $10 billion.Some critics, including the International Astronomical Union, have alleged that Starlink blocks the view of the sky and poses a collision threat to spacecraft.During the Russian invasion of Ukraine, Musk sent Starlink terminals to Ukraine to provide Internet access and communication.However, Musk refused to block Russian state media on Starlink, declaring himself \"a free speech absolutist\".\n",
-      "During the season, Sam Vincent, Chicago's point guard, was having trouble running the offense, and Jordan expressed his frustration with head coach Doug Collins, who would put Jordan at point guard.In his time as a point guard, Jordan averaged 10 triple-doubles in eleven games, with 33.6 ppg, 11.4 rpg, 10.8 apg, 2.9 spg, and 0.8 bpg on 51% shooting.The Bulls finished with a 47–35 record, and advanced to the Eastern Conference Finals, defeating the Cavaliers and New York Knicks along the way.The Cavaliers series included a career highlight for Jordan when he hit \"The Shot\" over Craig Ehlo at the buzzer in the fifth and final game of the series.\n",
-      "On June 20, 2023, Musk met with Indian Prime Minister Narendra Modi in New York City, suggesting that he might be interested in investing in India \"as soon as humanly possible\".\n",
-      "\n",
-      "\n",
-      "==== SEC and shareholder lawsuits regarding tweets ====\n",
-      "In 2018, Musk was sued by the SEC for a tweet claiming that funding had been secured for potentially taking Tesla private.The lawsuit characterized the tweet as false, misleading, and damaging to investors, and sought to bar Musk from serving as CEO of publicly traded companies.Two days later, Musk settled with the SEC, without admitting or denying the SEC's allegations.As a result, Musk and Tesla were fined $20 million each, and Musk was forced to step down for three years as Tesla chairman but was able to remain as CEO.Musk has stated in interviews that he does not regret posting the tweet that triggered the SEC investigation.In April 2022, the shareholder who sued Musk over the tweet, along with several Tesla shareholders, said that a federal judge had ruled that the tweet was false, although the ruling in question has not been unsealed.\n",
-      "At age ten, he developed an interest in computing and video games, teaching himself how to program from the VIC-20 user manual.At age twelve, he sold his BASIC-based game Blastar to PC and Office Technology magazine for approximately $500.\n",
-      "\n",
-      "\n",
-      "=== Education ===\n",
-      "Musk attended Waterkloof House Preparatory School, Bryanston High School, and Pretoria Boys High School, from where he graduated.Musk applied for a Canadian passport through his Canadian-born mother, knowing that it would be easier to immigrate to the United States this way.While waiting for his application to be processed, he attended the University of Pretoria for five months.Musk arrived in Canada in June 1989 and lived with a second cousin in Saskatchewan for a year, working odd jobs at a farm and lumber mill.In 1990, he entered Queen's University in Kingston, Ontario.Two years later, he transferred to the University of Pennsylvania (UPenn), where he completed studies for a Bachelor of Arts degree in physics and a Bachelor of Science degree in economics from the Wharton School.Although Musk claims he earned the degrees in 1995, UPenn maintains it awarded them in 1997.\n",
-      "== Further reading ==\n",
-      "Leahy, Michael (2004). When Nothing Else Matters: Michael Jordan's Last Comeback. Simon & Schuster. ISBN 978-0-7432-7648-1.\n",
-      "McGovern, Mike (2005). Michael Jordan: Basketball Player. Ferguson. ISBN 978-0-8160-5876-1.\n",
-      "\n",
-      "\n",
-      "== External links ==\n",
-      "\n",
-      "Career statistics and player information from NBA.com and Basketball-Reference.com\n",
-      "Michael Jordan at the Naismith Memorial Basketball Hall of Fame\n",
-      "Michael Jordan at Curlie\n",
-      "Career statistics and player information from Baseball Reference (Minors)\n",
-      "Michael Jordan Career Retrospective on YouTube\n",
-      "Michael Jordan at IMDb\n",
-      "\"Jordan archives\". Chicago Tribune. Archived from the original on June 5, 1997. Retrieved April 29, 2020.\n",
-      "He was also a fixture of the NBA All-Defensive First Team, making the roster nine times (NBA record shared with Gary Payton, Kevin Garnett, and Kobe Bryant).Jordan also holds the top career regular season and playoff scoring averages of 30.1 and 33.4 ppg, respectively.By 1998, the season of his Finals-winning shot against the Jazz, he was well known throughout the league as a clutch performer.In the regular season, Jordan was the Bulls' primary threat in the final seconds of a close game and in the playoffs; he would always ask for the ball at crunch time.Jordan's total of 5,987 points in the playoffs is the second-highest among NBA career playoff scoring leaders.He scored 32,292 points in the regular season, placing him fifth on the NBA all-time scoring list behind LeBron James, Kareem Abdul-Jabbar, Karl Malone, and Bryant.With five regular season MVPs (tied for second place with Bill Russell—only Abdul-Jabbar has won more, with six), six Finals MVPs (NBA record), and three NBA All-Star Game MVPs, Jordan is the most decorated player in NBA history.\n",
-      "His strikeout total led the team and his games played tied for the team lead.His 30 stolen bases were second on the team only to Doug Brady.He also appeared for the Scottsdale Scorpions in the 1994 Arizona Fall League, batting .252 against the top prospects in baseball.On November 1, 1994, his No.23 was retired by the Bulls in a ceremony that included the erection of a permanent sculpture known as The Spirit outside the new United Center.\n",
-      "\n",
-      "\n",
-      "==== \"I'm back\": Return to the NBA (1995) ====\n",
-      "The Bulls went 55–27 in 1993–94 without Jordan in the lineup and lost to the New York Knicks in the second round of the playoffs.The 1994–95 Bulls were a shell of the championship team of just two years earlier.Struggling at mid-season to ensure a spot in the playoffs, Chicago was 31–31 at one point in mid-March; the team received help when Jordan decided to return to the Bulls.In March 1995, Jordan decided to quit baseball because he feared he might become a replacement player during the Major League Baseball strike.\n",
-      "Though the rocket failed to reach Earth orbit, it was awarded a Commercial Orbital Transportation Services program contract from NASA Administrator (and former SpaceX consultant) Mike Griffin later that year.After two more failed attempts that nearly caused Musk and his companies to go bankrupt, SpaceX succeeded in launching the Falcon 1 into orbit in 2008.Later that year, SpaceX received a $1.6 billion Commercial Resupply Services contract from NASA for 12 flights of its Falcon 9 rocket and Dragon spacecraft to the International Space Station, replacing the Space Shuttle after its 2011 retirement.In 2012, the Dragon vehicle docked with the ISS, a first for a commercial spacecraft.Working towards its goal of reusable rockets, in 2015 SpaceX successfully landed the first stage of a Falcon 9 on an inland platform.Later landings were achieved on autonomous spaceport drone ships, an ocean-based recovery platform.In 2018, SpaceX launched the Falcon Heavy; the inaugural mission carried Musk's personal Tesla Roadster as a dummy payload.Since 2019, SpaceX has been developing Starship, a fully-reusable, super-heavy-lift launch vehicle intended to replace the Falcon 9 and the Falcon Heavy.\n",
-      "At the 2003 All-Star Game, Jordan was offered a starting spot from Tracy McGrady and Allen Iverson but refused both; in the end, he accepted the spot of Vince Carter.Jordan played in his final NBA game on April 16, 2003, in Philadelphia.After scoring 13 points in the game, Jordan went to the bench with 4 minutes and 13 seconds remaining in the third quarter and his team trailing the Philadelphia 76ers 75–56.Just after the start of the fourth quarter, the First Union Center crowd began chanting \"We want Mike!\"After much encouragement from coach Doug Collins, Jordan finally rose from the bench and re-entered the game, replacing Larry Hughes with 2:35 remaining.At 1:45, Jordan was intentionally fouled by the 76ers' Eric Snow, and stepped to the line to make both free throws.After the second foul shot, the 76ers in-bounded the ball to rookie John Salmons, who in turn was intentionally fouled by Bobby Simmons one second later, stopping time so that Jordan could return to the bench.Jordan received a three-minute standing ovation from his teammates, his opponents, the officials, and the crowd of 21,257 fans.\n",
-      "==== SolarCity and Tesla Energy ====\n",
-      "\n",
-      "Musk provided the initial concept and financial capital for SolarCity, which his cousins Lyndon and Peter Rive founded in 2006. By 2013, SolarCity was the second largest provider of solar power systems in the United States. In 2014, Musk promoted the idea of SolarCity building an advanced production facility in Buffalo, New York, triple the size of the largest solar plant in the United States. Construction of the factory started in 2014 and was completed in 2017. It operated as a joint venture with Panasonic until early 2020.Tesla acquired SolarCity for over $2 billion in 2016 and merged it with its battery unit to create Tesla Energy. The deal's announcement resulted in a more than 10% drop in Tesla's stock price. At the time, SolarCity was facing liquidity issues. Multiple shareholder groups filed a lawsuit against Musk and Tesla's directors, claiming that the purchase of SolarCity was done solely to benefit Musk and came at the expense of Tesla and its shareholders. Tesla directors settled the lawsuit in January 2020, leaving Musk the sole remaining defendant. Two years later, the court ruled in Musk's favor.\n",
-      "In December 2022, the NBA unveiled a new MVP trophy, named in Jordan's honor, to be awarded beginning with the 2022–23 season.The \"Michael Jordan Trophy\" will replace the original trophy, named in honor of former NBA commissioner Maurice Podoloff, with a new Podoloff Trophy set to be awarded to the team with the best overall regular season record.\n",
-      "\n",
-      "\n",
-      "== NBA career statistics ==\n",
-      "\n",
-      "\n",
-      "=== Regular season ===\n",
-      "\n",
-      "\n",
-      "=== Playoffs ===\n",
-      "\n",
-      "\n",
-      "== Awards and honors ==\n",
-      "\n",
-      "NBASix-time NBA champion – 1991, 1992, 1993, 1996, 1997, 1998\n",
-      "Six-time NBA Finals MVP – 1991, 1992, 1993, 1996, 1997, 1998\n",
-      "Five-time NBA MVP – 1988, 1991, 1992, 1996, 1998\n",
-      "NBA Defensive Player of the Year – 1987–88\n",
-      "NBA Rookie of the Year – 1984–85\n",
-      "10-time NBA scoring leader – 1987–1993, 1996–1998\n",
-      "Three-time NBA steals leader – 1988, 1990, 1993\n",
-      "14-time NBA All-Star – 1985–1993, 1996–1998, 2002,\n",
-      "Consequently, Tesla's 2021 announcement, against the backdrop of Musk's social media behavior, that it bought $1.5 billion worth of Bitcoin, raised questions.Tesla's announcement that it would accept Bitcoin for payment was criticized by environmentalists and investors, due to the environmental impact of cryptocurrency mining.A few months later, in response to the criticism, Musk announced on Twitter that Tesla would no longer accept payments in Bitcoin and would not engage in any Bitcoin transactions until the environmental issues are solved.Despite the Boring Company's involvement in building mass transit infrastructure, Musk has criticized public transport and promoted individualized transport (private vehicles).His comments have been called \"elitist\" and have sparked widespread criticism from both transportation and urban planning experts, who have pointed out that public transportation in dense urban areas is more economical, more energy efficient, and requires much less space than private cars.\n",
-      "Musk assumed leadership of the company as CEO and product architect in 2008.A 2009 lawsuit settlement with Eberhard designated Musk as a Tesla co-founder, along with Tarpenning and two others.As of 2019, Musk was the longest-tenured CEO of any automotive manufacturer globally.In 2021, Musk nominally changed his title to \"Technoking\" while retaining his position as CEO.Tesla began delivery of an electric sports car, the Roadster, in 2008.With sales of about 2,500 vehicles, it was the first serial production all-electric car to use lithium-ion battery cells.Tesla began delivery of its four-door Model S sedan in 2012.A cross-over, the Model X was launched in 2015.A mass-market sedan, the Model 3, was released in 2017.The Model 3 is the all-time bestselling plug-in electric car worldwide, and in June 2021 it became the first electric car to sell 1 million units globally.A fifth vehicle, the Model Y crossover, was launched in 2020.The Cybertruck, an all-electric pickup truck, was unveiled in 2019.\n",
-      "Perhaps the best-known moment of the series came in Game 2 when, attempting a dunk, Jordan avoided a potential Sam Perkins block by switching the ball from his right hand to his left in mid-air to lay the shot into the basket.In his first Finals appearance, Jordan had 31.2 ppg on 56% shooting from the field, 11.4 apg, 6.6 rpg, 2.8 spg, and 1.4 bpg.Jordan won his first NBA Finals MVP award, and he cried while holding the Finals trophy.Jordan and the Bulls continued their dominance in the 1991–92 season, establishing a 67–15 record, topping their franchise record from the 1990–91 campaign.Jordan won his second consecutive MVP award with averages of 30.1 ppg, 6.4 rbg, and 6.1 apg on 52% shooting.After winning a physical seven-game series over the New York Knicks in the second round of the playoffs and finishing off the Cleveland Cavaliers in the Conference Finals in six games, the Bulls met Clyde Drexler and the Portland Trail Blazers in the Finals.\n",
-      "On April 20 at the Boston Garden, in Game 2 of the First Round, a 135–131 double overtime loss to the eventual NBA Champion Boston Celtics, Jordan scored a playoff career-high 63 points, breaking Elgin Baylor’s single-game playoff scoring record.A Celtics team that is often considered one of the greatest in NBA history swept the series in three games.Jordan completely recovered in time for the 1986–87 season, and had one of the most prolific scoring seasons in NBA history; he became the only player other than Wilt Chamberlain to score 3,000 points in a season, averaging a league-high 37.1 ppg on 48.2% shooting.In addition, Jordan demonstrated his defensive prowess, as he became the first player in NBA history to record 200 steals and 100 blocked shots in a season.Despite Jordan's success, Magic Johnson won the NBA Most Valuable Player Award.The Bulls reached 40 wins, and advanced to the playoffs for the third consecutive year but were again swept by the Celtics.\n",
-      "The Wall Street Journal reported that, after Musk insisted on branding his vehicles as \"self-driving\", he faced criticism from his engineers for putting customer \"lives at risk\", with some employees resigning in consequence.\n",
-      "\n",
-      "\n",
-      "== Other activities ==\n",
-      "\n",
-      "\n",
-      "=== Musk Foundation ===\n",
-      "Musk is president of the Musk Foundation he founded in 2001, whose stated purpose is to provide solar-power energy systems in disaster areas; support research, development, and advocacy (for interests including human space exploration, pediatrics, renewable energy and \"safe artificial intelligence\"); and support science and engineering educational efforts.From 2002 to 2018, the foundation gave $25 million directly to non-profit organizations, nearly half of which went to Musk's OpenAI, which was a non-profit at the time.Since 2002, the foundation has made over 350 donations.Around half of them were made to scientific research or education nonprofits.Notable beneficiaries include the Wikimedia Foundation, his alma mater the University of Pennsylvania, and his brother Kimbal's non-profit Big Green.In 2012, Musk took the Giving Pledge, thereby committing to give the majority of his wealth to charitable causes either during his lifetime or in his will.\n",
-      "He envisioned establishing a direct democracy on Mars, with a system in which more votes would be required to create laws than remove them.Musk has also voiced concerns about human population decline, saying that \"Mars has zero human population.We need a lot of people to become a multiplanet civilization.\"Speaking at The Wall Street Journal's CEO Council session in 2021, Musk stated that a declining birth rate, and consequent population decline, is one of the biggest risks to human civilization.\n",
-      "\n",
-      "\n",
-      "=== Politics ===\n",
-      "\n",
-      "While often described as libertarian, Musk has called himself \"politically moderate\" and was a registered independent voter when he lived in California.The New York Times wrote that Musk \"expresses views that don't fit neatly into [the American] binary, left-right political framework\".Historically, Musk has donated to both Democrats and Republicans, many of whom are in states in which he has a vested interest.Beginning in the late 2010s, Musk's political contributions have shifted to almost entirely supporting Republicans.Musk voted for Hillary Clinton in the 2016 U.S. presidential election.In the 2020 Democratic Party presidential primaries, Musk endorsed candidate Andrew Yang and expressed support for his proposed universal basic income.\n",
-      "With 10 seconds remaining, Jordan started to dribble right, then crossed over to his left, possibly pushing off Russell, although the officials did not call a foul.With 5.2 seconds left, Jordan made the climactic shot of his Bulls career, a top-key jumper over a stumbling Russell to give Chicago an 87–86 lead.Afterwards, the Jazz' John Stockton narrowly missed a game-winning three-pointer, and the buzzer sounded as Jordan and the Bulls won their sixth NBA championship, achieving a second three-peat in the decade.Once again, Jordan was voted Finals MVP, having led all scorers by averaging 33.5 ppg, including 45 in the deciding Game 6.Jordan's six Finals MVPs is a record.The 1998 Finals holds the highest television rating of any Finals series in history, and Game 6 holds the highest television rating of any game in NBA history.\n",
-      "\n",
-      "\n",
-      "==== Second retirement (1999–2001) ====\n",
-      "With Phil Jackson's contract expiring, the pending departures of Scottie Pippen and Dennis Rodman looming, and being in the latter stages of an owner-induced lockout of NBA players, Jordan retired for the second time on January 13, 1999.\n",
-      "On January 19, 2000, Jordan returned to the NBA not as a player but as part owner and president of basketball operations for the Washington Wizards.Jordan's responsibilities with the Wizards were comprehensive, as he controlled all aspects of the Wizards' basketball operations, and had the final say in all personnel matters; opinions of Jordan as a basketball executive were mixed.He managed to purge the team of several highly paid, unpopular players (like forward Juwan Howard and point guard Rod Strickland) but used the first pick in the 2001 NBA draft to select high school student Kwame Brown, who did not live up to expectations and was traded away after four seasons.Despite his January 1999 claim that he was \"99.9% certain\" he would never play another NBA game, Jordan expressed interest in making another comeback in the summer of 2001, this time with his new team.Inspired by the NHL comeback of his friend Mario Lemieux the previous winter, Jordan spent much of the spring and summer of 2001 in training, holding several invitation-only camps for NBA players in Chicago.\n",
-      "In February 2023, the jury found Musk and Tesla not liable.In 2019, Musk stated in a tweet that Tesla would build half a million cars that year.The SEC reacted to Musk's tweet by filing in court, asking the court to hold him in contempt for violating the terms of a settlement agreement with such a tweet; the accusation was disputed by Musk.This was eventually settled by a joint agreement between Musk and the SEC clarifying the previous agreement details.The agreement included a list of topics that Musk would need preclearance before tweeting about.In 2020, a judge prevented a lawsuit from proceeding that claimed a tweet by Musk regarding Tesla stock price (\"too high imo\") violated the agreement.FOIA-released records showed that the SEC itself concluded Musk has subsequently violated the agreement twice by tweeting regarding \"Tesla's solar roof production volumes and its stock price\".\n",
-      "The Bulls won the Eastern Conference Championship for a third straight season, including surviving a seven-game series with the Indiana Pacers in the Eastern Conference Finals; it was the first time Jordan had played in a Game 7 since the 1992 Eastern Conference Semifinals with the New York Knicks.After winning, they moved on for a rematch with the Jazz in the Finals.The Bulls returned to the Delta Center for Game 6 on June 14, 1998, leading the series 3–2.Jordan executed a series of plays, considered to be one of the greatest clutch performances in NBA Finals history.With 41.9 seconds remaining and the Bulls trailing 86–83, Phil Jackson called a timeout.When play resumed, Jordan received the inbound pass, drove to the basket, and sank a shot over several Jazz defenders, cutting Utah's lead to 86–85.The Jazz brought the ball upcourt and passed the ball to Malone, who was set up in the low post and was being guarded by Rodman.Malone jostled with Rodman and caught the pass, but Jordan cut behind him and stole the ball out of his hands.Jordan then dribbled down the court and paused, eyeing his defender, Jazz guard Bryon Russell.\n",
-      "== Post-retirement ==\n",
-      "After his third retirement, Jordan assumed that he would be able to return to his front office position as Director of Basketball Operations with the Wizards. His previous tenure in the Wizards' front office had produced mixed results and may have also influenced the trade of Richard \"Rip\" Hamilton for Jerry Stackhouse, although Jordan was not technically Director of Basketball Operations in 2002. On May 7, 2003, Wizards owner Abe Pollin fired Jordan as the team's president of basketball operations. Jordan later stated that he felt betrayed, and that if he had known he would be fired upon retiring, he never would have come back to play for the Wizards.Jordan kept busy over the next few years. He stayed in shape, played golf in celebrity charity tournaments, and spent time with his family in Chicago. He also promoted his Jordan Brand clothing line and rode motorcycles. Since 2004, Jordan has owned Michael Jordan Motorsports, a professional closed-course motorcycle road racing team that competed with two Suzukis in the premier Superbike championship sanctioned by the American Motorcyclist Association (AMA) until the end of the 2013 season.\n",
-      "Notably, Tesla generates some of its revenue from its sales of carbon credits granted to the company, by both the European Union Emissions Trading System and the Chinese national carbon trading scheme.Musk, a longtime opponent of short-selling, has repeatedly criticized the practice and argued it should be illegal.Wired magazine speculated that Musk's opposition to short-selling stems from how short sellers have an incentive to find and promote unfavorable information about his companies.In early 2021, he encouraged the GameStop short squeeze.In December 2022, Musk sold $3.6 billion of his stock in Tesla, equal to 22 million shares in the company, despite pledging earlier in the year that he would not sell any additional shares.\n",
-      "\n",
-      "\n",
-      "=== Technology ===\n",
-      "Musk has promoted cryptocurrencies and supports them over traditional government-issued fiat currencies.Given the influence of Musk's tweets in moving cryptocurrency markets, his statements about cryptocurrencies have been viewed as market manipulation by some, such as economist Nouriel Roubini.Musk's social media praising of Bitcoin and Dogecoin was credited for increasing their prices.\n",
-      "On March 18, 1995, Jordan announced his return to the NBA through a two-word press release: \"I'm back.\"The next day, Jordan took to the court with the Bulls to face the Indiana Pacers in Indianapolis, scoring 19 points.The game had the highest Nielsen rating of any regular season NBA game since 1975.Although he could have worn his original number even though the Bulls retired it, Jordan wore No.45, his baseball number.Despite his eighteen-month hiatus from the NBA, Jordan played well, making a game-winning jump shot against Atlanta in his fourth game back.He scored 55 points in his next game, against the New York Knicks at Madison Square Garden on March 28, 1995.Boosted by Jordan's comeback, the Bulls went 13–4 to make the playoffs and advanced to the Eastern Conference Semifinals against the Orlando Magic.At the end of Game 1, Orlando's Nick Anderson stripped Jordan from behind, leading to the game-winning basket for the Magic; he later commented that Jordan \"didn't look like the old Michael Jordan\", and said that \"No.45 doesn't explode like No.\n",
-      "That team included Karl Malone, who had beaten Jordan for the NBA MVP award in a tight race (986–957).The series against the Jazz featured two of the more memorable clutch moments of Jordan's career.He won Game 1 for the Bulls with a buzzer-beating jump shot.In Game 5, with the series tied at 2, Jordan played despite being feverish and dehydrated from a stomach virus.In what is known as \"The Flu Game\", Jordan scored 38 points, including the game-deciding 3-pointer with 25 seconds remaining.The Bulls won 90–88 and went on to win the series in six games.For the fifth time in as many Finals appearances, Jordan received the Finals MVP award.During the 1997 NBA All-Star Game, Jordan posted the first triple-double in All-Star Game history in a victorious effort, but the MVP award went to Glen Rice.Jordan and the Bulls compiled a 62–20 record in the 1997–98 season.Jordan led the league with 28.7 ppg, securing his fifth regular season MVP award, plus honors for All-NBA First Team, First Defensive Team, and the All-Star Game MVP.\n",
-      "The team closed out the season with a 23-game losing streak; their .106 winning percentage was the worst in NBA history.Before the next season, Jordan said: \"I'm not real happy about the record book scenario last year.It's very, very frustrating.\"During the 2019 NBA offseason, Jordan sold a minority piece of the Hornets to Gabe Plotkin and Daniel Sundheim, retaining the majority of the team for himself, as well as the role of chairman.In 2023, Jordan finalized the sale of his majority stake of the team to Gabe Plotkin and Rick Schnall, ending his 13-year tenure as majority owner of the Hornets, although he is keeping a minority stake, The sale was officially completed in August 2023 for approximately $3 billion, more than 10 times the $275 million Jordan had paid for the team.\n",
-      "During the demonstration, Musk revealed a pig with a Neuralink implant that tracked neural activity related to smell.In 2022, Neuralink announced that clinical trials would begin by the end of the year.Neuralink has conducted further animal testing on macaque monkeys at the University of California, Davis' Primate Research Center.In 2021, the company released a video in which a Macaque played the video game Pong via a Neuralink implant.The company's animal trials—which have caused the deaths of some monkeys—have led to claims of animal cruelty.The Physicians Committee for Responsible Medicine has alleged that Neuralink's animal trials have violated the Animal Welfare Act.Employees have complained that pressure from Musk to accelerate development has led to botched experiments and unnecessary animal deaths.In 2022, a federal probe was launched into possible animal welfare violations by Neuralink.\n",
-      "=== Neuralink ===\n",
-      "\n",
-      "In 2016, Musk co-founded Neuralink, a neurotechnology startup company, with an investment of $100 million.Neuralink aims to integrate the human brain with artificial intelligence (AI) by creating devices that are embedded in the brain to facilitate its merging with machines.Such technology could enhance memory or allow the devices to communicate with software.The company also hopes to develop devices with which to treat neurological conditions such as Alzheimer's disease, dementia, and spinal cord injuries.In 2019, Musk announced work on a device akin to a sewing machine that could embed threads into a human brain.He is listed as the sole author of an October 2019 paper that details some of Neuralink's research, although Musk's being listed as such rankled the Neuralink team's researchers.At a 2020 live demonstration, Musk described one of their early devices as \"a Fitbit in your skull\" that could soon cure paralysis, deafness, blindness, and other disabilities.Many neuroscientists and publications criticized these claims, with MIT Technology Review describing them as \"highly speculative\" and \"neuroscience theater\".\n",
-      "Despite media criticism by some as a selfish player early in his career, Jordan was willing to defer to this teammates, with a career average of 5.3 apg and a season-high of 8.0 apg.For a guard, Jordan was also a good rebounder, finishing with 6.2 rpg.Defensively, he averaged 2.3 spg and 0.8 bpg.Three-point field goal was not Jordan's strength, especially in his early years.Later on in Jordan's career, he improved his three-point shooting, and finished his career with a respectable 32% success rate.His three-point field-goal percentages ranged from 35% to 43% in seasons in which he attempted at least 230 three-pointers between 1989–90 and 1996–97.\n",
-      "He has endowed prizes at the X Prize Foundation, including $100 million to reward improved carbon capture technology.Vox said \"the Musk Foundation is almost entertaining in its simplicity and yet is strikingly opaque\", noting that its website was only 33 words in plain-text.The foundation has been criticized for the relatively small amount of wealth donated.In 2020, Forbes gave Musk a philanthropy score of 1, because he had given away less than 1% of his net worth.In November 2021, Musk donated $5.7 billion of Tesla's shares to charity, according to regulatory filings.However, Bloomberg News noted that all of it went to his own foundation, bringing Musk Foundation's assets up to $9.4 billion at the end of 2021.The foundation disbursed $160 million to non-profits that year.\n",
-      "\n",
-      "\n",
-      "=== Hyperloop ===\n",
-      "\n",
-      "In 2013, Musk announced plans for a version of a vactrain—a vacuum tube train—and assigned a dozen engineers from SpaceX and Tesla to establish the conceptual foundations and create initial designs.Later that year, Musk unveiled the concept, which he dubbed the hyperloop.\n",
-      "==== First three-peat (1991–1993) ====\n",
-      "In the 1990–91 season, Jordan won his second MVP award after averaging 31.5 ppg on 53.9% shooting, 6.0 rpg, and 5.5 apg for the regular season.The Bulls finished in first place in their division for the first time in sixteen years and set a franchise record with 61 wins in the regular season.With Scottie Pippen developing into an All-Star, the Bulls had elevated their play.The Bulls defeated the New York Knicks and the Philadelphia 76ers in the opening two rounds of the playoffs.They advanced to the Eastern Conference Finals where their rival, the Detroit Pistons, awaited them; this time, the Bulls beat the Pistons in a four-game sweep.The Bulls advanced to the Finals for the first time in franchise history to face the Los Angeles Lakers, who had Magic Johnson and James Worthy, two formidable opponents.The Bulls won the series four games to one, and compiled a 15–2 playoff record along the way.\n",
-      "Jordan led the league in scoring with 30.4 ppg, and he won the league's regular season and All-Star Game MVP awards.In the playoffs, the Bulls lost only three games in four series (Miami Heat 3–0, New York Knicks 4–1, and Orlando Magic 4–0), as they defeated the Seattle SuperSonics 4–2 in the NBA Finals to win their fourth championship.Jordan was named Finals MVP for a record fourth time, surpassing Magic Johnson's three Finals MVP awards; he also achieved only the second sweep of the MVP awards in the All-Star Game, regular season, and NBA Finals after Willis Reed in the 1969–70 season.Upon winning the championship, his first since his father's murder, Jordan reacted emotionally, clutching the game ball and crying on the locker room floor.In the 1996–97 season, the Bulls stood at a 69–11 record but ended the season by losing their final two games to finish the year 69–13, missing out on a second consecutive 70-win season.The Bulls again advanced to the Finals, where they faced the Utah Jazz.\n",
-      "On September 27, 2021, after Tesla stock surged, Forbes announced that Musk had a net worth of over $200 billion, and was the richest person in the world.In November 2021, Musk became the first person to have a net worth of more than $300 billion.On December 30, 2022, it was reported that Musk had lost $200 billion from his net worth due to declining stock values in Tesla, becoming the first person in history to lose such a large sum of money.In January 2023, Musk was recognized by Guinness World Records for experiencing the \"largest loss of personal fortune in history\" with regards to his financial losses since November 2021, which Guinness quoted a Forbes estimate of $182 billion.Musk's personal wealth is managed by his family office called Excession LLC, which was formed in 2016 and run by Jared Birchall.\n",
-      "\n",
-      "\n",
-      "=== Sources of wealth ===\n",
-      "Around 75% of Musk's wealth derived from Tesla stock in November 2020, a proportion that fell to about 37% as of December 2022, after selling nearly $40 billion in company shares since late 2021.\n",
-      "== College career ==\n",
-      "\n",
-      "As a freshman in coach Dean Smith's team-oriented system, Jordan was named ACC Freshman of the Year after he averaged 13.4 ppg on 53.4% shooting (field goal percentage). He made the game-winning jump shot in the 1982 NCAA Championship game against Georgetown, which was led by future NBA rival Patrick Ewing. Jordan later described this shot as the major turning point in his basketball career. During his three seasons with the Tar Heels, he averaged 17.7 ppg on 54.0% shooting and added 5.0 rpg and 1.8 apg.Jordan was selected by consensus to the NCAA All-American First Team in both his sophomore (1983) and junior (1984) seasons. After winning the Naismith and the Wooden College Player of the Year awards in 1984, Jordan left North Carolina one year before his scheduled graduation to enter the 1984 NBA draft. Jordan returned to North Carolina to complete his degree in 1986, when he graduated with a Bachelor of Arts degree in geography. In 2002, Jordan was named to the ACC 50th Anniversary men's basketball team honoring the 50 greatest players in ACC history.\n",
-      "\n",
-      "\n",
-      "== Professional career ==\n",
-      "=== 23XI Racing ===\n",
-      "On September 21, 2020, Jordan and NASCAR driver Denny Hamlin announced they would be fielding a NASCAR Cup Series team with Bubba Wallace driving, beginning competition in the 2021 season. On October 22, the team's name was confirmed to be 23XI Racing (pronounced twenty-three eleven) and the team's entry would bear No. 23. After the team's inaugural season, it added a second car with No. 45, driven by Kurt Busch in 2022 and Tyler Reddick in 2023. Ty Gibbs, John Hunter Nemechek, and Daniel Hemric also drove for 23XI as substitute drivers during the 2022 season. The team fielded a third car, No. 67, driven by Travis Pastrana in the 2023 Daytona 500. 23XI Racing has won four races, two by Wallace, one by Busch, and one by Reddick.\n",
-      "\n",
-      "\n",
-      "== Personal life ==\n",
-      "Jordan's nephew through his brother Larry, Justin Jordan, played NCAA Division I basketball for the UNC Greensboro Spartans and is a scout for the Charlotte Hornets.Jordan married Juanita Vanoy at A Little White Wedding Chapel in Las Vegas on September 2, 1989.\n",
-      "However, Musk dropped out after two days and, with his brother Kimbal, co-founded online city guide software company Zip2.The startup was acquired by Compaq for $307 million in 1999, and with $12 million of the money he made, that same year Musk co-founded X.com, a direct bank.X.com merged with Confinity in 2000 to form PayPal.In 2002, eBay acquired PayPal for $1.5 billion, and that same year, with $100 million of the money he made, Musk founded SpaceX, a spaceflight services company.In 2004, he became an early investor in electric vehicle manufacturer Tesla Motors, Inc. (now Tesla, Inc.).He became its chairman and product architect, assuming the position of CEO in 2008.In 2006, Musk helped create SolarCity, a solar energy company that was acquired by Tesla in 2016 and became Tesla Energy.In 2013, he proposed a hyperloop high-speed vactrain transportation system.In 2015, he co-founded OpenAI, a nonprofit artificial intelligence research company.The following year, Musk co-founded Neuralink—a neurotechnology company developing brain–computer interfaces—and the Boring Company, a tunnel construction company.\n",
-      "On March 17, the NBA Board of Governors unanimously approved Jordan's purchase, making him the first former player to become the majority owner of an NBA team.It also made him the league's only African-American majority owner.In 2023, Johnson said he regretted selling the Charlotte Hornets to Jordan.During the 2011 NBA lockout, The New York Times wrote that Jordan led a group of 10 to 14 hardline owners who wanted to cap the players' share of basketball-related income at 50 percent and as low as 47.Journalists observed that, during the labor dispute in 1998, Jordan had told Washington Wizards then-owner Abe Pollin: \"If you can't make a profit, you should sell your team.\"Jason Whitlock of FoxSports.com called Jordan \"a hypocrite sellout who can easily betray the very people who made him a billionaire global icon\" for wanting \"current players to pay for his incompetence\".He cited Jordan's executive decisions to draft disappointing players Kwame Brown and Adam Morrison.During the 2011–12 NBA season that was shortened to 66 games by the lockout, the Bobcats posted a 7–59 record.\n",
-      "The tunnel project to Hawthorne was discontinued in 2022 and is cited to be converted into parking spots for SpaceX workers.Biographer Ashlee Vance has noted that Musk hoped Hyperloop would \"make the public and legislators rethink the high-speed train\" proposal current in California at the time and consider more \"creative\" ideas.\n",
-      "23 used to\".Jordan responded by scoring 38 points in the next game, which Chicago won.Before the game, Jordan decided that he would immediately resume wearing his former No.23.The Bulls were fined $25,000 for failing to report the impromptu number change to the NBA.Jordan was fined an additional $5,000 for opting to wear white sneakers when the rest of the Bulls wore black.He averaged 31 ppg in the playoffs, but Orlando won the series in six games.\n",
-      "\n",
-      "\n",
-      "==== Second three-peat (1996–1998) ====\n",
-      "Jordan was freshly motivated by the playoff defeat, and he trained aggressively for the 1995–96 season.The Bulls were strengthened by the addition of rebound specialist Dennis Rodman, and the team dominated the league, starting the season at 41–3.The Bulls eventually finished with the best regular season record in NBA history, 72–10, a mark broken two decades later by the 2015–16 Golden State Warriors.\n",
-      "Even though Musk founded the company, investors regarded him as inexperienced and replaced him with Intuit CEO Bill Harris by the end of the year.In 2000, X.com merged with online bank Confinity to avoid competition, as the latter's money-transfer service PayPal was more popular than X.com's service.Musk then returned as CEO of the merged company.His preference for Microsoft over Unix-based software caused a rift among the company's employees, and eventually led Confinity co-founder Peter Thiel to resign.With the company suffering from compounding technological issues and the lack of a cohesive business model, the board ousted Musk and replaced him with Thiel in September 2000.Under Thiel, the company focused on the money-transfer service and was renamed PayPal in 2001.In 2002, PayPal was acquired by eBay for $1.5 billion in stock, of which Musk—PayPal's largest shareholder with 11.72% of shares—received $175.8 million.In 2017, more than 15 years later, Musk purchased the X.com domain from PayPal for its \"sentimental value\".In 2022, Musk discussed a goal of creating \"X, the everything app\".\n",
-      "In addition, Jordan hired his old Chicago Bulls head coach, Doug Collins, as Washington's coach for the upcoming season, a decision that many saw as foreshadowing another Jordan return.\n",
-      "\n",
-      "\n",
-      "=== Washington Wizards (2001–2003) ===\n",
-      "On September 25, 2001, Jordan announced his return to the NBA to play for the Washington Wizards, indicating his intention to donate his salary as a player to a relief effort for the victims of the September 11 attacks.In an injury-plagued 2001–02 season, Jordan led the team in scoring (22.9 ppg), assists (5.2 apg), and steals (1.4 spg), and was an MVP candidate, as he led the Wizards to a winning record and playoff contention; he would eventually finish 13th in the MVP ballot.After suffering torn cartilage in his right knee, and subsequent knee soreness, the Wizards missed the playoffs, and Jordan's season ended after only 60 games, the fewest he had played in a regular season since playing 17 games after returning from his first retirement during the 1994–95 season.\n",
-      "=== SpaceX ===\n",
-      "\n",
-      "In early 2001, Musk became involved with the nonprofit Mars Society and discussed funding plans to place a growth-chamber for plants on Mars.In October of the same year, he traveled to Moscow with Jim Cantrell and Adeo Ressi to buy refurbished intercontinental ballistic missiles (ICBMs) that could send the greenhouse payloads into space.He met with the companies NPO Lavochkin and Kosmotras; however, Musk was seen as a novice and the group returned to the United States empty-handed.In February 2002, the group returned to Russia with Mike Griffin (president of In-Q-Tel) to look for three ICBMs.They had another meeting with Kosmotras and were offered one rocket for $8 million, which Musk rejected.He instead decided to start a company that could build affordable rockets.With $100 million of his own money, Musk founded SpaceX in May 2002 and became the company's CEO and Chief Engineer.SpaceX attempted its first launch of the Falcon 1 rocket in 2006.\n",
-      "Jordan started 53 of his 60 games for the season, averaging 24.3 ppg, 5.4 apg, and 6.0 rpg, and shooting 41.9% from the field in his 53 starts.His last seven appearances were in a reserve role, in which he averaged just over 20 minutes per game.The Wizards finished the season with a 37–45 record, an 18-game improvement.Playing in his 14th and final NBA All-Star Game in 2003, Jordan passed Kareem Abdul-Jabbar as the all-time leading scorer in All-Star Game history, a record since broken by Kobe Bryant and LeBron James.That year, Jordan was the only Washington player to play in all 82 games, starting in 67 of them, and coming from off the bench in 15.He averaged 20.0 ppg, 6.1 rpg, 3.8 assists, and 1.5 spg per game.He also shot 45% from the field, and 82% from the free-throw line.Even though he turned 40 during the season, he scored 20 or more points 42 times, 30 or more points nine times, and 40 or more points three times.\n",
-      "In the Eastern Conference Finals, the Pistons again defeated the Bulls, this time in six games, by utilizing their \"Jordan Rules\" method of guarding Jordan, which consisted of double and triple teaming him every time he touched the ball.The Bulls entered the 1989–90 season as a team on the rise, with their core group of Jordan and young improving players like Scottie Pippen and Horace Grant, and under the guidance of new coach Phil Jackson.On March 28, 1990, Jordan scored a career-high 69 points in a 117–113 road win over the Cavaliers.He averaged a league-leading 33.6 ppg on 52.6% shooting, to go with 6.9 rpg and 6.3 apg, in leading the Bulls to a 55–27 record.They again advanced to the Eastern Conference Finals after beating the Bucks and Philadelphia 76ers; despite pushing the series to seven games, the Bulls lost to the Pistons for the third consecutive season.\n",
-      "Jordan shot 37%, 35%, 42%, and 37% in all the seasons he shot over 200 three-pointers, and also shot 38.5%, 38.6%, 38.9%, 40.3%, 19.4%, and 30.2% in the playoffs during his championship runs, improving his shooting even after the three-point line reverted to the original line.In 1988, Jordan was honored with the NBA Defensive Player of the Year and the Most Valuable Player awards, becoming the first NBA player to win both awards in a career let alone season.In addition, he set both seasonal and career records for blocked shots by a guard, and combined this with his ball-thieving ability to become a standout defensive player.He ranks fourth in NBA history in total steals with 2,514, trailing John Stockton, Jason Kidd and Chris Paul.Jerry West often stated that he was more impressed with Jordan's defensive contributions than his offensive ones.Doc Rivers declared Jordan \"the best superstar defender in the history of the game\".Jordan was known to have strong eyesight.\n",
-      "== Wealth ==\n",
-      "\n",
-      "\n",
-      "=== Net worth ===\n",
-      "Musk made $175.8 million when PayPal was sold to eBay in 2002.He was first listed on the Forbes Billionaires List in 2012, with a net worth of $2 billion.At the start of 2020, Musk had a net worth of $27 billion.By the end of the year his net worth had increased by $150 billion, mostly driven by his ownership of around 20% of Tesla stock.During this period, Musk's net worth was often volatile.For example, it dropped $16.3 billion in September, the largest single-day plunge in Bloomberg Billionaires Index's history.In November of that year, Musk passed Facebook co-founder Mark Zuckerberg to become the third-richest person in the world; a week later he passed Microsoft co-founder Bill Gates to become the second-richest.In January 2021, Musk, with a net worth of $185 billion, surpassed Amazon founder Jeff Bezos to become the richest person in the world.Bezos reclaimed the top spot the following month.\n",
-      "=== xAI ===\n",
-      "On July 12, 2023, Elon Musk launched an artificial intelligence company called xAI, which aims to develop a generative AI program that competes with existing offerings like ChatGPT. The company has reportedly hired engineers from Google and OpenAI.\n",
-      "\n",
-      "\n",
-      "=== Leadership style ===\n",
-      "Musk is often described as a micromanager and has called himself a \"nano-manager\".The New York Times has characterized his approach as absolutist.Musk does not make formal business plans; instead, he says he prefers to approach engineering problems with an \"iterative design methodology\" and \"tolerance for failures\".He has forced employees to adopt the company's own jargon and launched ambitious, risky, and costly projects against his advisors' recommendations, such as removing front-facing radar from Tesla Autopilot.His insistence on vertical integration causes his companies to move most production in-house.\n",
-      "The Bulls finished the season 38–44, and lost to the Milwaukee Bucks in four games in the first round of the playoffs.An often-cited moment was on August 26, 1985, when Jordan shook the arena during a Nike exhibition game in Trieste, Italy, by shattering the glass of the backboard with a dunk.The moment was filmed and is often referred to worldwide as an important milestone in Jordan's rise.The shoes Jordan wore during the game were auctioned in August 2020 and sold for $615,000, a record for a pair of sneakers.Jordan's 1985–86 season was cut short when he broke his foot in the third game of the year, causing him to miss 64 games.The Bulls made the playoffs despite Jordan's injury and a 30–52 record, at the time the fifth-worst record of any team to qualify for the playoffs in NBA history.Jordan recovered in time to participate in the postseason and performed well upon his return.\n",
-      "On February 21, 2003, Jordan became the first 40-year-old to tally 43 points in an NBA game.During his stint with the Wizards, all of Jordan's home games at the MCI Center were sold out and the Wizards were the second most-watched team in the NBA, averaging 20,172 fans a game at home and 19,311 on the road.Jordan's final two seasons did not result in a playoff appearance for the Wizards, and he was often unsatisfied with the play of those around him.At several points, he openly criticized his teammates to the media, citing their lack of focus and intensity, notably that of Kwame Brown, the number-one draft pick in the 2001 NBA draft.\n",
-      "\n",
-      "\n",
-      "==== Final retirement (2003) ====\n",
-      "With the recognition that 2002–03 would be Jordan's final season, tributes were paid to him throughout the NBA.In his final game at the United Center in Chicago, which was his old home court, Jordan received a four-minute standing ovation.The Miami Heat retired the No.23 jersey on April 11, 2003, even though Jordan never played for the team.\n",
-      "On offense, he relied more upon instinct and improvization at game time.Noted as a durable player, Jordan did not miss four or more games while active for a full season from 1986–87 to 2001–02, when he injured his right knee.Of the 15 seasons Jordan was in the NBA, he played all 82 regular season games nine times.Jordan has frequently cited David Thompson, Walter Davis, and Jerry West as influences.Confirmed at the start of his career, and possibly later on, Jordan had a special \"Love of the Game Clause\" written into his contract, which was unusual at the time, and allowed him to play basketball against anyone at any time, anywhere.Jordan had a versatile offensive game and was capable of aggressively driving to the basket as well as drawing fouls from his opponents at a high rate.His 8,772 free throw attempts are the 11th-highest total in NBA history.As his career progressed, Jordan also developed the ability to post up his opponents and score with his trademark fadeaway jump shot, using his leaping ability to avoid block attempts.According to Hubie Brown, this move alone made him nearly unstoppable.\n",
-      "In October 2022, Musk stated that about 20,000 satellite terminals had been donated to Ukraine, together with free data transfer subscriptions, which cost SpaceX $80 million.After asking the United States Department of Defense to pay for further units and future subscriptions on behalf of Ukraine, Musk publicly stated that SpaceX would continue to provide Starlink to Ukraine for free, at a yearly cost to itself of $400 million.\n",
-      "\n",
-      "\n",
-      "=== Tesla ===\n",
-      "\n",
-      "Tesla, Inc.—originally Tesla Motors—was incorporated in July 2003 by Martin Eberhard and Marc Tarpenning, who financed the company until the Series A round of funding.Both men played active roles in the company's early development prior to Musk's involvement.Musk led the Series A round of investment in February 2004; he invested $6.5 million, became the majority shareholder, and joined Tesla's board of directors as chairman.Musk took an active role within the company and oversaw Roadster product design but was not deeply involved in day-to-day business operations.Following a series of escalating conflicts in 2007, and the financial crisis of 2007–2008, Eberhard was ousted from the firm.\n",
-      "=== Zip2 ===\n",
-      "\n",
-      "In 1995, Musk, his brother Kimbal, and Greg Kouri founded Zip2. Errol Musk provided them with $28,000 in funding. The company developed an Internet city guide with maps, directions, and yellow pages, and marketed it to newspapers. They worked at a small rented office in Palo Alto, with Musk coding the website every night. Eventually, Zip2 obtained contracts with The New York Times and the Chicago Tribune. The brothers persuaded the board of directors to abandon a merger with CitySearch; however, Musk's attempts to become CEO were thwarted. Compaq acquired Zip2 for $307 million in cash in February 1999, and Musk received $22 million for his 7-percent share.\n",
-      "\n",
-      "\n",
-      "=== X.com and PayPal ===\n",
-      "\n",
-      "Later in 1999, Musk co-founded X.com, an online financial services and e-mail payment company with $12 million of the money he made from the Compaq acquisition.X.com was one of the first federally insured online banks, and over 200,000 customers joined in its initial months of operation.\n",
-      "=== Charlotte Bobcats/Hornets ===\n",
-      "On June 15, 2006, Jordan bought a minority stake in the Charlotte Bobcats (known as the Hornets since 2013), becoming the team's second-largest shareholder behind majority owner Robert L. Johnson.As part of the deal, Jordan took full control over the basketball side of the operation, with the title Managing Member of Basketball Operations.Despite Jordan's previous success as an endorser, he has made an effort not to be included in Charlotte's marketing campaigns.A decade earlier, Jordan had made a bid to become part-owner of Charlotte's original NBA team, the Charlotte Hornets, but talks collapsed when owner George Shinn refused to give Jordan complete control of basketball operations.In February 2010, it was reported that Jordan was seeking majority ownership of the Bobcats.As February wore on, it became apparent that Jordan and former Houston Rockets president George Postolos were the leading contenders for ownership of the team.On February 27, the Bobcats announced that Johnson had reached an agreement with Jordan and his group, MJ Basketball Holdings, to buy the team from Johnson pending NBA approval.\n",
-      "Musk does not receive a salary from Tesla; he agreed with the board in 2018 to a compensation plan that ties his personal earnings to Tesla's valuation and revenue.The deal stipulated that Musk only receives the compensation if Tesla reaches certain market values.It was the largest such deal ever done between a CEO and a company board.In the first award, given in May 2020, he was eligible to purchase 1.69 million Tesla shares (about 1% of the company) at below-market prices, which was worth about $800 million.Musk paid $455 million in taxes on $1.52 billion of income between 2014 and 2018.According to ProPublica, Musk paid no federal income taxes in 2018.He claimed his 2021 tax bill was estimated at $12 billion based on his sale of $14 billion worth of Tesla stock.Musk has repeatedly described himself as \"cash poor\", and has \"professed to have little interest in the material trappings of wealth\".In May 2020, he pledged to sell almost all physical possessions.Musk has defended his wealth by saying he is accumulating resources for humanity's outward expansion to space.\n",
-      "The alpha design for the system was published in a whitepaper posted to the Tesla and SpaceX blogs.The document scoped out the technology and outlined a notional route where such a transport system could be built between the Greater Los Angeles Area and the San Francisco Bay Area, at an estimated cost of $6 billion.The proposal, if technologically feasible at the costs cited, would make Hyperloop travel cheaper than any other mode of transport for such long distances.In 2015, Musk announced a design competition for students and others to build Hyperloop pods, to operate on a SpaceX-sponsored mile-long track, for a 2015–2017 Hyperloop pod competition.The track was used in January 2017, and Musk also announced that the company had started a tunnel project, with Hawthorne Municipal Airport as its destination.In July 2017, Musk claimed that he had received \"verbal government approval\" to build a hyperloop from New York City to Washington, D.C., with stops in both Philadelphia and Baltimore.Mention of the projected DC-to-Baltimore leg was removed from the Boring Company website in 2021.\n",
-      "==== Pistons roadblock (1987–1990) ====\n",
-      "Jordan again led the league in scoring during the 1987–88 season, averaging 35.0 ppg on 53.5% shooting, and he won his first league MVP Award.He was also named the NBA Defensive Player of the Year, as he averaged 1.6 blocks per game (bpg), a league-high 3.1 steals per game (spg), and led the Bulls defense to the fewest points per game allowed in the league.The Bulls finished 50–32, and made it out of the first round of the playoffs for the first time in Jordan's career, as they defeated the Cleveland Cavaliers in five games.In the Eastern Conference Semifinals, the Bulls lost in five games to the more experienced Detroit Pistons, who were led by Isiah Thomas and a group of physical players known as the \"Bad Boys\".In the 1988–89 season, Jordan again led the league in scoring, averaging 32.5 ppg on 53.8% shooting from the field, along with 8 rpg and 8 apg.\n",
-      "=== OpenAI ===\n",
-      "\n",
-      "In 2015, Musk co-founded OpenAI, a not-for-profit artificial intelligence (AI) research company aiming to develop artificial general intelligence intended to be safe and beneficial to humanity. A particular focus of the company is to democratize artificial superintelligence systems, against governments and corporations. Musk pledged $1 billion of funding to OpenAI. In 2023, Musk tweeted that he had ended up giving a total of $100 million to OpenAI. TechCrunch later reported that, according to its own investigation of public records, \"only $15 million\" of OpenAI's funding could be definitively traced to Musk. Musk subsequently stated that he had donated about $50 million.In 2018, Musk left the OpenAI board to avoid possible future conflicts with his role as CEO of Tesla as the latter company increasingly became involved in AI through Tesla Autopilot. Since then, OpenAI has made significant advances in machine learning, producing neural networks such as GPT-3 (producing human-like text), and DALL-E (generating digital images from natural language descriptions).\n",
-      "Jordan's effective field goal percentage was 50%, and he had six seasons with at least 50% shooting, five of which consecutively (1988–1992); he also shot 51% and 50%, and 30% and 33% from the three-point range, throughout his first and second retirements, respectively, finishing his Chicago Bulls career with 31.5 points per game on 50.5 FG% shooting and his overall career with 49.7 FG% shooting.Unlike NBA players often compared to Jordan, such as Kobe Bryant and LeBron James, who had a similar three-point percentage, he did not shoot as many threes as they did, as he did not need to rely on the three-pointer in order to be effective on offense.Three-point shooting was only introduced in 1979 and would not be a more fundamental aspect of the game until the first decades of the 21st century, with the NBA having to briefly shorten the line to incentivize more shots.Jordan's three-point shooting was better selected, resulting in three-point field goals made in important games during the playoffs and the Finals, such as hitting six consecutive three-point shots in Game 1 of the 1992 NBA Finals.\n",
-      "=== The Boring Company ===\n",
-      "\n",
-      "In 2017, Musk founded the Boring Company to construct tunnels, and revealed plans for specialized, underground, high-occupancy vehicles that could travel up to 150 miles per hour (240 km/h) and thus circumvent above-ground traffic in major cities. Early in 2017, the company began discussions with regulatory bodies and initiated construction of a 30-foot (9.1 m) wide, 50-foot (15 m) long, and 15-foot (4.6 m) deep \"test trench\" on the premises of SpaceX's offices, as that required no permits. The Los Angeles tunnel, less than two miles (3.2 km) in length, debuted to journalists in 2018. It used Tesla Model Xs and was reported to be a rough ride while traveling at suboptimal speeds.Two tunnel projects announced in 2018, in Chicago and West Los Angeles, have been canceled. However, a tunnel beneath the Las Vegas Convention Center was completed in early 2021. Local officials have approved further expansions of the tunnel system. In 2021, tunnel construction was approved for Fort Lauderdale, Florida.\n",
-      "=== Chicago Bulls (1984–1993; 1995–1998) ===\n",
-      "\n",
-      "\n",
-      "==== Early NBA years (1984–1987) ====\n",
-      "The Chicago Bulls selected Jordan with the third overall pick of the 1984 NBA draft after Hakeem Olajuwon (Houston Rockets) and Sam Bowie (Portland Trail Blazers).One of the primary reasons why Jordan was not drafted sooner was because the first two teams were in need of a center.Trail Blazers general manager Stu Inman contended that it was not a matter of drafting a center but more a matter of taking Bowie over Jordan, in part because Portland already had Clyde Drexler, who was a guard with similar skills to Jordan.Citing Bowie's injury-laden college career, ESPN named the Blazers' choice of Bowie as the worst draft pick in North American professional sports history.Jordan made his NBA debut at Chicago Stadium on October 26, 1984, and scored 16 points.In 2021, a ticket stub from the game sold at auction for $264,000, setting a record for a collectible ticket stub.\n"
-     ]
-    }
-   ],
-   "source": [
-    "print(f\"Number of nodes: {len(nodes)}\")\n",
-    "for node in nodes:\n",
-    "    print(node.node.get_content())"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 71,
-   "id": "4ead6887-18c4-4855-8152-9b99016d4618",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "INFO:llama_index.indices.vector_store.retrievers.auto_retriever.auto_retriever:Using query str: childhood of a popular sports celebrity\n",
-      "Using query str: childhood of a popular sports celebrity\n",
-      "INFO:llama_index.indices.vector_store.retrievers.auto_retriever.auto_retriever:Using filters: {'category': 'Sports', 'country': 'United States'}\n",
-      "Using filters: {'category': 'Sports', 'country': 'United States'}\n",
-      "INFO:llama_index.indices.vector_store.retrievers.auto_retriever.auto_retriever:Using top_k: 2\n",
-      "Using top_k: 2\n",
-      "Knafel claimed Jordan promised her $5 million for remaining silent and agreeing not to file a paternity suit after Knafel learned she was pregnant in 1991; a DNA test showed Jordan was not the father of the child.Jordan proposed to his longtime girlfriend, Cuban-American model Yvette Prieto, on Christmas 2011, and they were married on April 27, 2013, at Bethesda-by-the-Sea Episcopal Church.It was announced on November 30, 2013, that the two were expecting their first child together.On February 11, 2014, Prieto gave birth to identical twin daughters named Victoria and Ysabel.In 2019, Jordan became a grandfather when his daughter Jasmine gave birth to a son, whose father is professional basketball player Rakeem Christmas.\n",
-      "\n",
-      "\n",
-      "== Media figure and business interests ==\n",
-      "\n",
-      "\n",
-      "=== Endorsements ===\n",
-      "Jordan is one of the most marketed sports figures in history.He has been a major spokesman for such brands as Nike, Coca-Cola, Chevrolet, Gatorade, McDonald's, Ball Park Franks, Rayovac, Wheaties, Hanes, and MCI.\n",
-      "James Jr. became command sergeant major of the 35th Signal Brigade of the U.S. Army's XVIII Airborne Corps and retired in 2006.In 1968, Jordan moved with his family to Wilmington, North Carolina.He attended Emsley A. Laney High School in Wilmington, where he highlighted his athletic career by playing basketball, baseball, and football.He tried out for the basketball varsity team during his sophomore year, but at a height of 5 feet 11 inches (1.80 m), he was deemed too short to play at that level.His taller friend Harvest Leroy Smith was the only sophomore to make the team.Motivated to prove his worth, Jordan became the star of Laney's junior varsity team and tallied some 40-point games.The following summer, he grew four inches (10 cm) and trained rigorously.Upon earning a spot on the varsity roster, he averaged more than 25 points per game (ppg) over his final two seasons of high school play.\n"
-     ]
-    }
-   ],
-   "source": [
-    "nodes = retriever.retrieve(\n",
-    "    \"Tell me about the childhood of a popular sports celebrity in the United States\"\n",
-    ")\n",
-    "for node in nodes:\n",
-    "    print(node.node.get_content())"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 72,
-   "id": "c0a8627c-662f-47b9-8e48-7c2f237dda1a",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "INFO:llama_index.indices.vector_store.retrievers.auto_retriever.auto_retriever:Using query str: college life of a billionaire who started at company at the age of 16\n",
-      "Using query str: college life of a billionaire who started at company at the age of 16\n",
-      "INFO:llama_index.indices.vector_store.retrievers.auto_retriever.auto_retriever:Using filters: {}\n",
-      "Using filters: {}\n",
-      "INFO:llama_index.indices.vector_store.retrievers.auto_retriever.auto_retriever:Using top_k: 2\n",
-      "Using top_k: 2\n",
-      "He reportedly hosted large, ticketed house parties to help pay for tuition, and wrote a business plan for an electronic book-scanning service similar to Google Books.In 1994, Musk held two internships in Silicon Valley: one at energy storage startup Pinnacle Research Institute, which investigated electrolytic ultracapacitors for energy storage, and another at Palo Alto–based startup Rocket Science Games.In 1995, he was accepted to a PhD program in materials science at Stanford University.However, Musk decided to join the Internet boom, dropping out two days after being accepted and applied for a job at Netscape, to which he reportedly never received a response.\n",
-      "\n",
-      "\n",
-      "== Business career ==\n",
-      "At age ten, he developed an interest in computing and video games, teaching himself how to program from the VIC-20 user manual.At age twelve, he sold his BASIC-based game Blastar to PC and Office Technology magazine for approximately $500.\n",
-      "\n",
-      "\n",
-      "=== Education ===\n",
-      "Musk attended Waterkloof House Preparatory School, Bryanston High School, and Pretoria Boys High School, from where he graduated.Musk applied for a Canadian passport through his Canadian-born mother, knowing that it would be easier to immigrate to the United States this way.While waiting for his application to be processed, he attended the University of Pretoria for five months.Musk arrived in Canada in June 1989 and lived with a second cousin in Saskatchewan for a year, working odd jobs at a farm and lumber mill.In 1990, he entered Queen's University in Kingston, Ontario.Two years later, he transferred to the University of Pennsylvania (UPenn), where he completed studies for a Bachelor of Arts degree in physics and a Bachelor of Science degree in economics from the Wharton School.Although Musk claims he earned the degrees in 1995, UPenn maintains it awarded them in 1997.\n"
-     ]
-    }
-   ],
-   "source": [
-    "nodes = retriever.retrieve(\n",
-    "    \"Tell me about the college life of a billionaire who started at company at the age of 16\"\n",
-    ")\n",
-    "for node in nodes:\n",
-    "    print(node.node.get_content())"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 73,
-   "id": "33425c39-e0e7-4415-b695-66c96d3fc7d1",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "INFO:llama_index.indices.vector_store.retrievers.auto_retriever.auto_retriever:Using query str: childhood of a billionaire\n",
-      "Using query str: childhood of a billionaire\n",
-      "INFO:llama_index.indices.vector_store.retrievers.auto_retriever.auto_retriever:Using filters: {'country': 'UK'}\n",
-      "Using filters: {'country': 'UK'}\n",
-      "INFO:llama_index.indices.vector_store.retrievers.auto_retriever.auto_retriever:Using top_k: 2\n",
-      "Using top_k: 2\n",
-      "Branson has also talked openly about having ADHD.Branson's parents were supportive of his endeavours from an early age.His mother was an entrepreneur; one of her most successful ventures was building and selling wooden tissue boxes and wastepaper bins.In London, he started off squatting from 1967 to 1968.Branson is an atheist.He said in a 2011 interview with CNN's Piers Morgan that he believes in evolution and the importance of humanitarian efforts but not in the existence of God.\"I would love to believe,\" he said.\"It's very comforting to believe\".\n",
-      "\n",
-      "\n",
-      "== Early business career ==\n",
-      "After failed attempts to grow and sell both Christmas trees and budgerigars, Branson launched a magazine named Student in 1966 with Nik Powell.The first issue of Student appeared in January 1968, and a year later, Branson's net worth was estimated at £50,000.The office for the venture was situated in the crypt of St. John's Church, off Bayswater Road, in London.Though not initially as successful as he hoped, the magazine later became a vital component of the mail-order record business Branson started from the same church he used for Student.\n",
-      "In March 2000, Branson was knighted at Buckingham Palace for \"services to entrepreneurship\".For his work in retail, music and transport (with interests in land, air, sea and space travel), his taste for adventure and for his humanitarian work, he has become a prominent global figure.In 2007, he was placed in the Time 100 Most Influential People in the World list.In June 2023, Forbes listed Branson's estimated net worth at US$3 billion.On 11 July 2021, Branson travelled as a passenger onboard Virgin Galactic Unity 22 at the edge of space, a suborbital test flight for his spaceflight company Virgin Galactic.The mission lasted approximately one hour, reaching a peak altitude of 53.5 miles (86.1 km).At 70, Branson became the third oldest person to fly to space.\n",
-      "\n",
-      "\n",
-      "== Early life ==\n",
-      "Richard Charles Nicholas Branson was born on 18 July 1950 in Blackheath, London, the son of Edward James Branson (1918–2011), a barrister, and his wife Evette Huntley Branson (née Flindt; 1924–2021), a former ballet dancer and air hostess.\n"
-     ]
-    }
-   ],
-   "source": [
-    "nodes = retriever.retrieve(\"Tell me about the childhood of a UK billionaire\")\n",
-    "for node in nodes:\n",
-    "    print(node.node.get_content())"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "id": "45612783-c82c-48f7-ace9-d04341b9d3ec",
-   "metadata": {},
-   "source": [
-    "## Build Recursive Retriever over Document Summaries"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 87,
-   "id": "6a127983-f083-41d8-9299-e8f93e1ee112",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "from llama_index.schema import IndexNode"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 98,
-   "id": "07d4509e-715c-4844-975b-db2afa900fe5",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "**Summary for Michael Jordan: Michael Jordan, often referred to as MJ, is a retired professional basketball player from the United States who is widely considered one of the greatest players in the history of the sport. He played 15 seasons in the NBA, primarily with the Chicago Bulls, and won six NBA championships. His individual accolades include six NBA Finals MVP awards, ten NBA scoring titles, five NBA MVP awards, and fourteen NBA All-Star Game selections. He also holds the NBA records for career regular season scoring average and career playoff scoring average. Jordan briefly retired to play Minor League Baseball, but returned to lead the Bulls to three more championships. He was twice inducted into the Naismith Memorial Basketball Hall of Fame. \n",
-      "\n",
-      "After retiring, Jordan became a successful businessman, part-owner and head of basketball operations for the Charlotte Hornets, and owner of 23XI Racing in the NASCAR Cup Series. He has also made significant contributions to charitable causes, donating millions to organizations such as the Make-A-Wish Foundation and Habitat for Humanity. In the entertainment industry, he has appeared in productions like \"Space Jam\" and \"The Last Dance\", and has authored several books about his life and career. His influence extends beyond sports, making him a significant cultural figure.\n",
-      "**Summary for Elon Musk: Elon Musk is a globally recognized business magnate and investor, who has founded and led numerous high-profile technology companies. He is the founder, CEO, and chief technology officer of SpaceX, an aerospace manufacturer and space transportation company, and the CEO and product architect of Tesla, Inc., a company specializing in electric vehicles and clean energy. Musk also owns and chairs X Corp, and founded the Boring Company, a tunnel construction and infrastructure company. He co-founded Neuralink, a neurotechnology company, and OpenAI, a nonprofit artificial intelligence research company. \n",
-      "\n",
-      "In 2022, Musk acquired Twitter and merged it with X Corp, and also founded xAI, an AI company. Despite his success, he has faced criticism for his controversial statements and management style. Musk was born in South Africa, moved to Canada at 18, and later to the United States to attend Stanford University, but dropped out to start his entrepreneurial journey. He co-founded Zip2 and X.com (later PayPal), which was sold to eBay in 2002. \n",
-      "\n",
-      "Musk envisions a future that includes Mars colonization and the development of a high-speed transportation system known as the Hyperloop. As of August 2023, he is the wealthiest person in the world, with a net worth of over $200 billion. Despite various controversies, Musk has made significant contributions to the tech industry. He has been married multiple times, has several children, and is known for his active presence on social media, particularly Twitter.\n",
-      "**Summary for Richard Branson: Richard Branson, born on 18 July 1950, is a British business magnate, commercial astronaut, and philanthropist. He founded the Virgin Group in the 1970s, which now controls over 400 companies in various fields such as aviation, music, and space travel. His first business venture was a magazine called Student, and he later established a mail-order record business and a chain of record stores known as Virgin Records. The Virgin brand expanded rapidly during the 1980s with the start of Virgin Atlantic airline and the expansion of the Virgin Records music label. In 1997, he founded the Virgin Rail Group, and in 2004, he founded Virgin Galactic. Branson was knighted in 2000 for his services to entrepreneurship. He has a net worth of US$3 billion as of June 2023. Branson has also been involved in numerous philanthropic activities and has launched initiatives like Virgin Startup. Despite his success, he has faced criticism and legal issues, including a brief jail term for tax evasion in 1971. He is married to Joan Templeman, with whom he has two children.\n",
-      "**Summary for Rihanna: Rihanna, whose real name is Robyn Rihanna Fenty, is a renowned Barbadian singer, songwriter, actress, and businesswoman. She rose to fame after signing with Def Jam in 2005 and releasing her first two albums, \"Music of the Sun\" and \"A Girl Like Me\". Her third album, \"Good Girl Gone Bad\", solidified her status as a major music icon. Some of her other successful albums include \"Rated R\", \"Loud\", \"Talk That Talk\", and \"Unapologetic\", which was her first to reach number one on the Billboard 200. \n",
-      "\n",
-      "Rihanna has sold over 250 million records worldwide, making her one of the best-selling music artists of all time. She has received numerous awards, including nine Grammy Awards, 12 Billboard Music Awards, and 13 American Music Awards. She also holds six Guinness World Records. \n",
-      "\n",
-      "In addition to her music career, Rihanna has ventured into business, founding the cosmetics brand Fenty Beauty and the fashion house Fenty under LVMH. She has also acted in several films, including \"Battleship\", \"Home\", \"Valerian and the City of a Thousand Planets\", and \"Ocean's 8\". \n",
-      "\n",
-      "Rihanna is also known for her philanthropic work, particularly through her Believe Foundation and the Clara Lionel Foundation. As of 2023, she is the wealthiest female musician, with an estimated net worth of $1.4 billion.\n"
-     ]
-    }
-   ],
-   "source": [
-    "# define top-level nodes and vector retrievers\n",
-    "nodes = []\n",
-    "vector_query_engines = {}\n",
-    "vector_retrievers = {}\n",
-    "\n",
-    "for wiki_title in wiki_titles:\n",
-    "    # build vector index\n",
-    "    vector_index = VectorStoreIndex.from_documents(\n",
-    "        [docs_dict[wiki_title]], service_context=service_context\n",
-    "    )\n",
-    "    # define query engines\n",
-    "    vector_query_engine = vector_index.as_query_engine()\n",
-    "    vector_query_engines[wiki_title] = vector_query_engine\n",
-    "    vector_retrievers[wiki_title] = vector_index.as_retriever()\n",
-    "\n",
-    "    # save summaries\n",
-    "    out_path = Path(\"summaries\") / f\"{wiki_title}.txt\"\n",
-    "    if not out_path.exists():\n",
-    "        # use LLM-generated summary\n",
-    "        list_index = ListIndex.from_documents(\n",
-    "            [docs_dict[wiki_title]], service_context=service_context\n",
-    "        )\n",
-    "\n",
-    "        summarizer = list_index.as_query_engine(response_mode=\"tree_summarize\")\n",
-    "        response = await summarizer.aquery(f\"Give me a summary of {wiki_title}\")\n",
-    "\n",
-    "        wiki_summary = response.response\n",
-    "        Path(\"summaries\").mkdir(exist_ok=True)\n",
-    "        with open(out_path, \"w\") as fp:\n",
-    "            fp.write(wiki_summary)\n",
-    "    else:\n",
-    "        with open(out_path, \"r\") as fp:\n",
-    "            wiki_summary = fp.read()\n",
-    "\n",
-    "    print(f\"**Summary for {wiki_title}: {wiki_summary}\")\n",
-    "    node = IndexNode(text=wiki_summary, index_id=wiki_title)\n",
-    "    nodes.append(node)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 99,
-   "id": "f3f10b9d-2cdc-44d7-9f2c-44c1438fad9e",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# define top-level retriever\n",
-    "top_vector_index = VectorStoreIndex(nodes)\n",
-    "top_vector_retriever = top_vector_index.as_retriever(similarity_top_k=1)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 100,
-   "id": "65547fe1-6d2c-4658-8552-08a34f9c763f",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# define recursive retriever\n",
-    "from llama_index.retrievers import RecursiveRetriever\n",
-    "from llama_index.query_engine import RetrieverQueryEngine\n",
-    "from llama_index.response_synthesizers import get_response_synthesizer"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 102,
-   "id": "bfb2b340",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# note: can pass `agents` dict as `query_engine_dict` since every agent can be used as a query engine\n",
-    "recursive_retriever = RecursiveRetriever(\n",
-    "    \"vector\",\n",
-    "    retriever_dict={\"vector\": top_vector_retriever, **vector_retrievers},\n",
-    "    # query_engine_dict=vector_query_engines,\n",
-    "    verbose=True,\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 103,
-   "id": "3df4a090-c45a-4e3e-8d0a-f2955204bf26",
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "\u001b[36;1m\u001b[1;3mRetrieving with query id None: Tell me about a celebrity from the United States\n",
-      "\u001b[0m\u001b[38;5;200m\u001b[1;3mRetrieved node with id, entering: Michael Jordan\n",
-      "\u001b[0m\u001b[36;1m\u001b[1;3mRetrieving with query id Michael Jordan: Tell me about a celebrity from the United States\n",
-      "\u001b[0m\u001b[38;5;200m\u001b[1;3mRetrieving text node: He was interviewed at three homes associated with the production and did not want cameras in his home or on his plane, as according to director Jason Hehir \"there are certain aspects of his life that he wants to keep private\".Jordan granted rapper Travis Scott permission to film a music video for his single \"Franchise\" at his home in Highland Park, Illinois.Jordan appeared in the 2022 miniseries The Captain, which follows the life and career of Derek Jeter.\n",
-      "\n",
-      "\n",
-      "=== Books ===\n",
-      "Jordan has authored several books focusing on his life, basketball career, and world view.\n",
-      "\n",
-      "Rare Air: Michael on Michael, with Mark Vancil and Walter Iooss (Harper San Francisco, 1993).\n",
-      "I Can't Accept Not Trying: Michael Jordan on the Pursuit of Excellence, with Mark Vancil and Sandro Miller (Harper San Francisco, 1994).\n",
-      "For the Love of the Game: My Story, with Mark Vancil (Crown Publishers, 1998).\n",
-      "Driven from Within, with Mark Vancil (Atria Books, 2005).\n",
-      "\u001b[0m\u001b[38;5;200m\u001b[1;3mRetrieving text node: In the September 1996 issue of Sport, which was the publication's 50th-anniversary issue, Jordan was named the greatest athlete of the past 50 years.Jordan's athletic leaping ability, highlighted in his back-to-back Slam Dunk Contest championships in 1987 and 1988, is credited by many people with having influenced a generation of young players.Several NBA players, including James and Dwyane Wade, have stated that they considered Jordan their role model while they were growing up.In addition, commentators have dubbed a number of next-generation players \"the next Michael Jordan\" upon their entry to the NBA, including Penny Hardaway, Grant Hill, Allen Iverson, Bryant, Vince Carter, James, and Wade.Some analysts, such as The Ringer's Dan Devine, drew parallels between Jordan's experiment at point guard in the 1988–89 season and the modern NBA; for Devine, it \"inadvertently foreshadowed the modern game's stylistic shift toward monster-usage primary playmakers\", such as Russell Westbrook, James Harden, Luka Dončić, and James.Don Nelson stated: \"I would've been playing him at point guard the day he showed up as a rookie.\n",
-      "\u001b[0mHe was interviewed at three homes associated with the production and did not want cameras in his home or on his plane, as according to director Jason Hehir \"there are certain aspects of his life that he wants to keep private\".Jordan granted rapper Travis Scott permission to film a music video for his single \"Franchise\" at his home in Highland Park, Illinois.Jordan appeared in the 2022 miniseries The Captain, which follows the life and career of Derek Jeter.\n",
-      "\n",
-      "\n",
-      "=== Books ===\n",
-      "Jordan has authored several books focusing on his life, basketball career, and world view.\n",
-      "\n",
-      "Rare Air: Michael on Michael, with Mark Vancil and Walter Iooss (Harper San Francisco, 1993).\n",
-      "I Can't Accept Not Trying: Michael Jordan on the Pursuit of Excellence, with Mark Vancil and Sandro Miller (Harper San Francisco, 1994).\n",
-      "For the Love of the Game: My Story, with Mark Vancil (Crown Publishers, 1998).\n",
-      "Driven from Within, with Mark Vancil (Atria Books, 2005).\n",
-      "In the September 1996 issue of Sport, which was the publication's 50th-anniversary issue, Jordan was named the greatest athlete of the past 50 years.Jordan's athletic leaping ability, highlighted in his back-to-back Slam Dunk Contest championships in 1987 and 1988, is credited by many people with having influenced a generation of young players.Several NBA players, including James and Dwyane Wade, have stated that they considered Jordan their role model while they were growing up.In addition, commentators have dubbed a number of next-generation players \"the next Michael Jordan\" upon their entry to the NBA, including Penny Hardaway, Grant Hill, Allen Iverson, Bryant, Vince Carter, James, and Wade.Some analysts, such as The Ringer's Dan Devine, drew parallels between Jordan's experiment at point guard in the 1988–89 season and the modern NBA; for Devine, it \"inadvertently foreshadowed the modern game's stylistic shift toward monster-usage primary playmakers\", such as Russell Westbrook, James Harden, Luka Dončić, and James.Don Nelson stated: \"I would've been playing him at point guard the day he showed up as a rookie.\n"
-     ]
-    }
-   ],
-   "source": [
-    "# ?\n",
-    "nodes = recursive_retriever.retrieve(\"Tell me about a celebrity from the United States\")\n",
-    "for node in nodes:\n",
-    "    print(node.node.get_content())"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 104,
-   "id": "7f3a83e8-5872-4d55-9307-f4cd4c79216c",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "\u001b[36;1m\u001b[1;3mRetrieving with query id None: Tell me about the childhood of a billionaire who started at company at the age of 16\n",
-      "\u001b[0m\u001b[38;5;200m\u001b[1;3mRetrieved node with id, entering: Richard Branson\n",
-      "\u001b[0m\u001b[36;1m\u001b[1;3mRetrieving with query id Richard Branson: Tell me about the childhood of a billionaire who started at company at the age of 16\n",
-      "\u001b[0m\u001b[38;5;200m\u001b[1;3mRetrieving text node: Branson has also talked openly about having ADHD.Branson's parents were supportive of his endeavours from an early age.His mother was an entrepreneur; one of her most successful ventures was building and selling wooden tissue boxes and wastepaper bins.In London, he started off squatting from 1967 to 1968.Branson is an atheist.He said in a 2011 interview with CNN's Piers Morgan that he believes in evolution and the importance of humanitarian efforts but not in the existence of God.\"I would love to believe,\" he said.\"It's very comforting to believe\".\n",
-      "\n",
-      "\n",
-      "== Early business career ==\n",
-      "After failed attempts to grow and sell both Christmas trees and budgerigars, Branson launched a magazine named Student in 1966 with Nik Powell.The first issue of Student appeared in January 1968, and a year later, Branson's net worth was estimated at £50,000.The office for the venture was situated in the crypt of St. John's Church, off Bayswater Road, in London.Though not initially as successful as he hoped, the magazine later became a vital component of the mail-order record business Branson started from the same church he used for Student.\n",
-      "\u001b[0m\u001b[38;5;200m\u001b[1;3mRetrieving text node: In March 2000, Branson was knighted at Buckingham Palace for \"services to entrepreneurship\".For his work in retail, music and transport (with interests in land, air, sea and space travel), his taste for adventure and for his humanitarian work, he has become a prominent global figure.In 2007, he was placed in the Time 100 Most Influential People in the World list.In June 2023, Forbes listed Branson's estimated net worth at US$3 billion.On 11 July 2021, Branson travelled as a passenger onboard Virgin Galactic Unity 22 at the edge of space, a suborbital test flight for his spaceflight company Virgin Galactic.The mission lasted approximately one hour, reaching a peak altitude of 53.5 miles (86.1 km).At 70, Branson became the third oldest person to fly to space.\n",
-      "\n",
-      "\n",
-      "== Early life ==\n",
-      "Richard Charles Nicholas Branson was born on 18 July 1950 in Blackheath, London, the son of Edward James Branson (1918–2011), a barrister, and his wife Evette Huntley Branson (née Flindt; 1924–2021), a former ballet dancer and air hostess.\n",
-      "\u001b[0mBranson has also talked openly about having ADHD.Branson's parents were supportive of his endeavours from an early age.His mother was an entrepreneur; one of her most successful ventures was building and selling wooden tissue boxes and wastepaper bins.In London, he started off squatting from 1967 to 1968.Branson is an atheist.He said in a 2011 interview with CNN's Piers Morgan that he believes in evolution and the importance of humanitarian efforts but not in the existence of God.\"I would love to believe,\" he said.\"It's very comforting to believe\".\n",
-      "\n",
-      "\n",
-      "== Early business career ==\n",
-      "After failed attempts to grow and sell both Christmas trees and budgerigars, Branson launched a magazine named Student in 1966 with Nik Powell.The first issue of Student appeared in January 1968, and a year later, Branson's net worth was estimated at £50,000.The office for the venture was situated in the crypt of St. John's Church, off Bayswater Road, in London.Though not initially as successful as he hoped, the magazine later became a vital component of the mail-order record business Branson started from the same church he used for Student.\n",
-      "In March 2000, Branson was knighted at Buckingham Palace for \"services to entrepreneurship\".For his work in retail, music and transport (with interests in land, air, sea and space travel), his taste for adventure and for his humanitarian work, he has become a prominent global figure.In 2007, he was placed in the Time 100 Most Influential People in the World list.In June 2023, Forbes listed Branson's estimated net worth at US$3 billion.On 11 July 2021, Branson travelled as a passenger onboard Virgin Galactic Unity 22 at the edge of space, a suborbital test flight for his spaceflight company Virgin Galactic.The mission lasted approximately one hour, reaching a peak altitude of 53.5 miles (86.1 km).At 70, Branson became the third oldest person to fly to space.\n",
-      "\n",
-      "\n",
-      "== Early life ==\n",
-      "Richard Charles Nicholas Branson was born on 18 July 1950 in Blackheath, London, the son of Edward James Branson (1918–2011), a barrister, and his wife Evette Huntley Branson (née Flindt; 1924–2021), a former ballet dancer and air hostess.\n"
-     ]
-    }
-   ],
-   "source": [
-    "nodes = recursive_retriever.retrieve(\n",
-    "    \"Tell me about the childhood of a billionaire who started at company at the age of 16\"\n",
-    ")\n",
-    "for node in nodes:\n",
-    "    print(node.node.get_content())"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "750e4416-644a-42f9-9acf-1bac2fa05748",
-   "metadata": {},
-   "outputs": [],
-   "source": []
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "llama_index_v2",
-   "language": "python",
-   "name": "llama_index_v2"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 3
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython3",
-   "version": "3.10.10"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 5
+    "cells": [
+        {
+            "cell_type": "markdown",
+            "id": "5fa01d8c-261f-4da2-8698-5eae785e2f81",
+            "metadata": {},
+            "source": [
+                "# Comparing Methods for Structured Retrieval (Auto-Retrieval vs. Recursive Retrieval)\n",
+                "\n",
+                "In a naive RAG system, the set of input documents is chunked, embedded, and dumped to a vector database collection. Retrieval would just fetch the top-k documents by embedding similarity.\n",
+                "\n",
+                "This can fail if the set of documents is large - it can be hard to disambiguate raw chunks, and you're not guaranteed to filter for the set of documents that contain relevant context.\n",
+                "\n",
+                "In this guide we explore **structured retrieval** - more advanced query algorithms that take advantage of structure within your documents for higher-precision retrieval. We compare the following two methods:\n",
+                "\n",
+                "- **Metadata Filters + Auto-Retrieval**: Tag each document with the right set of metadata. During query-time, use auto-retrieval to infer metadata filters along with passing through the query string for semantic search.\n",
+                "- **Store Document Hierarchies (summaries -> raw chunks) + Recursive Retrieval**: Embed document summaries and map that to the set of raw chunks for each document. During query-time, do recursive retrieval to first fetch summaries before fetching documents."
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 77,
+            "id": "15c61ad0-25e3-4a07-a5af-d604f36b84aa",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "import nest_asyncio\n",
+                "\n",
+                "nest_asyncio.apply()"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 1,
+            "id": "d38e8cbf-53da-4fed-8770-860e9e83d329",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "import logging\n",
+                "import sys\n",
+                "from llama_index import SimpleDirectoryReader, SummaryIndex, ServiceContext\n",
+                "\n",
+                "logging.basicConfig(stream=sys.stdout, level=logging.INFO)\n",
+                "logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 58,
+            "id": "9107ed7c-9727-40c9-ae43-862a8c67b7d0",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "wiki_titles = [\"Michael Jordan\", \"Elon Musk\", \"Richard Branson\", \"Rihanna\"]\n",
+                "wiki_metadatas = {\n",
+                "    \"Michael Jordan\": {\n",
+                "        \"category\": \"Sports\",\n",
+                "        \"country\": \"United States\",\n",
+                "    },\n",
+                "    \"Elon Musk\": {\n",
+                "        \"category\": \"Business\",\n",
+                "        \"country\": \"United States\",\n",
+                "    },\n",
+                "    \"Richard Branson\": {\n",
+                "        \"category\": \"Business\",\n",
+                "        \"country\": \"UK\",\n",
+                "    },\n",
+                "    \"Rihanna\": {\n",
+                "        \"category\": \"Music\",\n",
+                "        \"country\": \"Barbados\",\n",
+                "    },\n",
+                "}"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 59,
+            "id": "ec14903c-9e13-4a41-a8e0-f8bf9e0b5c89",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "from pathlib import Path\n",
+                "\n",
+                "import requests\n",
+                "\n",
+                "for title in wiki_titles:\n",
+                "    response = requests.get(\n",
+                "        \"https://en.wikipedia.org/w/api.php\",\n",
+                "        params={\n",
+                "            \"action\": \"query\",\n",
+                "            \"format\": \"json\",\n",
+                "            \"titles\": title,\n",
+                "            \"prop\": \"extracts\",\n",
+                "            # 'exintro': True,\n",
+                "            \"explaintext\": True,\n",
+                "        },\n",
+                "    ).json()\n",
+                "    page = next(iter(response[\"query\"][\"pages\"].values()))\n",
+                "    wiki_text = page[\"extract\"]\n",
+                "\n",
+                "    data_path = Path(\"data\")\n",
+                "    if not data_path.exists():\n",
+                "        Path.mkdir(data_path)\n",
+                "\n",
+                "    with open(data_path / f\"{title}.txt\", \"w\") as fp:\n",
+                "        fp.write(wiki_text)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 60,
+            "id": "cee3a660-dfff-4865-8fe8-75862a2b4c78",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "# Load all wiki documents\n",
+                "docs_dict = {}\n",
+                "for wiki_title in wiki_titles:\n",
+                "    doc = SimpleDirectoryReader(input_files=[f\"data/{wiki_title}.txt\"]).load_data()[0]\n",
+                "\n",
+                "    doc.metadata.update(wiki_metadatas[wiki_title])\n",
+                "    docs_dict[wiki_title] = doc"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 61,
+            "id": "438b87ce-729d-4fad-9464-b9fa30e069b0",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "from llama_index.llms import OpenAI\n",
+                "from llama_index.callbacks import LlamaDebugHandler, CallbackManager\n",
+                "\n",
+                "\n",
+                "llm = OpenAI(\"gpt-4\")\n",
+                "callback_manager = CallbackManager([LlamaDebugHandler()])\n",
+                "service_context = ServiceContext.from_defaults(\n",
+                "    llm=llm, callback_manager=callback_manager, chunk_size=256\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "markdown",
+            "id": "c07f3302-f0c1-485e-990e-ac6b1dda2577",
+            "metadata": {},
+            "source": [
+                "## Metadata Filters + Auto-Retrieval\n",
+                "\n",
+                "In this approach, we tag each Document with metadata (category, country), and store in a Weaviate vector db.\n",
+                "\n",
+                "During retrieval-time, we then perform \"auto-retrieval\" to infer the relevant set of metadata filters."
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 105,
+            "id": "c3a35007-9bd0-42cc-be52-ba0316f80635",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [
+                {
+                    "name": "stderr",
+                    "output_type": "stream",
+                    "text": [
+                        "/Users/jerryliu/Programming/gpt_index/.venv/lib/python3.10/site-packages/requests/sessions.py:806: ResourceWarning: unclosed <ssl.SSLSocket fd=77, family=AddressFamily.AF_INET, type=SocketKind.SOCK_STREAM, proto=0, laddr=('192.168.1.78', 63780), raddr=('34.111.207.94', 443)>\n",
+                        "  self.adapters[prefix] = adapter\n",
+                        "ResourceWarning: Enable tracemalloc to get the object allocation traceback\n"
+                    ]
+                }
+            ],
+            "source": [
+                "## Setup Weaviate\n",
+                "import weaviate\n",
+                "\n",
+                "# cloud\n",
+                "resource_owner_config = weaviate.AuthClientPassword(\n",
+                "    username=\"username\",\n",
+                "    password=\"password\",\n",
+                ")\n",
+                "client = weaviate.Client(\n",
+                "    \"https://llamaindex-test-ul4sgpxc.weaviate.network\",\n",
+                "    auth_client_secret=resource_owner_config,\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 106,
+            "id": "7051c353-ba65-4a47-bf9a-f5b97d7212ce",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "from llama_index import VectorStoreIndex, SimpleDirectoryReader\n",
+                "from llama_index.vector_stores import WeaviateVectorStore\n",
+                "from IPython.display import Markdown, display"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 107,
+            "id": "0c2dcf5d-f539-4e83-8017-0a79ef398132",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "# drop items from collection first\n",
+                "client.schema.delete_class(\"LlamaIndex\")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 108,
+            "id": "f2f0e889-1f16-4660-863c-dca8ef63fbe0",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "from llama_index.storage.storage_context import StorageContext\n",
+                "\n",
+                "# If you want to load the index later, be sure to give it a name!\n",
+                "vector_store = WeaviateVectorStore(weaviate_client=client, index_name=\"LlamaIndex\")\n",
+                "storage_context = StorageContext.from_defaults(vector_store=vector_store)\n",
+                "\n",
+                "# NOTE: you may also choose to define an index_name manually.\n",
+                "# index_name = \"test_prefix\"\n",
+                "# vector_store = WeaviateVectorStore(weaviate_client=client, index_name=index_name)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 109,
+            "id": "d01b32c2-5625-4b6f-87eb-198b5f3f37da",
+            "metadata": {
+                "scrolled": true,
+                "tags": []
+            },
+            "outputs": [
+                {
+                    "data": {
+                        "text/plain": [
+                            "{'class': 'LlamaIndex',\n",
+                            " 'description': 'Class for LlamaIndex',\n",
+                            " 'invertedIndexConfig': {'bm25': {'b': 0.75, 'k1': 1.2},\n",
+                            "  'cleanupIntervalSeconds': 60,\n",
+                            "  'stopwords': {'additions': None, 'preset': 'en', 'removals': None}},\n",
+                            " 'multiTenancyConfig': {'enabled': False},\n",
+                            " 'properties': [{'dataType': ['text'],\n",
+                            "   'description': 'Text property',\n",
+                            "   'indexFilterable': True,\n",
+                            "   'indexSearchable': True,\n",
+                            "   'name': 'text',\n",
+                            "   'tokenization': 'whitespace'},\n",
+                            "  {'dataType': ['text'],\n",
+                            "   'description': 'The ref_doc_id of the Node',\n",
+                            "   'indexFilterable': True,\n",
+                            "   'indexSearchable': True,\n",
+                            "   'name': 'ref_doc_id',\n",
+                            "   'tokenization': 'whitespace'},\n",
+                            "  {'dataType': ['text'],\n",
+                            "   'description': 'node_info (in JSON)',\n",
+                            "   'indexFilterable': True,\n",
+                            "   'indexSearchable': True,\n",
+                            "   'name': 'node_info',\n",
+                            "   'tokenization': 'whitespace'},\n",
+                            "  {'dataType': ['text'],\n",
+                            "   'description': 'The relationships of the node (in JSON)',\n",
+                            "   'indexFilterable': True,\n",
+                            "   'indexSearchable': True,\n",
+                            "   'name': 'relationships',\n",
+                            "   'tokenization': 'whitespace'}],\n",
+                            " 'replicationConfig': {'factor': 1},\n",
+                            " 'shardingConfig': {'virtualPerPhysical': 128,\n",
+                            "  'desiredCount': 1,\n",
+                            "  'actualCount': 1,\n",
+                            "  'desiredVirtualCount': 128,\n",
+                            "  'actualVirtualCount': 128,\n",
+                            "  'key': '_id',\n",
+                            "  'strategy': 'hash',\n",
+                            "  'function': 'murmur3'},\n",
+                            " 'vectorIndexConfig': {'skip': False,\n",
+                            "  'cleanupIntervalSeconds': 300,\n",
+                            "  'maxConnections': 64,\n",
+                            "  'efConstruction': 128,\n",
+                            "  'ef': -1,\n",
+                            "  'dynamicEfMin': 100,\n",
+                            "  'dynamicEfMax': 500,\n",
+                            "  'dynamicEfFactor': 8,\n",
+                            "  'vectorCacheMaxObjects': 1000000000000,\n",
+                            "  'flatSearchCutoff': 40000,\n",
+                            "  'distance': 'cosine',\n",
+                            "  'pq': {'enabled': False,\n",
+                            "   'bitCompression': False,\n",
+                            "   'segments': 0,\n",
+                            "   'centroids': 256,\n",
+                            "   'trainingLimit': 100000,\n",
+                            "   'encoder': {'type': 'kmeans', 'distribution': 'log-normal'}}},\n",
+                            " 'vectorIndexType': 'hnsw',\n",
+                            " 'vectorizer': 'none'}"
+                        ]
+                    },
+                    "metadata": {},
+                    "output_type": "display_data"
+                }
+            ],
+            "source": [
+                "# validate that the schema was created\n",
+                "class_schema = client.schema.get(\"LlamaIndex\")\n",
+                "display(class_schema)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 110,
+            "id": "3fce23e0-0632-4c72-97d3-e2c845e32555",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [
+                {
+                    "name": "stderr",
+                    "output_type": "stream",
+                    "text": [
+                        "Exception in thread TokenRefresh:\n",
+                        "Traceback (most recent call last):\n",
+                        "  File \"/Users/jerryliu/Programming/gpt_index/.venv/lib/python3.10/site-packages/urllib3/connectionpool.py\", line 703, in urlopen\n",
+                        "    httplib_response = self._make_request(\n",
+                        "  File \"/Users/jerryliu/Programming/gpt_index/.venv/lib/python3.10/site-packages/urllib3/connectionpool.py\", line 449, in _make_request\n",
+                        "    six.raise_from(e, None)\n",
+                        "  File \"<string>\", line 3, in raise_from\n",
+                        "  File \"/Users/jerryliu/Programming/gpt_index/.venv/lib/python3.10/site-packages/urllib3/connectionpool.py\", line 444, in _make_request\n",
+                        "    httplib_response = conn.getresponse()\n",
+                        "  File \"/Library/Frameworks/Python.framework/Versions/3.10/lib/python3.10/http/client.py\", line 1374, in getresponse\n",
+                        "    response.begin()\n",
+                        "  File \"/Library/Frameworks/Python.framework/Versions/3.10/lib/python3.10/http/client.py\", line 318, in begin\n",
+                        "    version, status, reason = self._read_status()\n",
+                        "  File \"/Library/Frameworks/Python.framework/Versions/3.10/lib/python3.10/http/client.py\", line 287, in _read_status\n",
+                        "    raise RemoteDisconnected(\"Remote end closed connection without\"\n",
+                        "http.client.RemoteDisconnected: Remote end closed connection without response\n",
+                        "\n",
+                        "During handling of the above exception, another exception occurred:\n",
+                        "\n",
+                        "Traceback (most recent call last):\n",
+                        "  File \"/Users/jerryliu/Programming/gpt_index/.venv/lib/python3.10/site-packages/requests/adapters.py\", line 486, in send\n",
+                        "    resp = conn.urlopen(\n",
+                        "  File \"/Users/jerryliu/Programming/gpt_index/.venv/lib/python3.10/site-packages/urllib3/connectionpool.py\", line 787, in urlopen\n",
+                        "    retries = retries.increment(\n",
+                        "  File \"/Users/jerryliu/Programming/gpt_index/.venv/lib/python3.10/site-packages/urllib3/util/retry.py\", line 550, in increment\n",
+                        "    raise six.reraise(type(error), error, _stacktrace)\n",
+                        "  File \"/Users/jerryliu/Programming/gpt_index/.venv/lib/python3.10/site-packages/urllib3/packages/six.py\", line 769, in reraise\n",
+                        "    raise value.with_traceback(tb)\n",
+                        "  File \"/Users/jerryliu/Programming/gpt_index/.venv/lib/python3.10/site-packages/urllib3/connectionpool.py\", line 703, in urlopen\n",
+                        "    httplib_response = self._make_request(\n",
+                        "  File \"/Users/jerryliu/Programming/gpt_index/.venv/lib/python3.10/site-packages/urllib3/connectionpool.py\", line 449, in _make_request\n",
+                        "    six.raise_from(e, None)\n",
+                        "  File \"<string>\", line 3, in raise_from\n",
+                        "  File \"/Users/jerryliu/Programming/gpt_index/.venv/lib/python3.10/site-packages/urllib3/connectionpool.py\", line 444, in _make_request\n",
+                        "    httplib_response = conn.getresponse()\n",
+                        "  File \"/Library/Frameworks/Python.framework/Versions/3.10/lib/python3.10/http/client.py\", line 1374, in getresponse\n",
+                        "    response.begin()\n",
+                        "  File \"/Library/Frameworks/Python.framework/Versions/3.10/lib/python3.10/http/client.py\", line 318, in begin\n",
+                        "    version, status, reason = self._read_status()\n",
+                        "  File \"/Library/Frameworks/Python.framework/Versions/3.10/lib/python3.10/http/client.py\", line 287, in _read_status\n",
+                        "    raise RemoteDisconnected(\"Remote end closed connection without\"\n",
+                        "urllib3.exceptions.ProtocolError: ('Connection aborted.', RemoteDisconnected('Remote end closed connection without response'))\n",
+                        "\n",
+                        "During handling of the above exception, another exception occurred:\n",
+                        "\n",
+                        "Traceback (most recent call last):\n",
+                        "  File \"/Library/Frameworks/Python.framework/Versions/3.10/lib/python3.10/threading.py\", line 1016, in _bootstrap_inner\n",
+                        "    self.run()\n",
+                        "  File \"/Library/Frameworks/Python.framework/Versions/3.10/lib/python3.10/threading.py\", line 953, in run\n",
+                        "    self._target(*self._args, **self._kwargs)\n",
+                        "  File \"/Users/jerryliu/Programming/gpt_index/.venv/lib/python3.10/site-packages/weaviate/connect/connection.py\", line 276, in periodic_refresh_token\n",
+                        "    self._session.token = self._session.refresh_token(\n",
+                        "  File \"/Users/jerryliu/Programming/gpt_index/.venv/lib/python3.10/site-packages/authlib/oauth2/client.py\", line 252, in refresh_token\n",
+                        "    return self._refresh_token(\n",
+                        "  File \"/Users/jerryliu/Programming/gpt_index/.venv/lib/python3.10/site-packages/authlib/oauth2/client.py\", line 368, in _refresh_token\n",
+                        "    resp = self._http_post(url, body=body, auth=auth, headers=headers, **kwargs)\n",
+                        "  File \"/Users/jerryliu/Programming/gpt_index/.venv/lib/python3.10/site-packages/authlib/oauth2/client.py\", line 425, in _http_post\n",
+                        "    return self.session.post(\n",
+                        "  File \"/Users/jerryliu/Programming/gpt_index/.venv/lib/python3.10/site-packages/requests/sessions.py\", line 637, in post\n",
+                        "    return self.request(\"POST\", url, data=data, json=json, **kwargs)\n",
+                        "  File \"/Users/jerryliu/Programming/gpt_index/.venv/lib/python3.10/site-packages/authlib/integrations/requests_client/oauth2_session.py\", line 109, in request\n",
+                        "    return super(OAuth2Session, self).request(\n",
+                        "  File \"/Users/jerryliu/Programming/gpt_index/.venv/lib/python3.10/site-packages/requests/sessions.py\", line 589, in request\n",
+                        "    resp = self.send(prep, **send_kwargs)\n",
+                        "  File \"/Users/jerryliu/Programming/gpt_index/.venv/lib/python3.10/site-packages/requests/sessions.py\", line 703, in send\n",
+                        "    r = adapter.send(request, **kwargs)\n",
+                        "  File \"/Users/jerryliu/Programming/gpt_index/.venv/lib/python3.10/site-packages/requests/adapters.py\", line 501, in send\n",
+                        "    raise ConnectionError(err, request=request)\n",
+                        "requests.exceptions.ConnectionError: ('Connection aborted.', RemoteDisconnected('Remote end closed connection without response'))\n",
+                        "sys:1: ResourceWarning: Unclosed socket <zmq.Socket(zmq.PUSH) at 0x2c4e7ebc0>\n",
+                        "ResourceWarning: Enable tracemalloc to get the object allocation traceback\n"
+                    ]
+                }
+            ],
+            "source": [
+                "index = VectorStoreIndex(\n",
+                "    [], storage_context=storage_context, service_context=service_context\n",
+                ")\n",
+                "\n",
+                "# add documents to index\n",
+                "for wiki_title in wiki_titles:\n",
+                "    index.insert(docs_dict[wiki_title])"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 68,
+            "id": "40f0717a-c868-4570-a8c6-57e3f50ce819",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "from llama_index.indices.vector_store.retrievers import VectorIndexAutoRetriever\n",
+                "from llama_index.vector_stores.types import MetadataInfo, VectorStoreInfo\n",
+                "\n",
+                "\n",
+                "vector_store_info = VectorStoreInfo(\n",
+                "    content_info=\"brief biography of celebrities\",\n",
+                "    metadata_info=[\n",
+                "        MetadataInfo(\n",
+                "            name=\"category\",\n",
+                "            type=\"str\",\n",
+                "            description=\"Category of the celebrity, one of [Sports, Entertainment, Business, Music]\",\n",
+                "        ),\n",
+                "        MetadataInfo(\n",
+                "            name=\"country\",\n",
+                "            type=\"str\",\n",
+                "            description=\"Country of the celebrity, one of [United States, UK, Barbados]\",\n",
+                "        ),\n",
+                "    ],\n",
+                ")\n",
+                "retriever = VectorIndexAutoRetriever(\n",
+                "    index,\n",
+                "    vector_store_info=vector_store_info,\n",
+                "    service_context=service_context,\n",
+                "    max_top_k=10000,\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 69,
+            "id": "249a8a74-5bde-4b42-9b43-62764484158b",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "INFO:llama_index.indices.vector_store.retrievers.auto_retriever.auto_retriever:Using query str: celebrity\n",
+                        "Using query str: celebrity\n",
+                        "INFO:llama_index.indices.vector_store.retrievers.auto_retriever.auto_retriever:Using filters: {'country': 'United States'}\n",
+                        "Using filters: {'country': 'United States'}\n",
+                        "INFO:llama_index.indices.vector_store.retrievers.auto_retriever.auto_retriever:Using top_k: 10000\n",
+                        "Using top_k: 10000\n"
+                    ]
+                }
+            ],
+            "source": [
+                "# NOTE: the \"set top-k to 10000\" is a hack to return all data.\n",
+                "# Right now auto-retrieval will always return a fixed top-k, there's a TODO to allow it to be None\n",
+                "# to fetch all data.\n",
+                "# So it's theoretically possible to have the LLM infer a None top-k value.\n",
+                "nodes = retriever.retrieve(\n",
+                "    \"Tell me about a celebrity from the United States, set top k to 10000\"\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 70,
+            "id": "56408d2f-f532-4010-bf81-7a8487433f9e",
+            "metadata": {
+                "scrolled": true,
+                "tags": []
+            },
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "Number of nodes: 124\n",
+                        "The Super Bowl commercial inspired the 1996 live action/animated film Space Jam, which starred Jordan and Bugs in a fictional story set during the former's first retirement from basketball.They have subsequently appeared together in several commercials for MCI.Jordan also made an appearance in the music video for Michael Jackson's \"Jam\" (1992).Since 2008, Jordan's yearly income from the endorsements is estimated to be over $40 million.In addition, when Jordan's power at the ticket gates was at its highest point, the Bulls regularly sold out both their home and road games.Due to this, Jordan set records in player salary by signing annual contracts worth in excess of US$30 million per season.An academic study found that Jordan's first NBA comeback resulted in an increase in the market capitalization of his client firms of more than $1 billion.Most of Jordan's endorsement deals, including his first deal with Nike, were engineered by his agent, David Falk.Jordan has described Falk as \"the best at what he does\" and that \"marketing-wise, he's great.He's the one who came up with the concept of 'Air Jordan'.\"\n",
+                        "Musk blamed the estrangement of his daughter on what the Financial Times characterized as \"the supposed takeover of elite schools and universities by neo-Marxists.\"In 2008, Musk began dating English actress Talulah Riley.They married two years later at Dornoch Cathedral in Scotland.In 2012, the couple divorced, before remarrying the following year.After briefly filing for divorce in 2014, Musk finalized a second divorce from Riley in 2016.Musk then dated Amber Heard for several months in 2017; he had reportedly been pursuing her since 2012.Johnny Depp later accused Musk of having an affair with Heard while she was still married to Depp.Musk and Heard both denied the affair.In 2018, Musk and Canadian musician Grimes revealed that they were dating.Grimes gave birth to their son in May 2020.According to Musk and Grimes, his name was \"X Æ A-12\" (); however, the name would have violated California regulations as it contained characters that are not in the modern English alphabet, and was then changed to \"X Æ A-Xii\".This drew more confusion, as Æ is not a letter in the modern English alphabet.\n",
+                        "=== Film and television ===\n",
+                        "Jordan played himself in the 1996 comedy film Space Jam.The film received mixed reviews, but it was a box office success, making $230 million worldwide, and earned more than $1 billion through merchandise sales.In 2000, Jordan was the subject of an IMAX documentary about his career with the Chicago Bulls, especially the 1998 NBA playoffs, titled Michael Jordan to the Max.Two decades later, the same period of Jordan's life was covered in much greater and more personal detail by the Emmy Award-winning The Last Dance, a 10-part TV documentary which debuted on ESPN in April and May 2020.The Last Dance relied heavily on about 500 hours of candid film of Jordan's and his teammates' off-court activities which an NBA Entertainment crew had shot over the course of the 1997–98 NBA season for use in a documentary.The project was delayed for many years because Jordan had not yet given his permission for the footage to be used.\n",
+                        "He was interviewed at three homes associated with the production and did not want cameras in his home or on his plane, as according to director Jason Hehir \"there are certain aspects of his life that he wants to keep private\".Jordan granted rapper Travis Scott permission to film a music video for his single \"Franchise\" at his home in Highland Park, Illinois.Jordan appeared in the 2022 miniseries The Captain, which follows the life and career of Derek Jeter.\n",
+                        "\n",
+                        "\n",
+                        "=== Books ===\n",
+                        "Jordan has authored several books focusing on his life, basketball career, and world view.\n",
+                        "\n",
+                        "Rare Air: Michael on Michael, with Mark Vancil and Walter Iooss (Harper San Francisco, 1993).\n",
+                        "I Can't Accept Not Trying: Michael Jordan on the Pursuit of Excellence, with Mark Vancil and Sandro Miller (Harper San Francisco, 1994).\n",
+                        "For the Love of the Game: My Story, with Mark Vancil (Crown Publishers, 1998).\n",
+                        "Driven from Within, with Mark Vancil (Atria Books, 2005).\n",
+                        "\"In April 2023, the government of the U.S. Virgin Islands sought to subpoena Musk for documents in a lawsuit alleging that JPMorgan Chase profited from Jeffrey Epstein's sex trafficking operation.In May, a judge granted the U.S. Virgin Islands' request to serve Musk electronically through Tesla after the U.S. territory had difficulty locating him.The efforts to subpoena Musk for documents do not implicate him in any wrongdoing and do not seek to have Musk testify under oath.\n",
+                        "\n",
+                        "\n",
+                        "== Public perception ==\n",
+                        "\n",
+                        "Though Musk's ventures were influential within their own industries in the 2000s, he only became a public figure in the early 2010s.He has often been described as an eccentric who makes spontaneous and controversial statements, contrary to other billionaires who prefer reclusiveness to protect their businesses.Celebrated by fans and hated by critics, Musk was described by Vance as having become very polarizing because of his \"part philosopher, part troll\" role on Twitter.With Steve Jobs and Donald Trump, Musk served as inspiration for the characterization of Tony Stark in the Marvel film Iron Man (2008).Musk had a cameo appearance in the film's 2010 sequel, Iron Man 2.\n",
+                        "Knafel claimed Jordan promised her $5 million for remaining silent and agreeing not to file a paternity suit after Knafel learned she was pregnant in 1991; a DNA test showed Jordan was not the father of the child.Jordan proposed to his longtime girlfriend, Cuban-American model Yvette Prieto, on Christmas 2011, and they were married on April 27, 2013, at Bethesda-by-the-Sea Episcopal Church.It was announced on November 30, 2013, that the two were expecting their first child together.On February 11, 2014, Prieto gave birth to identical twin daughters named Victoria and Ysabel.In 2019, Jordan became a grandfather when his daughter Jasmine gave birth to a son, whose father is professional basketball player Rakeem Christmas.\n",
+                        "\n",
+                        "\n",
+                        "== Media figure and business interests ==\n",
+                        "\n",
+                        "\n",
+                        "=== Endorsements ===\n",
+                        "Jordan is one of the most marketed sports figures in history.He has been a major spokesman for such brands as Nike, Coca-Cola, Chevrolet, Gatorade, McDonald's, Ball Park Franks, Rayovac, Wheaties, Hanes, and MCI.\n",
+                        "=== Business ventures ===\n",
+                        "In June 2010, Jordan was ranked by Forbes as the 20th-most-powerful celebrity in the world, with $55 million earned between June 2009 and June 2010.According to Forbes, Jordan Brand generates $1 billion in sales for Nike.In June 2014, Jordan was named the first NBA player to become a billionaire, after he increased his stake in the Charlotte Hornets from 80% to 89.5%.On January 20, 2015, Jordan was honored with the Charlotte Business Journal's Business Person of the Year for 2014.In 2017, he became a part owner of the Miami Marlins of Major League Baseball.Forbes designated Jordan as the athlete with the highest career earnings in 2017.From his Jordan Brand income and endorsements, Jordan's 2015 income was an estimated $110 million, the most of any retired athlete.As of 2023, his net worth is estimated at $2 billion by Forbes, making him the fifth-richest African-American, behind Robert F. Smith, David Steward, Oprah Winfrey, and Rihanna.Jordan co-owns an automotive group which bears his name.\n",
+                        "He reportedly hosted large, ticketed house parties to help pay for tuition, and wrote a business plan for an electronic book-scanning service similar to Google Books.In 1994, Musk held two internships in Silicon Valley: one at energy storage startup Pinnacle Research Institute, which investigated electrolytic ultracapacitors for energy storage, and another at Palo Alto–based startup Rocket Science Games.In 1995, he was accepted to a PhD program in materials science at Stanford University.However, Musk decided to join the Internet boom, dropping out two days after being accepted and applied for a job at Netscape, to which he reportedly never received a response.\n",
+                        "\n",
+                        "\n",
+                        "== Business career ==\n",
+                        "He starred as himself in the live-action/animation hybrid film Space Jam (1996) and was the central focus of the Emmy-winning documentary series The Last Dance (2020).He became part-owner and head of basketball operations for the Charlotte Hornets (then named the Bobcats) in 2006 and bought a controlling interest in 2010, before selling his majority stake in 2023, and he is also the owner of 23XI Racing in the NASCAR Cup Series.In 2016, he became the first billionaire player in NBA history.That year, President Barack Obama awarded him the Presidential Medal of Freedom.As of 2023, his net worth is estimated at $2 billion.\n",
+                        "\n",
+                        "\n",
+                        "== Early life ==\n",
+                        "Michael Jeffrey Jordan was born at Cumberland Hospital in the Fort Greene neighborhood of New York City's Brooklyn borough on February 17, 1963, to bank employee Deloris (née Peoples) and equipment supervisor James R. Jordan Sr.He has two older brothers, James R. Jordan Jr. and fellow basketball player Larry Jordan, as well as an older sister named Deloris and a younger sister named Roslyn.\n",
+                        "The New York Post revealed that Musk's ex-wife Talulah Riley had encouraged Musk to purchase Twitter, specifically citing the Bee's ban.Following the acquisition, he made reinstatement of accounts like the Bee an immediate priority.The Independent reported that Musk has \"appealed to far-right activists and influencers and unleashed a wave of hate speech and abuse aimed at LGBT+ people\" since taking control of Twitter.On December 18, Musk posted a poll to his Twitter account asking users to decide whether he should step down as the head of Twitter, with 57.5% out of the more than 17.5 million votes supporting that decision.Musk then announced that he would resign as CEO \"as soon as I find someone foolish enough to take the job\".On May 11, 2023, Musk announced that he would be stepping down from the CEO position and instead moving to \"exec chair & CTO, overseeing product, software & sysops\" and announced the new CEO, former NBCUniversal executive Linda Yaccarino.\n",
+                        "Musk has made cameos and appearances in other films such as Machete Kills (2013), Why Him?(2016), and Men in Black: International (2019).Television series in which he has appeared include The Simpsons (\"The Musk Who Fell to Earth\", 2015), The Big Bang Theory (\"The Platonic Permutation\", 2015), South Park (\"Members Only\", 2016), Young Sheldon (\"A Patch, a Modem, and a Zantac®\", 2017), Rick and Morty (\"One Crew over the Crewcoo's Morty\", 2019), and Saturday Night Live (2021).He contributed interviews to the documentaries Racing Extinction (2015) and the Werner Herzog-directed Lo and Behold (2016).Musk was elected a Fellow of the Royal Society (FRS) in 2018.In 2015, he received an honorary doctorate in engineering and technology from Yale University and IEEE Honorary Membership.\n",
+                        "In March 2019, Musk was later one of the 187 people who received various honors conferred by the King of Thailand for involvement in the rescue effort.Soon after the rescue, Vernon Unsworth, a British recreational caver who had been exploring the cave for the previous six years and played a key advisory role in the operation, criticized the submarine on CNN as amounting to nothing more than a public relations effort with no chance of success, maintaining that Musk \"had no conception of what the cave passage was like\" and \"can stick his submarine where it hurts\".Musk asserted on Twitter that the device would have worked and referred to Unsworth as a \"pedo guy\".He deleted the tweets, and apologized, and he deleted his responses to critical tweets from Cher Scarlett, a software engineer, which had caused his followers to harass her.In an email to BuzzFeed News, Musk later called Unsworth a \"child rapist\" and said that he had married a child.In September, Unsworth filed a defamation suit in the District Court for the Central District of California.\n",
+                        "== See also ==\n",
+                        "Forbes' list of the world's highest-paid athletes\n",
+                        "List of athletes who came out of retirement\n",
+                        "List of NBA teams by single season win percentage\n",
+                        "Michael Jordan's Restaurant\n",
+                        "Michael Jordan: Chaos in the Windy City\n",
+                        "Michael Jordan in Flight\n",
+                        "NBA 2K11\n",
+                        "NBA 2K12\n",
+                        "\n",
+                        "\n",
+                        "== Notes ==\n",
+                        "\n",
+                        "\n",
+                        "== References ==\n",
+                        "\n",
+                        "\n",
+                        "== Sources ==\n",
+                        "Condor, Bob (1998).Michael Jordan's 50 Greatest Games.Carol Publishing Group.ISBN 978-0-8065-2030-8.Halberstam, David (2000).Playing for Keeps: Michael Jordan and the World He Made.Broadway Books.ISBN 978-0-7679-0444-5.Jordan, Michael (1998).For the Love of the Game: My Story.New York City: Crown Publishers.ISBN 978-0-609-60206-5.Kotler, Philip; Rein, Irving J.; Shields, Ben (2006).The Elusive Fan: Reinventing Sports in a Crowded Marketplace.The McGraw-Hill Companies.ISBN 978-0-07-149114-3.\n",
+                        "23 retired by the North Carolina Tar HeelsHigh schoolMcDonald's All-American – 1981\n",
+                        "Parade All-American First Team – 1981Halls of FameTwo-time Naismith Memorial Basketball Hall of Fame inductee:\n",
+                        "Class of 2009 – individual\n",
+                        "Class of 2010 – as a member of the \"Dream Team\"\n",
+                        "United States Olympic Hall of Fame – Class of 2009 (as a member of the \"Dream Team\")\n",
+                        "North Carolina Sports Hall of Fame – Class of 2010\n",
+                        "Two-time FIBA Hall of Fame inductee:\n",
+                        "Class of 2015 – individual\n",
+                        "Class of 2017 – as a member of the \"Dream Team\"MediaThree-time Associated Press Athlete of the Year – 1991, 1992, 1993\n",
+                        "Sports Illustrated Sportsperson of the Year – 1991\n",
+                        "Ranked No.1 by Slam magazine's \"Top 50 Players of All-Time\"\n",
+                        "Ranked No.1 by ESPN SportsCentury's \"Top North American Athletes of the 20th Century\"\n",
+                        "10-time ESPY Award winner (in various categories)\n",
+                        "1997 Marca Leyenda winnerNational2016 Presidential Medal of FreedomState/localStatue inside the United Center\n",
+                        "Section of Madison Street in Chicago renamed Michael Jordan Drive – 1994\n",
+                        "=== Music ===\n",
+                        "In 2019, Musk, through Emo G Records, released a rap track, \"RIP Harambe\", on SoundCloud. The track, which refers to the killing of Harambe the gorilla and the subsequent Internet sensationalism surrounding the event, was performed by Yung Jake, written by Yung Jake and Caroline Polachek, and produced by BloodPop. The following year, Musk released an EDM track, \"Don't Doubt Ur Vibe\", featuring his own lyrics and vocals. While Guardian critic Alexi Petridis described it as \"indistinguishable... from umpteen competent but unthrilling bits of bedroom electronica posted elsewhere on Soundcloud\", TechCrunch said it was \"not a bad representation of the genre\".\n",
+                        "Also in July 2022, The Wall Street Journal reported that Musk allegedly had an affair with Nicole Shanahan, the wife of Google co-founder Sergey Brin, in 2021, leading to their divorce the following year.Musk denied the report.\n",
+                        "\n",
+                        "\n",
+                        "=== Legal matters ===\n",
+                        "\n",
+                        "In May 2022, Business Insider cited an anonymous friend of an unnamed SpaceX contract flight attendant, alleging that Musk engaged in sexual misconduct in 2016.The source stated that in November 2018, Musk, SpaceX, and the former flight attendant entered into a severance agreement granting the attendant a $250,000 payment in exchange for a promise not to sue over the claims.Musk responded, \"If I were inclined to engage in sexual harassment, this is unlikely to be the first time in my entire 30-year career that it comes to light\".He accused the article from Business Insider of being a \"politically motivated hit piece\".After the release of the Business Insider article, Tesla's stock fell by more than 6%, decreasing Musk's net worth by $10 billion.Barron's wrote \"...some investors considered key-man risk – the danger that a company could be badly hurt by the loss of one individual.\n",
+                        "=== Works cited ===\n",
+                        "Belfiore, Michael (2007). Rocketeers. New York: HarperCollins. ISBN 9780061149023.\n",
+                        "Berger, Eric (2021). Liftoff. William Morrow and Company. ISBN 9780062979971.\n",
+                        "Jackson, Erik (2004). The PayPal Wars: Battles with eBay, the Media, the Mafia, and the Rest of Planet Earth. Los Angeles: World Ahead Publishing. ISBN 9780974670102.\n",
+                        "Kidder, David; Hoffman, Reid (2013). The Startup Playbook: Secrets of the Fastest Growing Start-Ups from the founding Entrepreneurs. San Francisco: Chronicle Books. ISBN 9781452105048.\n",
+                        "Vance, Ashlee (2017) [2015]. Elon Musk: Tesla, SpaceX, and the Quest for a Fantastic Future (2nd ed.). New York: Ecco. ISBN 9780062301253.\n",
+                        "They had two sons, Jeffrey and Marcus, and a daughter, Jasmine.The Jordans filed for divorce on January 4, 2002, citing irreconcilable differences, but reconciled shortly thereafter.They again filed for divorce and were granted a final decree of dissolution of marriage on December 29, 2006, commenting that the decision was made \"mutually and amicably\".It is reported that Juanita received a $168 million settlement (equivalent to $244 million in 2022), making it the largest celebrity divorce settlement on public record at the time.In 1991, Jordan purchased a lot in Highland Park, Illinois, where he planned to build a 56,000-square-foot (5,200 m2) mansion.It was completed in 1995.He listed the mansion for sale in 2012.He also owns homes in North Carolina and Jupiter Island, Florida.On July 21, 2006, a judge in Cook County, Illinois, determined that Jordan did not owe his alleged former lover Karla Knafel $5 million in a breach of contract claim.Jordan had allegedly paid Knafel $250,000 to keep their relationship a secret.\n",
+                        "2003\n",
+                        "Three-time NBA All-Star Game MVP – 1988, 1996, 1998\n",
+                        "10-time All-NBA First Team – 1987–1993, 1996–1998\n",
+                        "One-time All-NBA Second Team – 1985\n",
+                        "Nine-time NBA All-Defensive First Team – 1988–1993, 1996–1998\n",
+                        "NBA All-Rookie First Team – 1985\n",
+                        "Two-time NBA Slam Dunk Contest champion – 1987, 1988\n",
+                        "Two-time IBM Award winner – 1985, 1989\n",
+                        "Named one of the 50 Greatest Players in NBA History in 1996\n",
+                        "Selected on the NBA 75th Anniversary Team in 2021\n",
+                        "No.23 retired by the Chicago Bulls\n",
+                        "No.\n",
+                        "Michael Jeffrey Jordan (born February 17, 1963), also  known by his initials MJ, is an American former professional basketball player and businessman.The official National Basketball Association (NBA) website states: \"By acclamation, Michael Jordan is the greatest basketball player of all time.\"He played fifteen seasons in the NBA, winning six NBA championships with the Chicago Bulls.He was integral in popularizing the sport of basketball and the NBA around the world in the 1980s and 1990s, becoming a global cultural icon.Jordan played college basketball for three seasons under coach Dean Smith with the North Carolina Tar Heels.As a freshman, he was a member of the Tar Heels' national championship team in 1982.Jordan joined the Bulls in 1984 as the third overall draft pick and quickly emerged as a league star, entertaining crowds with his prolific scoring while gaining a reputation as one of the game's best defensive players.His leaping ability, demonstrated by performing slam dunks from the free-throw line in Slam Dunk Contests, earned him the nicknames \"Air Jordan\" and \"His Airness\".Jordan won his first NBA title with the Bulls in 1991 and followed that achievement with titles in 1992 and 1993, securing a three-peat.\n",
+                        "== Personal life ==\n",
+                        "From the early 2000s until late 2020, Musk resided in California, where both Tesla and SpaceX were founded. In 2020, he relocated to Texas, saying that California had become \"complacent\" about its economic success. While hosting Saturday Night Live in May 2021, Musk revealed that he has Asperger syndrome. Musk is also a practitioner of Brazilian jiu-jitsu.\n",
+                        "\n",
+                        "\n",
+                        "=== Relationships and children ===\n",
+                        "Musk met his first wife, Canadian author Justine Wilson, while attending Queen's University in Ontario, Canada; and they married in 2000.In 2002, their first child died of sudden infant death syndrome at the age of 10 weeks.After his death, the couple decided to use IVF to continue their family.They had twins in 2004 followed by triplets in 2006.The couple divorced in 2008 and shared custody of their children.In 2022, one of the twins officially changed her name to reflect her gender identity as a trans woman, and to use Wilson as her last name because she no longer wished to be associated with Musk.\n",
+                        "In the September 1996 issue of Sport, which was the publication's 50th-anniversary issue, Jordan was named the greatest athlete of the past 50 years.Jordan's athletic leaping ability, highlighted in his back-to-back Slam Dunk Contest championships in 1987 and 1988, is credited by many people with having influenced a generation of young players.Several NBA players, including James and Dwyane Wade, have stated that they considered Jordan their role model while they were growing up.In addition, commentators have dubbed a number of next-generation players \"the next Michael Jordan\" upon their entry to the NBA, including Penny Hardaway, Grant Hill, Allen Iverson, Bryant, Vince Carter, James, and Wade.Some analysts, such as The Ringer's Dan Devine, drew parallels between Jordan's experiment at point guard in the 1988–89 season and the modern NBA; for Devine, it \"inadvertently foreshadowed the modern game's stylistic shift toward monster-usage primary playmakers\", such as Russell Westbrook, James Harden, Luka Dončić, and James.Don Nelson stated: \"I would've been playing him at point guard the day he showed up as a rookie.\n",
+                        "In his defense, Musk argued that \"'pedo guy' was a common insult used in South Africa when I was growing up ... synonymous with 'creepy old man' and is used to insult a person's appearance and demeanor\".The defamation case began in December 2019, with Unsworth seeking $190 million in damages.During the trial Musk apologized to Unsworth again for the tweet.On December 6, the jury found in favor of Musk and ruled he was not liable.\n",
+                        "Elon Reeve Musk ( EE-lon; born June 28, 1971) is a business magnate and investor.Musk is the founder, chairman, CEO and chief technology officer of SpaceX;  angel investor, CEO, product architect and former chairman of Tesla, Inc.; owner, chairman and CTO of X Corp.; founder of the Boring Company; co-founder of Neuralink and OpenAI; and president of the Musk Foundation.He is the wealthiest person in the world, with an estimated net worth of US$217 billion as of August 2023, according to the Bloomberg Billionaires Index, and $219 billion according to Forbes, primarily from his ownership stakes in both Tesla and SpaceX.Musk was born in Pretoria, South Africa, and briefly attended the University of Pretoria before immigrating to Canada at age 18, acquiring citizenship through his Canadian-born mother.Two years later, he matriculated at Queen's University in Kingston, Ontario.Musk later transferred to the University of Pennsylvania, and received bachelor's degrees in economics and physics there.He moved to California in 1995 to attend Stanford University.\n",
+                        "He also endorsed Kanye West's 2020 presidential campaign.He said he voted for Joe Biden in the 2020 U.S. presidential election.In 2022, Musk said that he could \"no longer support\" the Democrats because they are the \"party of division & hate\", and wrote a tweet encouraging \"independent-minded voters\" to vote Republican in the 2022 U.S. elections, which was an outlier among social media executives who typically avoid partisan political advocacy.He has supported Republican Ron DeSantis for the 2024 U.S. presidential election, and Twitter hosted DeSantis's campaign announcement on a Twitter Spaces event As of May 2023, Musk was declining to endorse any specific candidate.Musk opposes a \"billionaire's tax\", and has argued on Twitter with more left-leaning Democratic politicians such as Bernie Sanders, Alexandria Ocasio-Cortez, and Elizabeth Warren.He has raised questions about the Black Lives Matter protests, partially based on the fact that the phrase \"Hands up, don't shoot\" was made up.\n",
+                        "Two months later, Musk contracted COVID-19 and suggested his COVID-19 rapid antigen test results were dubious, after which the phrase \"Space Karen\" trended on Twitter, in reference to Musk.However, in December 2021, Musk revealed that he and his eligible children had received the vaccine.\n",
+                        "\n",
+                        "\n",
+                        "=== Finance ===\n",
+                        "Musk said that the U.S. government should not provide subsidies to companies, but impose a carbon tax to discourage poor behavior.The free market, in his view, would achieve the best solution, and producing environmentally unfriendly vehicles should have consequences.Tesla has received billions of dollars in subsidies.In addition, Tesla made large sums from government-initiated systems of zero-emissions credits offered in California and at the United States federal level, which facilitated initial consumer adoption of Tesla vehicles, as the tax credits given by governments enabled Tesla's battery electric vehicles to be price-competitive, in comparison with existing lower-priced internal combustion engine vehicles.\n",
+                        "== Personal views and Twitter (later X) usage ==\n",
+                        "\n",
+                        "Since joining Twitter (now known as X) in 2009, Musk has been an active user and has over 100 million followers as of June 2022. He posts memes, promotes business interests, and comments on contemporary political and cultural issues. Musk's statements have provoked controversy, such as for mocking preferred gender pronouns, and comparing Canadian prime minister Justin Trudeau to Adolf Hitler. The New York Times describes his contributions to international relations as \"chaotic\", and critics of Musk argue that there is a lack of separation between his opinions and his business interests. As CEO of Twitter, Musk emerged as a source of misinformation, for example by suggesting online details about mass murderer Mauricio Garcia's apparent interest in Nazism could have been planted as part of a psyop. Allegations of him being transphobic appeared as well in response to actions taken by Twitter under his guidance. The Israel government and several media outlets accused Musk of antisemitism due to him spreading George Soros conspiracy theories, although some Israeli officials defended Musk.\n",
+                        "\n",
+                        "\n",
+                        "=== Existential threats ===\n",
+                        "Musk has been described as believing in longtermism, emphasizing the needs of future populations.\n",
+                        "=== Tham Luang cave rescue and defamation case ===\n",
+                        "\n",
+                        "In July 2018, Musk arranged for his employees to build a mini-submarine to assist the rescue of children trapped in a flooded cavern in Thailand.Richard Stanton, leader of the international rescue diving team, urged Musk to facilitate the construction of the vehicle as a back-up, in case flooding worsened.Engineers at SpaceX and the Boring Company built the mini-submarine from a Falcon 9 liquid oxygen transfer tube in eight hours and personally delivered it to Thailand.By this time, however, eight of the 12 children, had already been rescued, the rescuers employing full face masks, oxygen, and anesthesia; consequently, Thai authorities declined to use the submarine.\n",
+                        "==== First retirement and stint in Minor League Baseball (1993–1995) ====\n",
+                        "\n",
+                        "On October 6, 1993, Jordan announced his retirement, saying that he lost his desire to play basketball.Jordan later said that the murder of his father three months earlier helped shape his decision.James R. Jordan Sr. was murdered on July 23, 1993, at a highway rest area in Lumberton, North Carolina, by two teenagers, Daniel Green and Larry Martin Demery, who carjacked his Lexus bearing the license plate \"UNC 0023\".His body, dumped in a South Carolina swamp, was not discovered until August 3.Green and Demery were found after they made calls on James Jordan's cell phone, convicted at a trial, and sentenced to life in prison.Jordan was close to his father; as a child, he imitated the way his father stuck out his tongue while absorbed in work.He later adopted it as his own signature, often displaying it as he drove to the basket.In 1996, he founded a Chicago-area Boys & Girls Club and dedicated it to his father.\n",
+                        "The child was eventually named X AE A-XII Musk, with \"X\" as a first name, \"AE A-XII\" as a middle name, and \"Musk\" as surname.In December 2021, Grimes and Musk had a second child, a daughter named Exa Dark Sideræl Musk (nicknamed \"Y\"), born via surrogacy.Despite the pregnancy, Musk confirmed reports that the couple were \"semi-separated\" in September 2021; in an interview with Time in December 2021, he said he was single.In March 2022, Grimes said of her relationship with Musk: \"I would probably refer to him as my boyfriend, but we're very fluid.\"Later that month, Grimes tweeted that she and Musk had broken up again but remained on good terms.In July 2022, Insider published court documents revealing that Musk had had twins with Shivon Zilis, director of operations and special projects at Neuralink, in November 2021.They were born weeks before Musk and Grimes had their second child via surrogate in December.The news \"raise[d] questions about workplace ethics\", given that Zilis directly reported to Musk.\n",
+                        "The company has a Nissan dealership in Durham, North Carolina, acquired in 1990, and formerly had a Lincoln–Mercury dealership from 1995 until its closure in June 2009.The company also owned a Nissan franchise in Glen Burnie, Maryland.The restaurant industry is another business interest of Jordan's.Restaurants he has owned include a steakhouse in New York City's Grand Central Terminal, among others; that restaurant closed in 2018.Jordan is the majority investor in a golf course, Grove XXIII, under construction in Hobe Sound, Florida.In September 2020, Jordan became an investor and advisor for DraftKings.\n",
+                        "\n",
+                        "\n",
+                        "=== Philanthropy ===\n",
+                        "From 2001 to 2014, Jordan hosted an annual golf tournament, the Michael Jordan Celebrity Invitational, that raised money for various charities.In 2006, Jordan and his wife Juanita pledged $5 million to Chicago's Hales Franciscan High School.The Jordan Brand has made donations to Habitat for Humanity and a Louisiana branch of the Boys & Girls Clubs of America.The Make-A-Wish Foundation named Jordan its Chief Wish Ambassador in 2008.In 2013, he granted his 200th wish for the organization.\n",
+                        "After Jordan received word of his acceptance into the Hall of Fame, he selected Class of 1996 member David Thompson to present him.As Jordan would later explain during his induction speech in September 2009, he was not a fan of the Tar Heels when growing up in North Carolina but greatly admired Thompson, who played for the rival NC State Wolfpack.In September, he was inducted into the Hall with several former Bulls teammates in attendance, including Scottie Pippen, Dennis Rodman, Charles Oakley, Ron Harper, Steve Kerr, and Toni Kukoč.Dean Smith and Doug Collins, two of Jordan's former coaches, were also among those present.His emotional reaction during his speech when he began to cry was captured by Associated Press photographer Stephan Savoia and would later go viral on social media as the \"Crying Jordan\" Internet meme.In 2016, President Barack Obama honored Jordan with the Presidential Medal of Freedom.In October 2021, Jordan was named to the NBA 75th Anniversary Team.In September 2022, Jordan's jersey in which he played the opening game of the 1998 NBA Finals was sold for $10.1 million, making it the most expensive game-worn sports memorabilia in history.\n",
+                        "Awards for his contributions to the development of the Falcon rockets include the American Institute of Aeronautics and Astronautics George Low Transportation Award in 2008, the Fédération Aéronautique Internationale Gold Space Medal in 2010, and the Royal Aeronautical Society Gold Medal in 2012.Time has listed Musk as one of the most influential people in the world on four occasions in 2010, 2013, 2018, and 2021.Musk was selected as Time's \"Person of the Year\" for 2021.Time editor-in-chief Edward Felsenthal wrote that \"Person of the Year is a marker of influence, and few individuals have had more influence than Musk on life on Earth, and potentially life off Earth too\".In February 2022, Musk was elected as a member of the National Academy of Engineering.\n",
+                        "\n",
+                        "\n",
+                        "== Notes and references ==\n",
+                        "\n",
+                        "\n",
+                        "=== Notes ===\n",
+                        "\n",
+                        "\n",
+                        "=== Citations ===\n",
+                        "Kruger, Mitchell (2003).One Last Shot: The Story of Michael Jordan's Comeback.New York City: St. Martin's Paperbacks.ISBN 978-0-312-99223-1.Lazenby, Roland (2014).Michael Jordan: The Life.New York City: Little, Brown and Company.ISBN 978-0-316-19477-8.LaFeber, Walter (2002).Michael Jordan and the New Global Capitalism.W. W. Norton.ISBN 978-0-393-32369-6.Markovits, Andrei S.; Rensman, Lars (June 3, 2010).Gaming the World: How Sports are Reshaping Global Politics and Culture.Princeton University Press.ISBN 978-0-691-13751-3.Porter, David L. (2007).Michael Jordan: A Biography.Greenwood Publishing Group.ISBN 978-0-313-33767-3.The Sporting News Official NBA Register 1994–95 (1994).The Sporting News.ISBN 978-0-89204-501-3.\n",
+                        "His mother, Maye Musk (née Haldeman), is a model and dietitian born in Saskatchewan, Canada, and raised in South Africa.His father, Errol Musk, is a South African electromechanical engineer, pilot, sailor, consultant, and property developer, who partly owned a Zambian emerald mine near Lake Tanganyika.Musk has a younger brother, Kimbal, and a younger sister, Tosca.Musk's family was wealthy during his youth.His father was elected to the Pretoria City Council as a representative of the anti-apartheid Progressive Party and has said that his children shared their father's dislike of apartheid.His maternal grandfather, Joshua Haldeman, was an American-born Canadian who took his family on record-breaking journeys to Africa and Australia in a single-engine Bellanca airplane.After his parents divorced in 1980, Musk chose to live primarily with his father.Musk later regretted his decision and became estranged from his father.He has a paternal half-sister and a half-brother.Maye Musk has said of her son that he \"was shy and awkward at school\" and \"didn't have many friends\".\n",
+                        "He holds the NBA records for career regular season scoring average (30.1 points per game) and career playoff scoring average (33.4 points per game).In 1999, he was named the 20th century's greatest North American athlete by ESPN and was second to Babe Ruth on the Associated Press' list of athletes of the century.Jordan was twice inducted into the Naismith Memorial Basketball Hall of Fame, once in 2009 for his individual career, and again in 2010 as part of the 1992 United States men's Olympic basketball team (\"The Dream Team\").He became a member of the United States Olympic Hall of Fame in 2009, a member of the North Carolina Sports Hall of Fame in 2010, and an individual member of the FIBA Hall of Fame in 2015 and a \"Dream Team\" member in 2017.In 2021, he was named to the NBA 75th Anniversary Team.One of the most effectively marketed athletes of his generation, Jordan is known for his product endorsements.He fueled the success of Nike's Air Jordan sneakers, which were introduced in 1984 and remain popular today.\n",
+                        "This included about $12.5 billion in loans against his Tesla stock and $21 billion in equity financing.Tesla's stock market value sank by over $100 billion the next day in reaction to the deal, causing Musk to lose around $30 billion of his net worth.He subsequently tweeted criticism of Twitter executive Vijaya Gadde's policies to his 86 million followers, which led to some of them engaging in sexist and racist harassment against her.Exactly a month after announcing the takeover, Musk stated that the deal was \"on hold\" following a report that 5% of Twitter's daily active users were spam accounts, causing Twitter shares to drop more than 10 percent.Although he initially affirmed his commitment to the acquisition, he sent notification of his termination of the deal in July; Twitter's Board of Directors responded that they were committed to holding him to the transaction.On July 12, 2022, Twitter formally sued Musk in the Chancery Court of Delaware for breaching a legally binding agreement to purchase Twitter.In October 2022, Musk reversed again, offering to purchase Twitter at $54.20 per share.\n",
+                        "Coincidentally, Jordan and the Bulls met Barkley and his Phoenix Suns in the 1993 NBA Finals.The Bulls won their third NBA championship on a game-winning shot by John Paxson and a last-second block by Horace Grant, but Jordan was once again Chicago's leader.He averaged a Finals-record 41.0 ppg during the six-game series, and became the first player in NBA history to win three straight Finals MVP awards.He scored more than 30 points in every game of the series, including 40 or more points in four consecutive games.With his third Finals triumph, Jordan capped off a seven-year run where he attained seven scoring titles and three championships, but there were signs that Jordan was tiring of his massive celebrity and all of the non-basketball hassles in his life.\n",
+                        "\n",
+                        "\n",
+                        "==== Gambling ====\n",
+                        "During the Bulls' 1993 NBA playoffs, Jordan was seen gambling in Atlantic City, New Jersey, the night before Game 2 of the Eastern Conference Finals against the New York Knicks.\n",
+                        "The previous year, he admitted that he had to cover $57,000 in gambling losses, and author Richard Esquinas wrote a book in 1993 claiming he had won $1.25 million from Jordan on the golf course.David Stern, the commissioner of the NBA, denied in 1995 and 2006 that Jordan's 1993 retirement was a secret suspension by the league for gambling, but the rumor spread widely.In 2005, Jordan discussed his gambling with Ed Bradley of 60 Minutes and admitted that he made reckless decisions.Jordan stated: \"Yeah, I've gotten myself into situations where I would not walk away and I've pushed the envelope.Is that compulsive?Yeah, it depends on how you look at it.If you're willing to jeopardize your livelihood and your family, then yeah.\"When Bradley asked him if his gambling ever got to the level where it jeopardized his livelihood or family, Jordan replied: \"No.\"In 2010, Ron Shelton, director of Jordan Rides the Bus, said that he began working on the documentary believing that the NBA had suspended him, but that research \"convinced [him it] was nonsense\".\n",
+                        "The media, hoping to recreate a Magic–Bird rivalry, highlighted the similarities between \"Air\" Jordan and Clyde \"The Glide\" during the pre-Finals hype.In the first game, Jordan scored a Finals-record 35 points in the first half, including a record-setting six three-point field goals.After the sixth three-pointer, he jogged down the court shrugging as he looked courtside.Marv Albert, who broadcast the game, later stated that it was as if Jordan was saying: \"I can't believe I'm doing this.\"The Bulls went on to win Game 1 and defeat the Blazers in six games.Jordan was named Finals MVP for the second year in a row, and finished the series averaging 35.8 ppg, 4.8 rpg, and 6.5 apg, while shooting 52.6% from the floor.In the 1992–93 season, despite a 32.6 ppg, 6.7 rpg, and 5.5 apg campaign, including a second-place finish in Defensive Player of the Year voting, Jordan's streak of consecutive MVP seasons ended, as he lost the award to his friend Charles Barkley, which upset him.\n",
+                        "While this resulted in saved costs for SpaceX's rocket, vertical integration has caused many usability problems for Tesla's software.Musk's handling of employees—whom he communicates with directly through mass emails—has been characterized as \"carrot and stick\", rewarding those \"who offer constructive criticism\" while also being known to impulsively threaten, swear at, and fire his employees.Musk said he expects his employees to work for long hours, sometimes for 80 hours per week.He has his new employees sign strict non-disclosure agreements and often fires in sprees, such as during the Model 3 \"production hell\" in 2018.In 2022, Musk revealed plans to fire 10 percent of Tesla's workforce, due to his concerns about the economy.That same month, he suspended remote work at SpaceX and Tesla and threatened to fire employees who do not work 40 hours per week in the office.Musk's leadership has been praised by some, who credit it with the success of Tesla and his other endeavors, and criticized by others, who see him as callous and his managerial decisions as \"show[ing] a lack of human understanding.\"The 2021 book Power Play contains anecdotes of Musk berating employees.\n",
+                        "As a senior, he was selected to play in the 1981 McDonald's All-American Game and scored 30 points, after averaging 27 ppg, 12 rebounds (rpg), and six assists per game (apg) for the season.He was recruited by numerous college basketball programs, including Duke, North Carolina, South Carolina, Syracuse, and Virginia.In 1981, he accepted a basketball scholarship to the University of North Carolina at Chapel Hill, where he majored in cultural geography.\n",
+                        "=== 2018 Joe Rogan podcast appearance ===\n",
+                        "In 2018, Musk appeared on The Joe Rogan Experience podcast and discussed various topics for over two hours. During the interview, Musk sampled a puff from a cigar consisting, the host claimed, of tobacco laced with cannabis. Tesla stock dropped after the incident, which coincided with the confirmation of the departure of Tesla's vice president of worldwide finance earlier that day. Fortune wondered if the cannabis use could have ramifications for SpaceX contracts with the United States Air Force, though an Air Force spokesperson told The Verge that there was no investigation and that the Air Force was still determining the facts. In 2022, Musk claimed that he and other Space-X employees were subjected to random drug tests for about a year following the incident. In a 60 Minutes interview, Musk said of the incident: \"I do not smoke pot. As anybody who watched that podcast could tell, I have no idea how to smoke pot.\"\n",
+                        "=== Private jet ===\n",
+                        "\n",
+                        "In 2003, Musk said his favorite plane he owned was an L-39 Albatros. He uses a private jet owned by Falcon Landing LLC, a SpaceX-linked company, and acquired a second jet in August 2020. His heavy use of the jet—it flew over 150,000 miles in 2018—and the consequent fossil fuel usage has received criticism. His flight usage is tracked on social media through ElonJet. The Twitter version of the account was blocked in December 2022, after Musk claimed that his son X AE A-XII had been harassed by a stalker after the account posted the airport at which his jet had landed. This led to Musk banning the ElonJet account on Twitter, as well as the accounts of journalists that posted stories regarding the incident, including Donie O'Sullivan, Keith Olbermann, and journalists from The New York Times, The Washington Post, CNN, and The Intercept. Musk equated the reporting to doxxing. The police do not believe there is a link between the account and alleged stalker. Musk later took a Twitter poll on whether the journalists' accounts should be reinstated, which resulted in reinstating the accounts.\n",
+                        "\"Although Jordan was a well-rounded player, his \"Air Jordan\" image is also often credited with inadvertently decreasing the jump shooting skills, defense, and fundamentals of young players, a fact Jordan himself has lamented, saying: \"I think it was the exposure of Michael Jordan; the marketing of Michael Jordan.Everything was marketed towards the things that people wanted to see, which was scoring and dunking.That Michael Jordan still played defense and an all-around game, but it was never really publicized.\"During his heyday, Jordan did much to increase the status of the game; television ratings increased only during his time in the league.The popularity of the NBA in the U.S. declined after his last title.As late as 2022, NBA Finals television ratings had not returned to the level reached during his last championship-winning season.In August 2009, the Naismith Memorial Basketball Hall of Fame in Springfield, Massachusetts, opened a Michael Jordan exhibit that contained items from his college and NBA careers as well as from the 1992 \"Dream Team\"; the exhibit also has a batting baseball glove to signify Jordan's short career in the Minor League Baseball.\n",
+                        "Jordan finished among the top three in regular season MVP voting 10 times.He was named one of the 50 Greatest Players in NBA History in 1996, and selected to the NBA 75th Anniversary Team in 2021.Jordan is one of only seven players in history to win an NCAA championship, an NBA championship, and an Olympic gold medal (doing so twice with the 1984 and 1992 U.S. men's basketball teams).Since 1976, the year of the ABA–NBA merger, Jordan and Pippen are the only two players to win six NBA Finals playing for one team.In the All-Star Game fan ballot, Jordan received the most votes nine times, more than any other player.Many of Jordan's contemporaries have said that Jordan is the greatest basketball player of all time.In 1999, an ESPN survey of journalists, athletes and other sports figures ranked Jordan the greatest North American athlete of the 20th century, above Babe Ruth and Muhammad Ali.Jordan placed second to Ruth in the Associated Press' December 1999 list of 20th century athletes.In addition, the Associated Press voted him the greatest basketball player of the 20th century.Jordan has also appeared on the front cover of Sports Illustrated a record 50 times.\n",
+                        "James Jr. became command sergeant major of the 35th Signal Brigade of the U.S. Army's XVIII Airborne Corps and retired in 2006.In 1968, Jordan moved with his family to Wilmington, North Carolina.He attended Emsley A. Laney High School in Wilmington, where he highlighted his athletic career by playing basketball, baseball, and football.He tried out for the basketball varsity team during his sophomore year, but at a height of 5 feet 11 inches (1.80 m), he was deemed too short to play at that level.His taller friend Harvest Leroy Smith was the only sophomore to make the team.Motivated to prove his worth, Jordan became the star of Laney's junior varsity team and tallied some 40-point games.The following summer, he grew four inches (10 cm) and trained rigorously.Upon earning a spot on the varsity roster, he averaged more than 25 points per game (ppg) over his final two seasons of high school play.\n",
+                        "23 retired by the Miami Heat\n",
+                        "NBA MVP trophy renamed in Jordan's honor (\"Michael Jordan Trophy\") in 2022\nUSA Basketball\nTwo-time Olympic gold medal winner – 1984, 1992\n",
+                        "Tournament of the Americas gold medal winner – 1992\n",
+                        "Pan American Games gold medal winner – 1983\n",
+                        "Two-time USA Basketball Male Athlete of the Year – 1983, 1984\nNCAA\nNCAA national championship – 1981–82\n",
+                        "ACC Rookie of the Year – 1981–82\n",
+                        "Two-time Consensus NCAA All-American First Team – 1982–83, 1983–84\n",
+                        "ACC Men's Basketball Player of the Year – 1983–84\n",
+                        "ACC Athlete of the Year – 1984\n",
+                        "USBWA College Player of the Year – 1983–84\n",
+                        "Naismith College Player of the Year – 1983–84\n",
+                        "Adolph Rupp Trophy – 1983–84\n",
+                        "John R. Wooden Award – 1983–84\n",
+                        "Two-time Sporting News National Player of the Year (1983, 1984)\n",
+                        "No.\n",
+                        "He spread misinformation about the virus, including promoting a widely discredited paper on the benefits of chloroquine and claiming that COVID-19 death statistics were inflated.In March 2020, Musk stated, \"The coronavirus panic is dumb.\"In an email to Tesla employees, Musk referred to COVID-19 as a \"specific form of the common cold\" and predicted that confirmed COVID-19 cases would not exceed 0.1% of the U.S. population.On March 19, 2020, Musk predicted that there would be \"probably close to zero new cases in [the U.S.] by end of April\".Politico labeled this statement one of \"the most audacious, confident, and spectacularly incorrect prognostications [of 2020]\".Musk also claimed falsely that children \"are essentially immune\" to COVID-19.Musk condemned COVID-19 lockdowns and initially refused to close the Tesla Fremont Factory in March 2020, defying the local shelter-in-place order.\n",
+                        "Under Musk, Tesla has also constructed multiple lithium-ion battery and electric vehicle factories, named Gigafactories.Since its initial public offering in 2010, Tesla stock has risen significantly; it became the most valuable carmaker in summer 2020, and it entered the S&P 500 later that year.In October 2021, it reached a market capitalization of $1 trillion, the sixth company in U.S. history to do so.In November 2021, Musk proposed, on Twitter, to sell 10% of his Tesla stock, since \"much is made lately of unrealized gains being a means of tax avoidance\".After more than 3.5 million Twitter accounts supported the sale, Musk sold $6.9 billion of Tesla stock within a week, and a total of $16.4 billion by year end, reaching the 10% target.In February 2022, The Wall Street Journal reported that both Elon and Kimbal Musk were under investigation by the SEC for possible insider trading related to the sale.In 2022, Musk unveiled a robot developed by Tesla, Optimus.\n",
+                        "During his rookie 1984–85 season with the Bulls, Jordan averaged 28.2 ppg on 51.5% shooting, and helped make a team that had won 35% of games in the previous three seasons playoff contenders.He quickly became a fan favorite even in opposing arenas.Roy S. Johnson of The New York Times described him as \"the phenomenal rookie of the Bulls\" in November, and Jordan appeared on the cover of Sports Illustrated with the heading \"A Star Is Born\" in December.The fans also voted in Jordan as an All-Star starter during his rookie season.Controversy arose before the 1985 NBA All-Star Game when word surfaced that several veteran players, led by Isiah Thomas, were upset by the amount of attention Jordan was receiving.This led to a so-called \"freeze-out\" on Jordan, where players refused to pass the ball to him throughout the game.The controversy left Jordan relatively unaffected when he returned to regular season play, and he would go on to be voted the NBA Rookie of the Year.\n",
+                        "The acquisition was officially completed on October 27.Immediately after the acquisition, Musk fired several top Twitter executives including CEO Parag Agrawal; Musk became the CEO instead.He instituted a $7.99 monthly subscription for a \"blue check\", and laid off a significant portion of the company's staff.Musk lessened content moderation, and in December, Musk released internal documents relating to Twitter's moderation of Hunter Biden's laptop controversy in the leadup to the 2020 presidential election.The Southern Poverty Law Center noted that Twitter has verified numerous extremists, and a study of millions of tweets following the acquisition indicated that hate speech on the platform has become \"more visible\" under Musk's leadership.Within the first weeks of ownership, Musk made a series of decisions and changes that he quickly reversed, including the paid blue checkmark, creating an \"official\" label and forbidding linking to one's profiles on other social media platforms.Under Musk's management, Twitter experienced several large scale outages.In April 2022, The Washington Post reported that Musk privately claimed that supposed censorship on the platform, including the banning of accounts such as The Babylon Bee, had prompted him to begin the acquisition.\n",
+                        "Musk also promoted a baseless theory relating to the attack of Speaker Nancy Pelosi's husband, but Musk deleted his tweet.Musk has praised China and has been described as having a close relationship with the Chinese government, allowing access to its markets for Tesla.After Gigafactory Shanghai produced its first batch of vehicles, Musk thanked the Chinese government and Chinese people while criticizing the United States and its people.: 207–208  In 2022, Musk wrote an article for China Cyberspace, the official publication of Cyberspace Administration of China, which enforces Internet censorship in China.His writing the article was described as conflicting with his advocacy for free speech.Musk later advocated for Taiwan to become a \"special administrative zone\" of China which drew cross-party criticism from Taiwanese lawmakers.In October 2022, Musk posted a Twitter poll and \"peace plan\" to resolve the Russian invasion of Ukraine.It was reported that Musk allegedly spoke with Russian President Vladimir Putin prior to the proposal, which Musk denied.\n",
+                        "\n",
+                        "\n",
+                        "=== COVID-19 ===\n",
+                        "Musk was criticized for his public comments and conduct related to the COVID-19 pandemic.\n",
+                        "Jordan has had a long relationship with Gatorade, appearing in over 20 commercials for the company since 1991, including the \"Be Like Mike\" commercials in which a song was sung by children wishing to be like Jordan.Nike created a signature shoe for Jordan, called the Air Jordan, in 1984.One of Jordan's more popular commercials for the shoe involved Spike Lee playing the part of Mars Blackmon.In the commercials, Lee, as Blackmon, attempted to find the source of Jordan's abilities and became convinced that \"it's gotta be the shoes\".The hype and demand for the shoes even brought on a spate of \"shoe-jackings\", in which people were robbed of their sneakers at gunpoint.Subsequently, Nike spun off the Jordan line into its own division named the \"Jordan Brand\".The company features a list of athletes and celebrities as endorsers.The brand has also sponsored college sports programs such as those of North Carolina, UCLA, California, Oklahoma, Florida, Georgetown, and Marquette.Jordan also has been associated with the Looney Tunes cartoon characters.A Nike commercial shown during 1992's Super Bowl XXVI featured Jordan and Bugs Bunny playing basketball.\n",
+                        "Accordingly, Musk has stated that artificial intelligence poses the greatest existential threat to humanity.He has warned of a \"Terminator-like\" AI apocalypse and suggested that the government should regulate its safe development.In 2015, Musk was a cosignatory, along with Stephen Hawking and hundreds of others, of the Open Letter on Artificial Intelligence, which called for the ban of autonomous weapons.Musk's AI stances have been called alarmist and sensationalist by critics such as computer scientist Yann LeCun and Meta CEO Mark Zuckerberg, and led the think tank Information Technology and Innovation Foundation to award Musk its Annual Luddite Award in 2016.Musk has described climate change as the greatest threat to humanity after AI, and has advocated for a carbon tax.Musk was a critic of President Donald Trump's stance on climate change, and resigned from two presidential business advisory councils following Trump's 2017 decision to withdraw the United States from the Paris Agreement.Musk has long promoted the colonization of Mars and argues that humanity should become a \"multiplanetary species\".He has suggested the use of nuclear weapons to terraform Mars.\n",
+                        "In 2022, he acquired Twitter for $44 billion and subsequently merged the company into newly created X Corp. and rebranded the service as X the following year.In March 2023, he founded xAI, an artificial-intelligence company.Musk has expressed views that have made him a polarizing figure.He has been criticized for making unscientific and misleading statements, including that of spreading COVID-19 misinformation, and promoting conspiracy theories.His Twitter ownership has been similarly controversial, including letting off a large number of employees, an increase in hate speech on the platform and features such as Twitter Blue and the implementation of limits on the amount of viewable Tweets per day being criticized.In 2018, the U.S. Securities and Exchange Commission (SEC) sued him for falsely tweeting that he had secured funding for a private takeover of Tesla.To settle the case, Musk stepped down as the chairman of Tesla and paid a $20 million fine.\n",
+                        "\n",
+                        "\n",
+                        "== Early life ==\n",
+                        "\n",
+                        "\n",
+                        "=== Childhood and family ===\n",
+                        "\n",
+                        "Elon Reeve Musk was born on June 28, 1971, in Pretoria, one of South Africa's capital cities.Musk has British and Pennsylvania Dutch ancestry.\n",
+                        "Jordan abruptly retired from basketball before the 1993–94 NBA season to play Minor League Baseball but returned to the Bulls in March 1995 and led them to three more championships in 1996, 1997, and 1998, as well as a then-record 72 regular season wins in the 1995–96 NBA season.He retired for the second time in January 1999 but returned for two more NBA seasons from 2001 to 2003 as a member of the Washington Wizards.During the course of his professional career, he was also selected to play for the United States national team, winning four gold medals—at the 1983 Pan American Games, 1984 Summer Olympics, 1992 Tournament of the Americas and 1992 Summer Olympics—while also being undefeated.Jordan's individual accolades and accomplishments include six NBA Finals Most Valuable Player (MVP) awards, ten NBA scoring titles (both all-time records), five NBA MVP awards, ten All-NBA First Team designations, nine All-Defensive First Team honors, fourteen NBA All-Star Game selections, three NBA All-Star Game MVP awards, three NBA steals titles, and the 1988 NBA Defensive Player of the Year Award.\n",
+                        "In May 2020, he reopened the Tesla factory, defying the local stay-at-home order, and warned workers that they would be unpaid, and their unemployment benefits might be jeopardized, if they did not report to work.In December 2022, Musk called for prosecution of former National Institute of Allergy and Infectious Diseases director Anthony Fauci.In March 2020, Musk promised that Tesla would make ventilators for COVID-19 patients if there were a shortage.After figures like New York City mayor Bill de Blasio responded to Musk's offer, Musk offered to donate ventilators which Tesla would build or buy from a third party.However, Musk ended up buying and donating BiPAP and CPAP machines, which are devices that support respirations of someone able to breathe on their own, rather than the much more expensive and sought-after mechanical ventilator machines that are able to breathe for a patient entirely.In September 2020, Musk stated that he would not get the COVID-19 vaccine, because he and his children were \"not at risk for COVID\".\n",
+                        "Broadcaster Al Michaels said that he was able to read baseball box scores on a 27-inch (69 cm) television clearly from about 50 feet (15 m) away.During the 2001 NBA Finals, Phil Jackson compared Jordan's dominance to Shaquille O'Neal, stating: \"Michael would get fouled on every play and still have to play through it and just clear himself for shots instead and would rise to that occasion.\"\n",
+                        "\n",
+                        "\n",
+                        "== Legacy ==\n",
+                        "Jordan's talent was clear from his first NBA season; by November 1984, he was being compared to Julius Erving.Larry Bird said that rookie Jordan was the best player he ever saw, and that he was \"one of a kind\", and comparable to Wayne Gretzky as an athlete.In his first game in Madison Square Garden against the New York Knicks, Jordan received a near minute-long standing ovation.After establishing the single game playoff record of 63 points against the Boston Celtics on April 20, 1986, Bird described him as \"God disguised as Michael Jordan\".Jordan led the NBA in scoring in 10 seasons (NBA record) and tied Wilt Chamberlain's record of seven consecutive scoring titles.\n",
+                        "=== Twitter ===\n",
+                        "\n",
+                        "Musk expressed interest in buying Twitter as early as 2017, and had previously questioned the platform's commitment to freedom of speech.In January 2022, Musk started purchasing Twitter shares, reaching a 9.2% stake by April, making him the largest shareholder.When this was publicly disclosed, Twitter shares experienced the largest intraday price surge since the company's 2013 IPO.On April 4, Musk agreed to a deal that would appoint him to Twitter's board of directors and prohibit him from acquiring more than 14.9% of the company.However, on April 13, Musk made a $43 billion offer to buy Twitter, launching a takeover bid to buy 100% of Twitter's stock at $54.20 per share.In response, Twitter's board adopted a \"poison pill\" shareholder rights plan to make it more expensive for any single investor to own more than 15% of the company without board approval.Nevertheless, by the end of the month Musk had successfully concluded his bid for approximately $44 billion.\n",
+                        "In his 1998 autobiography For the Love of the Game, Jordan wrote that he was preparing for retirement as early as the summer of 1992.The added exhaustion due to the \"Dream Team\" run in the 1992 Summer Olympics solidified Jordan's feelings about the game and his ever-growing celebrity status.Jordan's announcement sent shock waves throughout the NBA and appeared on the front pages of newspapers around the world.Jordan further surprised the sports world by signing a Minor League Baseball contract with the Chicago White Sox on February 7, 1994.He reported to spring training in Sarasota, Florida, and was assigned to the team's minor league system on March 31, 1994.Jordan said that this decision was made to pursue the dream of his late father, who always envisioned his son as a Major League Baseball player.The White Sox were owned by Bulls owner Jerry Reinsdorf, who continued to honor Jordan's basketball contract during the years he played baseball.In 1994, Jordan played for the Birmingham Barons, a Double-A minor league affiliate of the Chicago White Sox, batting .202 with three home runs, 51 runs batted in, 30 stolen bases, 114 strikeouts, 51 bases on balls, and 11 errors.\n",
+                        "As of 2019, he has raised more than $5 million for the Make-A-Wish Foundation.In 2023, Jordan donated $10 million to the organization for his 60th birthday.In 2015, Jordan donated a settlement of undisclosed size from a lawsuit against supermarkets that had used his name without permission to 23 different Chicago charities.In 2017, Jordan funded two Novant Health Michael Jordan Family Clinics in Charlotte, North Carolina, by giving $7 million, the biggest donation he had made at the time.In 2018, after Hurricane Florence damaged parts of North Carolina, including his former hometown of Wilmington, Jordan donated $2 million to relief efforts.He gave $1 million to aid the Bahamas' recovery following Hurricane Dorian in 2019.On June 5, 2020, in the wake of the protests following the murder of George Floyd, Jordan and his brand announced in a joint statement that they would be donating $100 million over the next 10 years to organizations dedicated to \"ensuring racial equality, social justice and greater access to education\".In February 2021, Jordan funded two Novant Health Michael Jordan Family Clinics in New Hanover County, North Carolina, by giving $10 million.\n",
+                        "Jordan was undefeated in the four tournaments he played for the United States national team, winning all 30 games he took part in.\n",
+                        "\n",
+                        "\n",
+                        "== Player profile ==\n",
+                        "Jordan was a shooting guard who could also play as a small forward, the position he would primarily play during his second return to professional basketball with the Washington Wizards, and as a point guard.Jordan was known throughout his career as a strong clutch performer.With the Bulls, he decided 25 games with field goals or free throws in the last 30 seconds, including two NBA Finals games and five other playoff contests.His competitiveness was visible in his prolific trash talk and well-known work ethic.Jordan often used perceived slights to fuel his performances.Sportswriter Wright Thompson described him as \"a killer, in the Darwinian sense of the word, immediately sensing and attacking someone's weakest spot\".As the Bulls organization built the franchise around Jordan, management had to trade away players who were not \"tough enough\" to compete with him in practice.To help improve his defense, he spent extra hours studying film of opponents.\n",
+                        "== National team career ==\n",
+                        "Jordan made his debut for the U.S. national basketball team at the 1983 Pan American Games in Caracas, Venezuela.He led the team in scoring with 17.3 ppg as the U.S., coached by Jack Hartman, won the gold medal in the competition.A year later, he won another gold medal in the 1984 Summer Olympics.The 1984 U.S. team was coached by Bob Knight and featured players such as Patrick Ewing, Sam Perkins, Chris Mullin, Steve Alford, and Wayman Tisdale.Jordan led the team in scoring, averaging 17.1 ppg for the tournament.In 1992, Jordan was a member of the star-studded squad that was dubbed the \"Dream Team\", which included Larry Bird and Magic Johnson.The team went on to win two gold medals: the first one in the 1992 Tournament of the Americas, and the second one in the 1992 Summer Olympics.He was the only player to start all eight games in the Olympics, averaged 14.9 ppg, and finished second on the team in scoring.\n",
+                        "In 2020, SpaceX launched its first crewed flight, the Demo-2, becoming the first private company to place astronauts into orbit and dock a crewed spacecraft with the ISS.\n",
+                        "\n",
+                        "\n",
+                        "==== Starlink ====\n",
+                        "\n",
+                        "In 2015, SpaceX began development of the Starlink constellation of low-Earth-orbit satellites to provide satellite Internet access, with the first two prototype satellites launched in February 2018.A second set of test satellites, and the first large deployment of a piece of the constellation, occurred in May 2019, when the first 60 operational satellites were launched.The total cost of the decade-long project to design, build, and deploy the constellation is estimated by SpaceX to be about $10 billion.Some critics, including the International Astronomical Union, have alleged that Starlink blocks the view of the sky and poses a collision threat to spacecraft.During the Russian invasion of Ukraine, Musk sent Starlink terminals to Ukraine to provide Internet access and communication.However, Musk refused to block Russian state media on Starlink, declaring himself \"a free speech absolutist\".\n",
+                        "During the season, Sam Vincent, Chicago's point guard, was having trouble running the offense, and Jordan expressed his frustration with head coach Doug Collins, who would put Jordan at point guard.In his time as a point guard, Jordan recorded 10 triple-doubles in eleven games, with 33.6 ppg, 11.4 rpg, 10.8 apg, 2.9 spg, and 0.8 bpg on 51% shooting.The Bulls finished with a 47–35 record, and advanced to the Eastern Conference Finals, defeating the Cavaliers and New York Knicks along the way.The Cavaliers series included a career highlight for Jordan when he hit \"The Shot\" over Craig Ehlo at the buzzer in the fifth and final game of the series.\n",
+                        "On June 20, 2023, Musk met with Indian Prime Minister Narendra Modi in New York City, suggesting that he might be interested in investing in India \"as soon as humanly possible\".\n",
+                        "\n",
+                        "\n",
+                        "==== SEC and shareholder lawsuits regarding tweets ====\n",
+                        "In 2018, Musk was sued by the SEC for a tweet claiming that funding had been secured for potentially taking Tesla private.The lawsuit characterized the tweet as false, misleading, and damaging to investors, and sought to bar Musk from serving as CEO of publicly traded companies.Two days later, Musk settled with the SEC, without admitting or denying the SEC's allegations.As a result, Musk and Tesla were fined $20 million each, and Musk was forced to step down for three years as Tesla chairman but was able to remain as CEO.Musk has stated in interviews that he does not regret posting the tweet that triggered the SEC investigation.In April 2022, the shareholder who sued Musk over the tweet, along with several Tesla shareholders, said that a federal judge had ruled that the tweet was false, although the ruling in question has not been unsealed.\n",
+                        "At age ten, he developed an interest in computing and video games, teaching himself how to program from the VIC-20 user manual.At age twelve, he sold his BASIC-based game Blastar to PC and Office Technology magazine for approximately $500.\n",
+                        "\n",
+                        "\n",
+                        "=== Education ===\n",
+                        "Musk attended Waterkloof House Preparatory School, Bryanston High School, and Pretoria Boys High School, from where he graduated.Musk applied for a Canadian passport through his Canadian-born mother, knowing that it would be easier to immigrate to the United States this way.While waiting for his application to be processed, he attended the University of Pretoria for five months.Musk arrived in Canada in June 1989 and lived with a second cousin in Saskatchewan for a year, working odd jobs at a farm and lumber mill.In 1990, he entered Queen's University in Kingston, Ontario.Two years later, he transferred to the University of Pennsylvania (UPenn), where he completed studies for a Bachelor of Arts degree in physics and a Bachelor of Science degree in economics from the Wharton School.Although Musk claims he earned the degrees in 1995, UPenn maintains it awarded them in 1997.\n",
+                        "== Further reading ==\n",
+                        "Leahy, Michael (2004). When Nothing Else Matters: Michael Jordan's Last Comeback. Simon & Schuster. ISBN 978-0-7432-7648-1.\n",
+                        "McGovern, Mike (2005). Michael Jordan: Basketball Player. Ferguson. ISBN 978-0-8160-5876-1.\n",
+                        "\n",
+                        "\n",
+                        "== External links ==\n",
+                        "\n",
+                        "Career statistics and player information from NBA.com and Basketball-Reference.com\n",
+                        "Michael Jordan at the Naismith Memorial Basketball Hall of Fame\n",
+                        "Michael Jordan at Curlie\n",
+                        "Career statistics and player information from Baseball Reference (Minors)\n",
+                        "Michael Jordan Career Retrospective on YouTube\n",
+                        "Michael Jordan at IMDb\n",
+                        "\"Jordan archives\". Chicago Tribune. Archived from the original on June 5, 1997. Retrieved April 29, 2020.\n",
+                        "He was also a fixture of the NBA All-Defensive First Team, making the roster nine times (NBA record shared with Gary Payton, Kevin Garnett, and Kobe Bryant).Jordan also holds the top career regular season and playoff scoring averages of 30.1 and 33.4 ppg, respectively.By 1998, the season of his Finals-winning shot against the Jazz, he was well known throughout the league as a clutch performer.In the regular season, Jordan was the Bulls' primary threat in the final seconds of a close game and in the playoffs; he would always ask for the ball at crunch time.Jordan's total of 5,987 points in the playoffs is the second-highest among NBA career playoff scoring leaders.He scored 32,292 points in the regular season, placing him fifth on the NBA all-time scoring list behind LeBron James, Kareem Abdul-Jabbar, Karl Malone, and Bryant.With five regular season MVPs (tied for second place with Bill Russell—only Abdul-Jabbar has won more, with six), six Finals MVPs (NBA record), and three NBA All-Star Game MVPs, Jordan is the most decorated player in NBA history.\n",
+                        "His strikeout total led the team and his games played tied for the team lead.His 30 stolen bases were second on the team only to Doug Brady.He also appeared for the Scottsdale Scorpions in the 1994 Arizona Fall League, batting .252 against the top prospects in baseball.On November 1, 1994, his No.23 was retired by the Bulls in a ceremony that included the erection of a permanent sculpture known as The Spirit outside the new United Center.\n",
+                        "\n",
+                        "\n",
+                        "==== \"I'm back\": Return to the NBA (1995) ====\n",
+                        "The Bulls went 55–27 in 1993–94 without Jordan in the lineup and lost to the New York Knicks in the second round of the playoffs.The 1994–95 Bulls were a shell of the championship team of just two years earlier.Struggling at mid-season to ensure a spot in the playoffs, Chicago was 31–31 at one point in mid-March; the team received help when Jordan decided to return to the Bulls.In March 1995, Jordan decided to quit baseball because he feared he might become a replacement player during the Major League Baseball strike.\n",
+                        "Though the rocket failed to reach Earth orbit, it was awarded a Commercial Orbital Transportation Services program contract from NASA Administrator (and former SpaceX consultant) Mike Griffin later that year.After two more failed attempts that nearly caused Musk and his companies to go bankrupt, SpaceX succeeded in launching the Falcon 1 into orbit in 2008.Later that year, SpaceX received a $1.6 billion Commercial Resupply Services contract from NASA for 12 flights of its Falcon 9 rocket and Dragon spacecraft to the International Space Station, replacing the Space Shuttle after its 2011 retirement.In 2012, the Dragon vehicle docked with the ISS, a first for a commercial spacecraft.Working towards its goal of reusable rockets, in 2015 SpaceX successfully landed the first stage of a Falcon 9 on an inland platform.Later landings were achieved on autonomous spaceport drone ships, an ocean-based recovery platform.In 2018, SpaceX launched the Falcon Heavy; the inaugural mission carried Musk's personal Tesla Roadster as a dummy payload.Since 2019, SpaceX has been developing Starship, a fully-reusable, super-heavy-lift launch vehicle intended to replace the Falcon 9 and the Falcon Heavy.\n",
+                        "At the 2003 All-Star Game, Jordan was offered a starting spot from Tracy McGrady and Allen Iverson but refused both; in the end, he accepted the spot of Vince Carter.Jordan played in his final NBA game on April 16, 2003, in Philadelphia.After scoring 13 points in the game, Jordan went to the bench with 4 minutes and 13 seconds remaining in the third quarter and his team trailing the Philadelphia 76ers 75–56.Just after the start of the fourth quarter, the First Union Center crowd began chanting \"We want Mike!\"After much encouragement from coach Doug Collins, Jordan finally rose from the bench and re-entered the game, replacing Larry Hughes with 2:35 remaining.At 1:45, Jordan was intentionally fouled by the 76ers' Eric Snow, and stepped to the line to make both free throws.After the second foul shot, the 76ers in-bounded the ball to rookie John Salmons, who in turn was intentionally fouled by Bobby Simmons one second later, stopping time so that Jordan could return to the bench.Jordan received a three-minute standing ovation from his teammates, his opponents, the officials, and the crowd of 21,257 fans.\n",
+                        "==== SolarCity and Tesla Energy ====\n",
+                        "\n",
+                        "Musk provided the initial concept and financial capital for SolarCity, which his cousins Lyndon and Peter Rive founded in 2006. By 2013, SolarCity was the second largest provider of solar power systems in the United States. In 2014, Musk promoted the idea of SolarCity building an advanced production facility in Buffalo, New York, triple the size of the largest solar plant in the United States. Construction of the factory started in 2014 and was completed in 2017. It operated as a joint venture with Panasonic until early 2020.Tesla acquired SolarCity for over $2 billion in 2016 and merged it with its battery unit to create Tesla Energy. The deal's announcement resulted in a more than 10% drop in Tesla's stock price. At the time, SolarCity was facing liquidity issues. Multiple shareholder groups filed a lawsuit against Musk and Tesla's directors, claiming that the purchase of SolarCity was done solely to benefit Musk and came at the expense of Tesla and its shareholders. Tesla directors settled the lawsuit in January 2020, leaving Musk the sole remaining defendant. Two years later, the court ruled in Musk's favor.\n",
+                        "In December 2022, the NBA unveiled a new MVP trophy, named in Jordan's honor, to be awarded beginning with the 2022–23 season.The \"Michael Jordan Trophy\" will replace the original trophy, named in honor of former NBA commissioner Maurice Podoloff, with a new Podoloff Trophy set to be awarded to the team with the best overall regular season record.\n",
+                        "\n",
+                        "\n",
+                        "== NBA career statistics ==\n",
+                        "\n",
+                        "\n",
+                        "=== Regular season ===\n",
+                        "\n",
+                        "\n",
+                        "=== Playoffs ===\n",
+                        "\n",
+                        "\n",
+                        "== Awards and honors ==\n",
+                        "\n",
+                        "NBASix-time NBA champion – 1991, 1992, 1993, 1996, 1997, 1998\n",
+                        "Six-time NBA Finals MVP – 1991, 1992, 1993, 1996, 1997, 1998\n",
+                        "Five-time NBA MVP – 1988, 1991, 1992, 1996, 1998\n",
+                        "NBA Defensive Player of the Year – 1987–88\n",
+                        "NBA Rookie of the Year – 1984–85\n",
+                        "10-time NBA scoring leader – 1987–1993, 1996–1998\n",
+                        "Three-time NBA steals leader – 1988, 1990, 1993\n",
+                        "14-time NBA All-Star – 1985–1993, 1996–1998, 2002, 2003\n",
+                        "Consequently, Tesla's 2021 announcement, against the backdrop of Musk's social media behavior, that it bought $1.5 billion worth of Bitcoin, raised questions.Tesla's announcement that it would accept Bitcoin for payment was criticized by environmentalists and investors, due to the environmental impact of cryptocurrency mining.A few months later, in response to the criticism, Musk announced on Twitter that Tesla would no longer accept payments in Bitcoin and would not engage in any Bitcoin transactions until the environmental issues are solved.Despite the Boring Company's involvement in building mass transit infrastructure, Musk has criticized public transport and promoted individualized transport (private vehicles).His comments have been called \"elitist\" and have sparked widespread criticism from both transportation and urban planning experts, who have pointed out that public transportation in dense urban areas is more economical, more energy efficient, and requires much less space than private cars.\n",
+                        "Musk assumed leadership of the company as CEO and product architect in 2008.A 2009 lawsuit settlement with Eberhard designated Musk as a Tesla co-founder, along with Tarpenning and two others.As of 2019, Musk was the longest-tenured CEO of any automotive manufacturer globally.In 2021, Musk nominally changed his title to \"Technoking\" while retaining his position as CEO.Tesla began delivery of an electric sports car, the Roadster, in 2008.With sales of about 2,500 vehicles, it was the first serial production all-electric car to use lithium-ion battery cells.Tesla began delivery of its four-door Model S sedan in 2012.A cross-over, the Model X was launched in 2015.A mass-market sedan, the Model 3, was released in 2017.The Model 3 is the all-time bestselling plug-in electric car worldwide, and in June 2021 it became the first electric car to sell 1 million units globally.A fifth vehicle, the Model Y crossover, was launched in 2020.The Cybertruck, an all-electric pickup truck, was unveiled in 2019.\n",
+                        "Perhaps the best-known moment of the series came in Game 2 when, attempting a dunk, Jordan avoided a potential Sam Perkins block by switching the ball from his right hand to his left in mid-air to lay the shot into the basket.In his first Finals appearance, Jordan had 31.2 ppg on 56% shooting from the field, 11.4 apg, 6.6 rpg, 2.8 spg, and 1.4 bpg.Jordan won his first NBA Finals MVP award, and he cried while holding the Finals trophy.Jordan and the Bulls continued their dominance in the 1991–92 season, establishing a 67–15 record, topping their franchise record from the 1990–91 campaign.Jordan won his second consecutive MVP award with averages of 30.1 ppg, 6.4 rbg, and 6.1 apg on 52% shooting.After winning a physical seven-game series over the New York Knicks in the second round of the playoffs and finishing off the Cleveland Cavaliers in the Conference Finals in six games, the Bulls met Clyde Drexler and the Portland Trail Blazers in the Finals.\n",
+                        "On April 20 at the Boston Garden, in Game 2 of the First Round, a 135–131 double overtime loss to the eventual NBA Champion Boston Celtics, Jordan scored a playoff career-high 63 points, breaking Elgin Baylor's single-game playoff scoring record.A Celtics team that is often considered one of the greatest in NBA history swept the series in three games.Jordan completely recovered in time for the 1986–87 season, and had one of the most prolific scoring seasons in NBA history; he became the only player other than Wilt Chamberlain to score 3,000 points in a season, averaging a league-high 37.1 ppg on 48.2% shooting.In addition, Jordan demonstrated his defensive prowess, as he became the first player in NBA history to record 200 steals and 100 blocked shots in a season.Despite Jordan's success, Magic Johnson won the NBA Most Valuable Player Award.The Bulls reached 40 wins, and advanced to the playoffs for the third consecutive year but were again swept by the Celtics.\n",
+                        "The Wall Street Journal reported that, after Musk insisted on branding his vehicles as \"self-driving\", he faced criticism from his engineers for putting customer \"lives at risk\", with some employees resigning in consequence.\n",
+                        "\n",
+                        "\n",
+                        "== Other activities ==\n",
+                        "\n",
+                        "\n",
+                        "=== Musk Foundation ===\n",
+                        "Musk is president of the Musk Foundation he founded in 2001, whose stated purpose is to provide solar-power energy systems in disaster areas; support research, development, and advocacy (for interests including human space exploration, pediatrics, renewable energy and \"safe artificial intelligence\"); and support science and engineering educational efforts.From 2002 to 2018, the foundation gave $25 million directly to non-profit organizations, nearly half of which went to Musk's OpenAI, which was a non-profit at the time.Since 2002, the foundation has made over 350 donations.Around half of them were made to scientific research or education nonprofits.Notable beneficiaries include the Wikimedia Foundation, his alma mater the University of Pennsylvania, and his brother Kimbal's non-profit Big Green.In 2012, Musk took the Giving Pledge, thereby committing to give the majority of his wealth to charitable causes either during his lifetime or in his will.\n",
+                        "He envisioned establishing a direct democracy on Mars, with a system in which more votes would be required to create laws than remove them.Musk has also voiced concerns about human population decline, saying that \"Mars has zero human population.We need a lot of people to become a multiplanet civilization.\"Speaking at The Wall Street Journal's CEO Council session in 2021, Musk stated that a declining birth rate, and consequent population decline, is one of the biggest risks to human civilization.\n",
+                        "\n",
+                        "\n",
+                        "=== Politics ===\n",
+                        "\n",
+                        "While often described as libertarian, Musk has called himself \"politically moderate\" and was a registered independent voter when he lived in California.The New York Times wrote that Musk \"expresses views that don't fit neatly into [the American] binary, left-right political framework\".Historically, Musk has donated to both Democrats and Republicans, many of whom are in states in which he has a vested interest.Beginning in the late 2010s, Musk's political contributions have shifted to almost entirely supporting Republicans.Musk voted for Hillary Clinton in the 2016 U.S. presidential election.In the 2020 Democratic Party presidential primaries, Musk endorsed candidate Andrew Yang and expressed support for his proposed universal basic income.\n",
+                        "With 10 seconds remaining, Jordan started to dribble right, then crossed over to his left, possibly pushing off Russell, although the officials did not call a foul.With 5.2 seconds left, Jordan made the climactic shot of his Bulls career, a top-key jumper over a stumbling Russell to give Chicago an 87–86 lead.Afterwards, the Jazz' John Stockton narrowly missed a game-winning three-pointer, and the buzzer sounded as Jordan and the Bulls won their sixth NBA championship, achieving a second three-peat in the decade.Once again, Jordan was voted Finals MVP, having led all scorers by averaging 33.5 ppg, including 45 in the deciding Game 6.Jordan's six Finals MVPs is a record.The 1998 Finals holds the highest television rating of any Finals series in history, and Game 6 holds the highest television rating of any game in NBA history.\n",
+                        "\n",
+                        "\n",
+                        "==== Second retirement (1999–2001) ====\n",
+                        "With Phil Jackson's contract expiring, the pending departures of Scottie Pippen and Dennis Rodman looming, and being in the latter stages of an owner-induced lockout of NBA players, Jordan retired for the second time on January 13, 1999.\n",
+                        "On January 19, 2000, Jordan returned to the NBA not as a player but as part owner and president of basketball operations for the Washington Wizards.Jordan's responsibilities with the Wizards were comprehensive, as he controlled all aspects of the Wizards' basketball operations, and had the final say in all personnel matters; opinions of Jordan as a basketball executive were mixed.He managed to purge the team of several highly paid, unpopular players (like forward Juwan Howard and point guard Rod Strickland) but used the first pick in the 2001 NBA draft to select high school student Kwame Brown, who did not live up to expectations and was traded away after four seasons.Despite his January 1999 claim that he was \"99.9% certain\" he would never play another NBA game, Jordan expressed interest in making another comeback in the summer of 2001, this time with his new team.Inspired by the NHL comeback of his friend Mario Lemieux the previous winter, Jordan spent much of the spring and summer of 2001 in training, holding several invitation-only camps for NBA players in Chicago.\n",
+                        "In February 2023, the jury found Musk and Tesla not liable.In 2019, Musk stated in a tweet that Tesla would build half a million cars that year.The SEC reacted to Musk's tweet by filing in court, asking the court to hold him in contempt for violating the terms of a settlement agreement with such a tweet; the accusation was disputed by Musk.This was eventually settled by a joint agreement between Musk and the SEC clarifying the previous agreement details.The agreement included a list of topics that Musk would need preclearance before tweeting about.In 2020, a judge prevented a lawsuit from proceeding that claimed a tweet by Musk regarding Tesla stock price (\"too high imo\") violated the agreement.FOIA-released records showed that the SEC itself concluded Musk has subsequently violated the agreement twice by tweeting regarding \"Tesla's solar roof production volumes and its stock price\".\n",
+                        "The Bulls won the Eastern Conference Championship for a third straight season, including surviving a seven-game series with the Indiana Pacers in the Eastern Conference Finals; it was the first time Jordan had played in a Game 7 since the 1992 Eastern Conference Semifinals with the New York Knicks.After winning, they moved on for a rematch with the Jazz in the Finals.The Bulls returned to the Delta Center for Game 6 on June 14, 1998, leading the series 3–2.Jordan executed a series of plays, considered to be one of the greatest clutch performances in NBA Finals history.With 41.9 seconds remaining and the Bulls trailing 86–83, Phil Jackson called a timeout.When play resumed, Jordan received the inbound pass, drove to the basket, and sank a shot over several Jazz defenders, cutting Utah's lead to 86–85.The Jazz brought the ball upcourt and passed the ball to Malone, who was set up in the low post and was being guarded by Rodman.Malone jostled with Rodman and caught the pass, but Jordan cut behind him and stole the ball out of his hands.Jordan then dribbled down the court and paused, eyeing his defender, Jazz guard Bryon Russell.\n",
+                        "== Post-retirement ==\n",
+                        "After his third retirement, Jordan assumed that he would be able to return to his front office position as Director of Basketball Operations with the Wizards. His previous tenure in the Wizards' front office had produced mixed results and may have also influenced the trade of Richard \"Rip\" Hamilton for Jerry Stackhouse, although Jordan was not technically Director of Basketball Operations in 2002. On May 7, 2003, Wizards owner Abe Pollin fired Jordan as the team's president of basketball operations. Jordan later stated that he felt betrayed, and that if he had known he would be fired upon retiring, he never would have come back to play for the Wizards.Jordan kept busy over the next few years. He stayed in shape, played golf in celebrity charity tournaments, and spent time with his family in Chicago. He also promoted his Jordan Brand clothing line and rode motorcycles. Since 2004, Jordan has owned Michael Jordan Motorsports, a professional closed-course motorcycle road racing team that competed with two Suzukis in the premier Superbike championship sanctioned by the American Motorcyclist Association (AMA) until the end of the 2013 season.\n",
+                        "Notably, Tesla generates some of its revenue from its sales of carbon credits granted to the company, by both the European Union Emissions Trading System and the Chinese national carbon trading scheme.Musk, a longtime opponent of short-selling, has repeatedly criticized the practice and argued it should be illegal.Wired magazine speculated that Musk's opposition to short-selling stems from how short sellers have an incentive to find and promote unfavorable information about his companies.In early 2021, he encouraged the GameStop short squeeze.In December 2022, Musk sold $3.6 billion of his stock in Tesla, equal to 22 million shares in the company, despite pledging earlier in the year that he would not sell any additional shares.\n",
+                        "\n",
+                        "\n",
+                        "=== Technology ===\n",
+                        "Musk has promoted cryptocurrencies and supports them over traditional government-issued fiat currencies.Given the influence of Musk's tweets in moving cryptocurrency markets, his statements about cryptocurrencies have been viewed as market manipulation by some, such as economist Nouriel Roubini.Musk's social media praising of Bitcoin and Dogecoin was credited for increasing their prices.\n",
+                        "On March 18, 1995, Jordan announced his return to the NBA through a two-word press release: \"I'm back.\"The next day, Jordan took to the court with the Bulls to face the Indiana Pacers in Indianapolis, scoring 19 points.The game had the highest Nielsen rating of any regular season NBA game since 1975.Although he could have worn his original number even though the Bulls retired it, Jordan wore No.45, his baseball number.Despite his eighteen-month hiatus from the NBA, Jordan played well, making a game-winning jump shot against Atlanta in his fourth game back.He scored 55 points in his next game, against the New York Knicks at Madison Square Garden on March 28, 1995.Boosted by Jordan's comeback, the Bulls went 13–4 to make the playoffs and advanced to the Eastern Conference Semifinals against the Orlando Magic.At the end of Game 1, Orlando's Nick Anderson stripped Jordan from behind, leading to the game-winning basket for the Magic; he later commented that Jordan \"didn't look like the old Michael Jordan\", and said that \"No.45 doesn't explode like No.\n",
+                        "That team included Karl Malone, who had beaten Jordan for the NBA MVP award in a tight race (986–957).The series against the Jazz featured two of the more memorable clutch moments of Jordan's career.He won Game 1 for the Bulls with a buzzer-beating jump shot.In Game 5, with the series tied at 2, Jordan played despite being feverish and dehydrated from a stomach virus.In what is known as \"The Flu Game\", Jordan scored 38 points, including the game-deciding 3-pointer with 25 seconds remaining.The Bulls won 90–88 and went on to win the series in six games.For the fifth time in as many Finals appearances, Jordan received the Finals MVP award.During the 1997 NBA All-Star Game, Jordan posted the first triple-double in All-Star Game history in a victorious effort, but the MVP award went to Glen Rice.Jordan and the Bulls compiled a 62–20 record in the 1997–98 season.Jordan led the league with 28.7 ppg, securing his fifth regular season MVP award, plus honors for All-NBA First Team, First Defensive Team, and the All-Star Game MVP.\n",
+                        "The team closed out the season with a 23-game losing streak; their .106 winning percentage was the worst in NBA history.Before the next season, Jordan said: \"I'm not real happy about the record book scenario last year.It's very, very frustrating.\"During the 2019 NBA offseason, Jordan sold a minority piece of the Hornets to Gabe Plotkin and Daniel Sundheim, retaining the majority of the team for himself, as well as the role of chairman.In 2023, Jordan finalized the sale of his majority stake of the team to Gabe Plotkin and Rick Schnall, ending his 13-year tenure as majority owner of the Hornets, although he is keeping a minority stake.The sale was officially completed in August 2023 for approximately $3 billion, more than 10 times the $275 million Jordan had paid for the team.\n",
+                        "During the demonstration, Musk revealed a pig with a Neuralink implant that tracked neural activity related to smell.In 2022, Neuralink announced that clinical trials would begin by the end of the year.Neuralink has conducted further animal testing on macaque monkeys at the University of California, Davis' Primate Research Center.In 2021, the company released a video in which a Macaque played the video game Pong via a Neuralink implant.The company's animal trials—which have caused the deaths of some monkeys—have led to claims of animal cruelty.The Physicians Committee for Responsible Medicine has alleged that Neuralink's animal trials have violated the Animal Welfare Act.Employees have complained that pressure from Musk to accelerate development has led to botched experiments and unnecessary animal deaths.In 2022, a federal probe was launched into possible animal welfare violations by Neuralink.\n",
+                        "=== Neuralink ===\n",
+                        "\n",
+                        "In 2016, Musk co-founded Neuralink, a neurotechnology startup company, with an investment of $100 million.Neuralink aims to integrate the human brain with artificial intelligence (AI) by creating devices that are embedded in the brain to facilitate its merging with machines.Such technology could enhance memory or allow the devices to communicate with software.The company also hopes to develop devices with which to treat neurological conditions such as Alzheimer's disease, dementia, and spinal cord injuries.In 2019, Musk announced work on a device akin to a sewing machine that could embed threads into a human brain.He is listed as the sole author of an October 2019 paper that details some of Neuralink's research, although Musk's being listed as such rankled the Neuralink team's researchers.At a 2020 live demonstration, Musk described one of their early devices as \"a Fitbit in your skull\" that could soon cure paralysis, deafness, blindness, and other disabilities.Many neuroscientists and publications criticized these claims, with MIT Technology Review describing them as \"highly speculative\" and \"neuroscience theater\".\n",
+                        "Despite media criticism by some as a selfish player early in his career, Jordan was willing to defer to his teammates, with a career average of 5.3 apg and a season-high of 8.0 apg. For a guard, Jordan was also a good rebounder, finishing with 6.2 rpg. Defensively, he averaged 2.3 spg and 0.8 bpg. Three-point field goal was not Jordan's strength, especially in his early years. Later on in Jordan's career, he improved his three-point shooting, and finished his career with a respectable 32% success rate. His three-point field-goal percentages ranged from 35% to 43% in seasons in which he attempted at least 230 three-pointers between 1989–90 and 1996–97.\n",
+                        "He has endowed prizes at the X Prize Foundation, including $100 million to reward improved carbon capture technology.Vox said \"the Musk Foundation is almost entertaining in its simplicity and yet is strikingly opaque\", noting that its website was only 33 words in plain-text.The foundation has been criticized for the relatively small amount of wealth donated.In 2020, Forbes gave Musk a philanthropy score of 1, because he had given away less than 1% of his net worth.In November 2021, Musk donated $5.7 billion of Tesla's shares to charity, according to regulatory filings.However, Bloomberg News noted that all of it went to his own foundation, bringing Musk Foundation's assets up to $9.4 billion at the end of 2021.The foundation disbursed $160 million to non-profits that year.\n",
+                        "\n",
+                        "\n",
+                        "=== Hyperloop ===\n",
+                        "\n",
+                        "In 2013, Musk announced plans for a version of a vactrain—a vacuum tube train—and assigned a dozen engineers from SpaceX and Tesla to establish the conceptual foundations and create initial designs.Later that year, Musk unveiled the concept, which he dubbed the hyperloop.\n",
+                        "==== First three-peat (1991–1993) ====\n",
+                        "In the 1990–91 season, Jordan won his second MVP award after averaging 31.5 ppg on 53.9% shooting, 6.0 rpg, and 5.5 apg for the regular season.The Bulls finished in first place in their division for the first time in sixteen years and set a franchise record with 61 wins in the regular season.With Scottie Pippen developing into an All-Star, the Bulls had elevated their play.The Bulls defeated the New York Knicks and the Philadelphia 76ers in the opening two rounds of the playoffs.They advanced to the Eastern Conference Finals where their rival, the Detroit Pistons, awaited them; this time, the Bulls beat the Pistons in a four-game sweep.The Bulls advanced to the Finals for the first time in franchise history to face the Los Angeles Lakers, who had Magic Johnson and James Worthy, two formidable opponents.The Bulls won the series four games to one, and compiled a 15–2 playoff record along the way.\n",
+                        "Jordan led the league in scoring with 30.4 ppg, and he won the league's regular season and All-Star Game MVP awards.In the playoffs, the Bulls lost only three games in four series (Miami Heat 3–0, New York Knicks 4–1, and Orlando Magic 4–0), as they defeated the Seattle SuperSonics 4–2 in the NBA Finals to win their fourth championship.Jordan was named Finals MVP for a record fourth time, surpassing Magic Johnson's three Finals MVP awards; he also achieved only the second sweep of the MVP awards in the All-Star Game, regular season, and NBA Finals after Willis Reed in the 1969–70 season.Upon winning the championship, his first since his father's murder, Jordan reacted emotionally, clutching the game ball and crying on the locker room floor.In the 1996–97 season, the Bulls stood at a 69–11 record but ended the season by losing their final two games to finish the year 69–13, missing out on a second consecutive 70-win season.The Bulls again advanced to the Finals, where they faced the Utah Jazz.\n",
+                        "On September 27, 2021, after Tesla stock surged, Forbes announced that Musk had a net worth of over $200 billion, and was the richest person in the world.In November 2021, Musk became the first person to have a net worth of more than $300 billion.On December 30, 2022, it was reported that Musk had lost $200 billion from his net worth due to declining stock values in Tesla, becoming the first person in history to lose such a large sum of money.In January 2023, Musk was recognized by Guinness World Records for experiencing the \"largest loss of personal fortune in history\" with regards to his financial losses since November 2021, which Guinness quoted a Forbes estimate of $182 billion.Musk's personal wealth is managed by his family office called Excession LLC, which was formed in 2016 and run by Jared Birchall.\n",
+                        "\n",
+                        "\n",
+                        "=== Sources of wealth ===\n",
+                        "Around 75% of Musk's wealth derived from Tesla stock in November 2020, a proportion that fell to about 37% as of December 2022, after selling nearly $40 billion in company shares since late 2021.\n",
+                        "== College career ==\n",
+                        "\n",
+                        "As a freshman in coach Dean Smith's team-oriented system, Jordan was named ACC Freshman of the Year after he averaged 13.4 ppg on 53.4% shooting (field goal percentage). He made the game-winning jump shot in the 1982 NCAA Championship game against Georgetown, which was led by future NBA rival Patrick Ewing. Jordan later described this shot as the major turning point in his basketball career. During his three seasons with the Tar Heels, he averaged 17.7 ppg on 54.0% shooting and added 5.0 rpg and 1.8 apg.Jordan was selected by consensus to the NCAA All-American First Team in both his sophomore (1983) and junior (1984) seasons. After winning the Naismith and the Wooden College Player of the Year awards in 1984, Jordan left North Carolina one year before his scheduled graduation to enter the 1984 NBA draft. Jordan returned to North Carolina to complete his degree in 1986, when he graduated with a Bachelor of Arts degree in geography. In 2002, Jordan was named to the ACC 50th Anniversary men's basketball team honoring the 50 greatest players in ACC history.\n",
+                        "\n",
+                        "\n",
+                        "== Professional career ==\n",
+                        "=== 23XI Racing ===\n",
+                        "On September 21, 2020, Jordan and NASCAR driver Denny Hamlin announced they would be fielding a NASCAR Cup Series team with Bubba Wallace driving, beginning competition in the 2021 season. On October 22, the team's name was confirmed to be 23XI Racing (pronounced twenty-three eleven) and the team's entry would bear No. 23. After the team's inaugural season, it added a second car with No. 45, driven by Kurt Busch in 2022 and Tyler Reddick in 2023. Ty Gibbs, John Hunter Nemechek, and Daniel Hemric also drove for 23XI as substitute drivers during the 2022 season. The team fielded a third car, No. 67, driven by Travis Pastrana in the 2023 Daytona 500. 23XI Racing has won four races, two by Wallace, one by Busch, and one by Reddick.\n",
+                        "\n",
+                        "\n",
+                        "== Personal life ==\n",
+                        "Jordan's nephew through his brother Larry, Justin Jordan, played NCAA Division I basketball for the UNC Greensboro Spartans and is a scout for the Charlotte Hornets.Jordan married Juanita Vanoy at A Little White Wedding Chapel in Las Vegas on September 2, 1989.\n",
+                        "However, Musk dropped out after two days and, with his brother Kimbal, co-founded online city guide software company Zip2.The startup was acquired by Compaq for $307 million in 1999, and with $12 million of the money he made, that same year Musk co-founded X.com, a direct bank.X.com merged with Confinity in 2000 to form PayPal.In 2002, eBay acquired PayPal for $1.5 billion, and that same year, with $100 million of the money he made, Musk founded SpaceX, a spaceflight services company.In 2004, he became an early investor in electric vehicle manufacturer Tesla Motors, Inc. (now Tesla, Inc.).He became its chairman and product architect, assuming the position of CEO in 2008.In 2006, Musk helped create SolarCity, a solar energy company that was acquired by Tesla in 2016 and became Tesla Energy.In 2013, he proposed a hyperloop high-speed vactrain transportation system.In 2015, he co-founded OpenAI, a nonprofit artificial intelligence research company.The following year, Musk co-founded Neuralink—a neurotechnology company developing brain–computer interfaces—and the Boring Company, a tunnel construction company.\n",
+                        "On March 17, the NBA Board of Governors unanimously approved Jordan's purchase, making him the first former player to become the majority owner of an NBA team. It also made him the league's only African-American majority owner. In 2023, Johnson said he regretted selling the Charlotte Hornets to Jordan. During the 2011 NBA lockout, The New York Times wrote that Jordan led a group of 10 to 14 hardline owners who wanted to cap the players' share of basketball-related income at 50 percent and as low as 47 percent. Journalists observed that, during the labor dispute in 1998, Jordan had told Washington Wizards then-owner Abe Pollin: \"If you can't make a profit, you should sell your team.\" Jason Whitlock of FoxSports.com called Jordan \"a hypocrite sellout who can easily betray the very people who made him a billionaire global icon\" for wanting \"current players to pay for his incompetence\". He cited Jordan's executive decisions to draft disappointing players Kwame Brown and Adam Morrison. During the 2011–12 NBA season that was shortened to 66 games by the lockout, the Bobcats posted a 7–59 record.\n",
+                        "The tunnel project to Hawthorne was discontinued in 2022 and is cited to be converted into parking spots for SpaceX workers.Biographer Ashlee Vance has noted that Musk hoped Hyperloop would \"make the public and legislators rethink the high-speed train\" proposal current in California at the time and consider more \"creative\" ideas.\n",
+                        "23 used to\".Jordan responded by scoring 38 points in the next game, which Chicago won.Before the game, Jordan decided that he would immediately resume wearing his former No.23.The Bulls were fined $25,000 for failing to report the impromptu number change to the NBA.Jordan was fined an additional $5,000 for opting to wear white sneakers when the rest of the Bulls wore black.He averaged 31 ppg in the playoffs, but Orlando won the series in six games.\n",
+                        "\n",
+                        "\n",
+                        "==== Second three-peat (1996–1998) ====\n",
+                        "Jordan was freshly motivated by the playoff defeat, and he trained aggressively for the 1995–96 season.The Bulls were strengthened by the addition of rebound specialist Dennis Rodman, and the team dominated the league, starting the season at 41–3.The Bulls eventually finished with the best regular season record in NBA history, 72–10, a mark broken two decades later by the 2015–16 Golden State Warriors.\n",
+                        "Even though Musk founded the company, investors regarded him as inexperienced and replaced him with Intuit CEO Bill Harris by the end of the year.In 2000, X.com merged with online bank Confinity to avoid competition, as the latter's money-transfer service PayPal was more popular than X.com's service.Musk then returned as CEO of the merged company.His preference for Microsoft over Unix-based software caused a rift among the company's employees, and eventually led Confinity co-founder Peter Thiel to resign.With the company suffering from compounding technological issues and the lack of a cohesive business model, the board ousted Musk and replaced him with Thiel in September 2000.Under Thiel, the company focused on the money-transfer service and was renamed PayPal in 2001.In 2002, PayPal was acquired by eBay for $1.5 billion in stock, of which Musk—PayPal's largest shareholder with 11.72% of shares—received $175.8 million.In 2017, more than 15 years later, Musk purchased the X.com domain from PayPal for its \"sentimental value\".In 2022, Musk discussed a goal of creating \"X, the everything app\".\n",
+                        "In addition, Jordan hired his old Chicago Bulls head coach, Doug Collins, as Washington's coach for the upcoming season, a decision that many saw as foreshadowing another Jordan return.\n",
+                        "\n",
+                        "\n",
+                        "=== Washington Wizards (2001–2003) ===\n",
+                        "On September 25, 2001, Jordan announced his return to the NBA to play for the Washington Wizards, indicating his intention to donate his salary as a player to a relief effort for the victims of the September 11 attacks.In an injury-plagued 2001–02 season, Jordan led the team in scoring (22.9 ppg), assists (5.2 apg), and steals (1.4 spg), and was an MVP candidate, as he led the Wizards to a winning record and playoff contention; he would eventually finish 13th in the MVP ballot.After suffering torn cartilage in his right knee, and subsequent knee soreness, the Wizards missed the playoffs, and Jordan's season ended after only 60 games, the fewest he had played in a regular season since playing 17 games after returning from his first retirement during the 1994–95 season.\n",
+                        "=== SpaceX ===\n",
+                        "\n",
+                        "In early 2001, Musk became involved with the nonprofit Mars Society and discussed funding plans to place a growth-chamber for plants on Mars.In October of the same year, he traveled to Moscow with Jim Cantrell and Adeo Ressi to buy refurbished intercontinental ballistic missiles (ICBMs) that could send the greenhouse payloads into space.He met with the companies NPO Lavochkin and Kosmotras; however, Musk was seen as a novice and the group returned to the United States empty-handed.In February 2002, the group returned to Russia with Mike Griffin (president of In-Q-Tel) to look for three ICBMs.They had another meeting with Kosmotras and were offered one rocket for $8 million, which Musk rejected.He instead decided to start a company that could build affordable rockets.With $100 million of his own money, Musk founded SpaceX in May 2002 and became the company's CEO and Chief Engineer.SpaceX attempted its first launch of the Falcon 1 rocket in 2006.\n",
+                        "Jordan started 53 of his 60 games for the season, averaging 24.3 ppg, 5.4 apg, and 6.0 rpg, and shooting 41.9% from the field in his 53 starts.His last seven appearances were in a reserve role, in which he averaged just over 20 minutes per game.The Wizards finished the season with a 37–45 record, an 18-game improvement.Playing in his 14th and final NBA All-Star Game in 2003, Jordan passed Kareem Abdul-Jabbar as the all-time leading scorer in All-Star Game history, a record since broken by Kobe Bryant and LeBron James.That year, Jordan was the only Washington player to play in all 82 games, starting in 67 of them, and coming from off the bench in 15.He averaged 20.0 ppg, 6.1 rpg, 3.8 assists, and 1.5 spg per game.He also shot 45% from the field, and 82% from the free-throw line.Even though he turned 40 during the season, he scored 20 or more points 42 times, 30 or more points nine times, and 40 or more points three times.\n",
+                        "In the Eastern Conference Finals, the Pistons again defeated the Bulls, this time in six games, by utilizing their \"Jordan Rules\" method of guarding Jordan, which consisted of double and triple teaming him every time he touched the ball.The Bulls entered the 1989–90 season as a team on the rise, with their core group of Jordan and young improving players like Scottie Pippen and Horace Grant, and under the guidance of new coach Phil Jackson.On March 28, 1990, Jordan scored a career-high 69 points in a 117–113 road win over the Cavaliers.He averaged a league-leading 33.6 ppg on 52.6% shooting, to go with 6.9 rpg and 6.3 apg, in leading the Bulls to a 55–27 record.They again advanced to the Eastern Conference Finals after beating the Bucks and Philadelphia 76ers; despite pushing the series to seven games, the Bulls lost to the Pistons for the third consecutive season.\n",
+                        "Jordan shot 37%, 35%, 42%, and 37% in all the seasons he shot over 200 three-pointers, and also shot 38.5%, 38.6%, 38.9%, 40.3%, 19.4%, and 30.2% in the playoffs during his championship runs, improving his shooting even after the three-point line reverted to the original line.In 1988, Jordan was honored with the NBA Defensive Player of the Year and the Most Valuable Player awards, becoming the first NBA player to win both awards in a career let alone season.In addition, he set both seasonal and career records for blocked shots by a guard, and combined this with his ball-thieving ability to become a standout defensive player.He ranks fourth in NBA history in total steals with 2,514, trailing John Stockton, Jason Kidd and Chris Paul.Jerry West often stated that he was more impressed with Jordan's defensive contributions than his offensive ones.Doc Rivers declared Jordan \"the best superstar defender in the history of the game\".Jordan was known to have strong eyesight.\n",
+                        "== Wealth ==\n",
+                        "\n",
+                        "\n",
+                        "=== Net worth ===\n",
+                        "Musk made $175.8 million when PayPal was sold to eBay in 2002.He was first listed on the Forbes Billionaires List in 2012, with a net worth of $2 billion.At the start of 2020, Musk had a net worth of $27 billion.By the end of the year his net worth had increased by $150 billion, mostly driven by his ownership of around 20% of Tesla stock.During this period, Musk's net worth was often volatile.For example, it dropped $16.3 billion in September, the largest single-day plunge in Bloomberg Billionaires Index's history.In November of that year, Musk passed Facebook co-founder Mark Zuckerberg to become the third-richest person in the world; a week later he passed Microsoft co-founder Bill Gates to become the second-richest.In January 2021, Musk, with a net worth of $185 billion, surpassed Amazon founder Jeff Bezos to become the richest person in the world.Bezos reclaimed the top spot the following month.\n",
+                        "=== xAI ===\n",
+                        "On July 12, 2023, Elon Musk launched an artificial intelligence company called xAI, which aims to develop a generative AI program that competes with existing offerings like ChatGPT. The company has reportedly hired engineers from Google and OpenAI.\n",
+                        "\n",
+                        "\n",
+                        "=== Leadership style ===\n",
+                        "Musk is often described as a micromanager and has called himself a \"nano-manager\".The New York Times has characterized his approach as absolutist.Musk does not make formal business plans; instead, he says he prefers to approach engineering problems with an \"iterative design methodology\" and \"tolerance for failures\".He has forced employees to adopt the company's own jargon and launched ambitious, risky, and costly projects against his advisors' recommendations, such as removing front-facing radar from Tesla Autopilot.His insistence on vertical integration causes his companies to move most production in-house.\n",
+                        "The Bulls finished the season 38–44, and lost to the Milwaukee Bucks in four games in the first round of the playoffs.An often-cited moment was on August 26, 1985, when Jordan shook the arena during a Nike exhibition game in Trieste, Italy, by shattering the glass of the backboard with a dunk.The moment was filmed and is often referred to worldwide as an important milestone in Jordan's rise.The shoes Jordan wore during the game were auctioned in August 2020 and sold for $615,000, a record for a pair of sneakers.Jordan's 1985–86 season was cut short when he broke his foot in the third game of the year, causing him to miss 64 games.The Bulls made the playoffs despite Jordan's injury and a 30–52 record, at the time the fifth-worst record of any team to qualify for the playoffs in NBA history.Jordan recovered in time to participate in the postseason and performed well upon his return.\n",
+                        "On February 21, 2003, Jordan became the first 40-year-old to tally 43 points in an NBA game.During his stint with the Wizards, all of Jordan's home games at the MCI Center were sold out and the Wizards were the second most-watched team in the NBA, averaging 20,172 fans a game at home and 19,311 on the road.Jordan's final two seasons did not result in a playoff appearance for the Wizards, and he was often unsatisfied with the play of those around him.At several points, he openly criticized his teammates to the media, citing their lack of focus and intensity, notably that of Kwame Brown, the number-one draft pick in the 2001 NBA draft.\n",
+                        "\n",
+                        "\n",
+                        "==== Final retirement (2003) ====\n",
+                        "With the recognition that 2002–03 would be Jordan's final season, tributes were paid to him throughout the NBA.In his final game at the United Center in Chicago, which was his old home court, Jordan received a four-minute standing ovation.The Miami Heat retired the No.23 jersey on April 11, 2003, even though Jordan never played for the team.\n",
+                        "On offense, he relied more upon instinct and improvisation at game time. Noted as a durable player, Jordan did not miss four or more games while active for a full season from 1986–87 to 2001–02, when he injured his right knee. Of the 15 seasons Jordan was in the NBA, he played all 82 regular season games nine times. Jordan has frequently cited David Thompson, Walter Davis, and Jerry West as influences. Confirmed at the start of his career, and possibly later on, Jordan had a special \"Love of the Game Clause\" written into his contract, which was unusual at the time, and allowed him to play basketball against anyone at any time, anywhere. Jordan had a versatile offensive game and was capable of aggressively driving to the basket as well as drawing fouls from his opponents at a high rate. His 8,772 free throw attempts are the 11th-highest total in NBA history. As his career progressed, Jordan also developed the ability to post up his opponents and score with his trademark fadeaway jump shot, using his leaping ability to avoid block attempts. According to Hubie Brown, this move alone made him nearly unstoppable.\n",
+                        "In October 2022, Musk stated that about 20,000 satellite terminals had been donated to Ukraine, together with free data transfer subscriptions, which cost SpaceX $80 million.After asking the United States Department of Defense to pay for further units and future subscriptions on behalf of Ukraine, Musk publicly stated that SpaceX would continue to provide Starlink to Ukraine for free, at a yearly cost to itself of $400 million.\n",
+                        "\n",
+                        "\n",
+                        "=== Tesla ===\n",
+                        "\n",
+                        "Tesla, Inc.—originally Tesla Motors—was incorporated in July 2003 by Martin Eberhard and Marc Tarpenning, who financed the company until the Series A round of funding.Both men played active roles in the company's early development prior to Musk's involvement.Musk led the Series A round of investment in February 2004; he invested $6.5 million, became the majority shareholder, and joined Tesla's board of directors as chairman.Musk took an active role within the company and oversaw Roadster product design but was not deeply involved in day-to-day business operations.Following a series of escalating conflicts in 2007, and the financial crisis of 2007–2008, Eberhard was ousted from the firm.\n",
+                        "=== Zip2 ===\n",
+                        "\n",
+                        "In 1995, Musk, his brother Kimbal, and Greg Kouri founded Zip2. Errol Musk provided them with $28,000 in funding. The company developed an Internet city guide with maps, directions, and yellow pages, and marketed it to newspapers. They worked at a small rented office in Palo Alto, with Musk coding the website every night. Eventually, Zip2 obtained contracts with The New York Times and the Chicago Tribune. The brothers persuaded the board of directors to abandon a merger with CitySearch; however, Musk's attempts to become CEO were thwarted. Compaq acquired Zip2 for $307 million in cash in February 1999, and Musk received $22 million for his 7-percent share.\n",
+                        "\n",
+                        "\n",
+                        "=== X.com and PayPal ===\n",
+                        "\n",
+                        "Later in 1999, Musk co-founded X.com, an online financial services and e-mail payment company with $12 million of the money he made from the Compaq acquisition.X.com was one of the first federally insured online banks, and over 200,000 customers joined in its initial months of operation.\n",
+                        "=== Charlotte Bobcats/Hornets ===\n",
+                        "On June 15, 2006, Jordan bought a minority stake in the Charlotte Bobcats (known as the Hornets since 2013), becoming the team's second-largest shareholder behind majority owner Robert L. Johnson.As part of the deal, Jordan took full control over the basketball side of the operation, with the title Managing Member of Basketball Operations.Despite Jordan's previous success as an endorser, he has made an effort not to be included in Charlotte's marketing campaigns.A decade earlier, Jordan had made a bid to become part-owner of Charlotte's original NBA team, the Charlotte Hornets, but talks collapsed when owner George Shinn refused to give Jordan complete control of basketball operations.In February 2010, it was reported that Jordan was seeking majority ownership of the Bobcats.As February wore on, it became apparent that Jordan and former Houston Rockets president George Postolos were the leading contenders for ownership of the team.On February 27, the Bobcats announced that Johnson had reached an agreement with Jordan and his group, MJ Basketball Holdings, to buy the team from Johnson pending NBA approval.\n",
+                        "Musk does not receive a salary from Tesla; he agreed with the board in 2018 to a compensation plan that ties his personal earnings to Tesla's valuation and revenue.The deal stipulated that Musk only receives the compensation if Tesla reaches certain market values.It was the largest such deal ever done between a CEO and a company board.In the first award, given in May 2020, he was eligible to purchase 1.69 million Tesla shares (about 1% of the company) at below-market prices, which was worth about $800 million.Musk paid $455 million in taxes on $1.52 billion of income between 2014 and 2018.According to ProPublica, Musk paid no federal income taxes in 2018.He claimed his 2021 tax bill was estimated at $12 billion based on his sale of $14 billion worth of Tesla stock.Musk has repeatedly described himself as \"cash poor\", and has \"professed to have little interest in the material trappings of wealth\".In May 2020, he pledged to sell almost all physical possessions.Musk has defended his wealth by saying he is accumulating resources for humanity's outward expansion to space.\n",
+                        "The alpha design for the system was published in a whitepaper posted to the Tesla and SpaceX blogs.The document scoped out the technology and outlined a notional route where such a transport system could be built between the Greater Los Angeles Area and the San Francisco Bay Area, at an estimated cost of $6 billion.The proposal, if technologically feasible at the costs cited, would make Hyperloop travel cheaper than any other mode of transport for such long distances.In 2015, Musk announced a design competition for students and others to build Hyperloop pods, to operate on a SpaceX-sponsored mile-long track, for a 2015–2017 Hyperloop pod competition.The track was used in January 2017, and Musk also announced that the company had started a tunnel project, with Hawthorne Municipal Airport as its destination.In July 2017, Musk claimed that he had received \"verbal government approval\" to build a hyperloop from New York City to Washington, D.C., with stops in both Philadelphia and Baltimore.Mention of the projected DC-to-Baltimore leg was removed from the Boring Company website in 2021.\n",
+                        "==== Pistons roadblock (1987–1990) ====\n",
+                        "Jordan again led the league in scoring during the 1987–88 season, averaging 35.0 ppg on 53.5% shooting, and he won his first league MVP Award.He was also named the NBA Defensive Player of the Year, as he averaged 1.6 blocks per game (bpg), a league-high 3.1 steals per game (spg), and led the Bulls defense to the fewest points per game allowed in the league.The Bulls finished 50–32, and made it out of the first round of the playoffs for the first time in Jordan's career, as they defeated the Cleveland Cavaliers in five games.In the Eastern Conference Semifinals, the Bulls lost in five games to the more experienced Detroit Pistons, who were led by Isiah Thomas and a group of physical players known as the \"Bad Boys\".In the 1988–89 season, Jordan again led the league in scoring, averaging 32.5 ppg on 53.8% shooting from the field, along with 8 rpg and 8 apg.\n",
+                        "=== OpenAI ===\n",
+                        "\n",
+                        "In 2015, Musk co-founded OpenAI, a not-for-profit artificial intelligence (AI) research company aiming to develop artificial general intelligence intended to be safe and beneficial to humanity. A particular focus of the company is to democratize artificial superintelligence systems, against governments and corporations. Musk pledged $1 billion of funding to OpenAI. In 2023, Musk tweeted that he had ended up giving a total of $100 million to OpenAI. TechCrunch later reported that, according to its own investigation of public records, \"only $15 million\" of OpenAI's funding could be definitively traced to Musk. Musk subsequently stated that he had donated about $50 million.In 2018, Musk left the OpenAI board to avoid possible future conflicts with his role as CEO of Tesla as the latter company increasingly became involved in AI through Tesla Autopilot. Since then, OpenAI has made significant advances in machine learning, producing neural networks such as GPT-3 (producing human-like text), and DALL-E (generating digital images from natural language descriptions).\n",
+                        "Jordan's effective field goal percentage was 50%, and he had six seasons with at least 50% shooting, five of which consecutively (1988–1992); he also shot 51% and 50%, and 30% and 33% from the three-point range, throughout his first and second retirements, respectively, finishing his Chicago Bulls career with 31.5 points per game on 50.5 FG% shooting and his overall career with 49.7 FG% shooting.Unlike NBA players often compared to Jordan, such as Kobe Bryant and LeBron James, who had a similar three-point percentage, he did not shoot as many threes as they did, as he did not need to rely on the three-pointer in order to be effective on offense.Three-point shooting was only introduced in 1979 and would not be a more fundamental aspect of the game until the first decades of the 21st century, with the NBA having to briefly shorten the line to incentivize more shots.Jordan's three-point shooting was better selected, resulting in three-point field goals made in important games during the playoffs and the Finals, such as hitting six consecutive three-point shots in Game 1 of the 1992 NBA Finals.\n",
+                        "=== The Boring Company ===\n",
+                        "\n",
+                        "In 2017, Musk founded the Boring Company to construct tunnels, and revealed plans for specialized, underground, high-occupancy vehicles that could travel up to 150 miles per hour (240 km/h) and thus circumvent above-ground traffic in major cities. Early in 2017, the company began discussions with regulatory bodies and initiated construction of a 30-foot (9.1 m) wide, 50-foot (15 m) long, and 15-foot (4.6 m) deep \"test trench\" on the premises of SpaceX's offices, as that required no permits. The Los Angeles tunnel, less than two miles (3.2 km) in length, debuted to journalists in 2018. It used Tesla Model Xs and was reported to be a rough ride while traveling at suboptimal speeds.Two tunnel projects announced in 2018, in Chicago and West Los Angeles, have been canceled. However, a tunnel beneath the Las Vegas Convention Center was completed in early 2021. Local officials have approved further expansions of the tunnel system. In 2021, tunnel construction was approved for Fort Lauderdale, Florida.\n",
+                        "=== Chicago Bulls (1984–1993; 1995–1998) ===\n",
+                        "\n",
+                        "\n",
+                        "==== Early NBA years (1984–1987) ====\n",
+                        "The Chicago Bulls selected Jordan with the third overall pick of the 1984 NBA draft after Hakeem Olajuwon (Houston Rockets) and Sam Bowie (Portland Trail Blazers).One of the primary reasons why Jordan was not drafted sooner was because the first two teams were in need of a center.Trail Blazers general manager Stu Inman contended that it was not a matter of drafting a center but more a matter of taking Bowie over Jordan, in part because Portland already had Clyde Drexler, who was a guard with similar skills to Jordan.Citing Bowie's injury-laden college career, ESPN named the Blazers' choice of Bowie as the worst draft pick in North American professional sports history.Jordan made his NBA debut at Chicago Stadium on October 26, 1984, and scored 16 points.In 2021, a ticket stub from the game sold at auction for $264,000, setting a record for a collectible ticket stub.\n"
+                    ]
+                }
+            ],
+            "source": [
+                "print(f\"Number of nodes: {len(nodes)}\")\n",
+                "for node in nodes:\n",
+                "    print(node.node.get_content())"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 71,
+            "id": "4ead6887-18c4-4855-8152-9b99016d4618",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "INFO:llama_index.indices.vector_store.retrievers.auto_retriever.auto_retriever:Using query str: childhood of a popular sports celebrity\n",
+                        "Using query str: childhood of a popular sports celebrity\n",
+                        "INFO:llama_index.indices.vector_store.retrievers.auto_retriever.auto_retriever:Using filters: {'category': 'Sports', 'country': 'United States'}\n",
+                        "Using filters: {'category': 'Sports', 'country': 'United States'}\n",
+                        "INFO:llama_index.indices.vector_store.retrievers.auto_retriever.auto_retriever:Using top_k: 2\n",
+                        "Using top_k: 2\n",
+                        "Knafel claimed Jordan promised her $5 million for remaining silent and agreeing not to file a paternity suit after Knafel learned she was pregnant in 1991; a DNA test showed Jordan was not the father of the child.Jordan proposed to his longtime girlfriend, Cuban-American model Yvette Prieto, on Christmas 2011, and they were married on April 27, 2013, at Bethesda-by-the-Sea Episcopal Church.It was announced on November 30, 2013, that the two were expecting their first child together.On February 11, 2014, Prieto gave birth to identical twin daughters named Victoria and Ysabel.In 2019, Jordan became a grandfather when his daughter Jasmine gave birth to a son, whose father is professional basketball player Rakeem Christmas.\n",
+                        "\n",
+                        "\n",
+                        "== Media figure and business interests ==\n",
+                        "\n",
+                        "\n",
+                        "=== Endorsements ===\n",
+                        "Jordan is one of the most marketed sports figures in history.He has been a major spokesman for such brands as Nike, Coca-Cola, Chevrolet, Gatorade, McDonald's, Ball Park Franks, Rayovac, Wheaties, Hanes, and MCI.\n",
+                        "James Jr. became command sergeant major of the 35th Signal Brigade of the U.S. Army's XVIII Airborne Corps and retired in 2006.In 1968, Jordan moved with his family to Wilmington, North Carolina.He attended Emsley A. Laney High School in Wilmington, where he highlighted his athletic career by playing basketball, baseball, and football.He tried out for the basketball varsity team during his sophomore year, but at a height of 5 feet 11 inches (1.80 m), he was deemed too short to play at that level.His taller friend Harvest Leroy Smith was the only sophomore to make the team.Motivated to prove his worth, Jordan became the star of Laney's junior varsity team and tallied some 40-point games.The following summer, he grew four inches (10 cm) and trained rigorously.Upon earning a spot on the varsity roster, he averaged more than 25 points per game (ppg) over his final two seasons of high school play.\n"
+                    ]
+                }
+            ],
+            "source": [
+                "nodes = retriever.retrieve(\n",
+                "    \"Tell me about the childhood of a popular sports celebrity in the United States\"\n",
+                ")\n",
+                "for node in nodes:\n",
+                "    print(node.node.get_content())"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 72,
+            "id": "c0a8627c-662f-47b9-8e48-7c2f237dda1a",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "INFO:llama_index.indices.vector_store.retrievers.auto_retriever.auto_retriever:Using query str: college life of a billionaire who started at company at the age of 16\n",
+                        "Using query str: college life of a billionaire who started at company at the age of 16\n",
+                        "INFO:llama_index.indices.vector_store.retrievers.auto_retriever.auto_retriever:Using filters: {}\n",
+                        "Using filters: {}\n",
+                        "INFO:llama_index.indices.vector_store.retrievers.auto_retriever.auto_retriever:Using top_k: 2\n",
+                        "Using top_k: 2\n",
+                        "He reportedly hosted large, ticketed house parties to help pay for tuition, and wrote a business plan for an electronic book-scanning service similar to Google Books.In 1994, Musk held two internships in Silicon Valley: one at energy storage startup Pinnacle Research Institute, which investigated electrolytic ultracapacitors for energy storage, and another at Palo Alto–based startup Rocket Science Games.In 1995, he was accepted to a PhD program in materials science at Stanford University.However, Musk decided to join the Internet boom, dropping out two days after being accepted and applied for a job at Netscape, to which he reportedly never received a response.\n",
+                        "\n",
+                        "\n",
+                        "== Business career ==\n",
+                        "At age ten, he developed an interest in computing and video games, teaching himself how to program from the VIC-20 user manual.At age twelve, he sold his BASIC-based game Blastar to PC and Office Technology magazine for approximately $500.\n",
+                        "\n",
+                        "\n",
+                        "=== Education ===\n",
+                        "Musk attended Waterkloof House Preparatory School, Bryanston High School, and Pretoria Boys High School, from where he graduated.Musk applied for a Canadian passport through his Canadian-born mother, knowing that it would be easier to immigrate to the United States this way.While waiting for his application to be processed, he attended the University of Pretoria for five months.Musk arrived in Canada in June 1989 and lived with a second cousin in Saskatchewan for a year, working odd jobs at a farm and lumber mill.In 1990, he entered Queen's University in Kingston, Ontario.Two years later, he transferred to the University of Pennsylvania (UPenn), where he completed studies for a Bachelor of Arts degree in physics and a Bachelor of Science degree in economics from the Wharton School.Although Musk claims he earned the degrees in 1995, UPenn maintains it awarded them in 1997.\n"
+                    ]
+                }
+            ],
+            "source": [
+                "nodes = retriever.retrieve(\n",
+                "    \"Tell me about the college life of a billionaire who started a company at the age of 16\"\n",
+                ")\n",
+                "for node in nodes:\n",
+                "    print(node.node.get_content())"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 73,
+            "id": "33425c39-e0e7-4415-b695-66c96d3fc7d1",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "INFO:llama_index.indices.vector_store.retrievers.auto_retriever.auto_retriever:Using query str: childhood of a billionaire\n",
+                        "Using query str: childhood of a billionaire\n",
+                        "INFO:llama_index.indices.vector_store.retrievers.auto_retriever.auto_retriever:Using filters: {'country': 'UK'}\n",
+                        "Using filters: {'country': 'UK'}\n",
+                        "INFO:llama_index.indices.vector_store.retrievers.auto_retriever.auto_retriever:Using top_k: 2\n",
+                        "Using top_k: 2\n",
+                        "Branson has also talked openly about having ADHD.Branson's parents were supportive of his endeavours from an early age.His mother was an entrepreneur; one of her most successful ventures was building and selling wooden tissue boxes and wastepaper bins.In London, he started off squatting from 1967 to 1968.Branson is an atheist.He said in a 2011 interview with CNN's Piers Morgan that he believes in evolution and the importance of humanitarian efforts but not in the existence of God.\"I would love to believe,\" he said.\"It's very comforting to believe\".\n",
+                        "\n",
+                        "\n",
+                        "== Early business career ==\n",
+                        "After failed attempts to grow and sell both Christmas trees and budgerigars, Branson launched a magazine named Student in 1966 with Nik Powell.The first issue of Student appeared in January 1968, and a year later, Branson's net worth was estimated at £50,000.The office for the venture was situated in the crypt of St. John's Church, off Bayswater Road, in London.Though not initially as successful as he hoped, the magazine later became a vital component of the mail-order record business Branson started from the same church he used for Student.\n",
+                        "In March 2000, Branson was knighted at Buckingham Palace for \"services to entrepreneurship\".For his work in retail, music and transport (with interests in land, air, sea and space travel), his taste for adventure and for his humanitarian work, he has become a prominent global figure.In 2007, he was placed in the Time 100 Most Influential People in the World list.In June 2023, Forbes listed Branson's estimated net worth at US$3 billion.On 11 July 2021, Branson travelled as a passenger onboard Virgin Galactic Unity 22 at the edge of space, a suborbital test flight for his spaceflight company Virgin Galactic.The mission lasted approximately one hour, reaching a peak altitude of 53.5 miles (86.1 km).At 70, Branson became the third oldest person to fly to space.\n",
+                        "\n",
+                        "\n",
+                        "== Early life ==\n",
+                        "Richard Charles Nicholas Branson was born on 18 July 1950 in Blackheath, London, the son of Edward James Branson (1918–2011), a barrister, and his wife Evette Huntley Branson (née Flindt; 1924–2021), a former ballet dancer and air hostess.\n"
+                    ]
+                }
+            ],
+            "source": [
+                "nodes = retriever.retrieve(\"Tell me about the childhood of a UK billionaire\")\n",
+                "for node in nodes:\n",
+                "    print(node.node.get_content())"
+            ]
+        },
+        {
+            "cell_type": "markdown",
+            "id": "45612783-c82c-48f7-ace9-d04341b9d3ec",
+            "metadata": {},
+            "source": [
+                "## Build Recursive Retriever over Document Summaries"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 87,
+            "id": "6a127983-f083-41d8-9299-e8f93e1ee112",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "from llama_index.schema import IndexNode"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 98,
+            "id": "07d4509e-715c-4844-975b-db2afa900fe5",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "**Summary for Michael Jordan: Michael Jordan, often referred to as MJ, is a retired professional basketball player from the United States who is widely considered one of the greatest players in the history of the sport. He played 15 seasons in the NBA, primarily with the Chicago Bulls, and won six NBA championships. His individual accolades include six NBA Finals MVP awards, ten NBA scoring titles, five NBA MVP awards, and fourteen NBA All-Star Game selections. He also holds the NBA records for career regular season scoring average and career playoff scoring average. Jordan briefly retired to play Minor League Baseball, but returned to lead the Bulls to three more championships. He was twice inducted into the Naismith Memorial Basketball Hall of Fame. \n",
+                        "\n",
+                        "After retiring, Jordan became a successful businessman, part-owner and head of basketball operations for the Charlotte Hornets, and owner of 23XI Racing in the NASCAR Cup Series. He has also made significant contributions to charitable causes, donating millions to organizations such as the Make-A-Wish Foundation and Habitat for Humanity. In the entertainment industry, he has appeared in productions like \"Space Jam\" and \"The Last Dance\", and has authored several books about his life and career. His influence extends beyond sports, making him a significant cultural figure.\n",
+                        "**Summary for Elon Musk: Elon Musk is a globally recognized business magnate and investor, who has founded and led numerous high-profile technology companies. He is the founder, CEO, and chief technology officer of SpaceX, an aerospace manufacturer and space transportation company, and the CEO and product architect of Tesla, Inc., a company specializing in electric vehicles and clean energy. Musk also owns and chairs X Corp, and founded the Boring Company, a tunnel construction and infrastructure company. He co-founded Neuralink, a neurotechnology company, and OpenAI, a nonprofit artificial intelligence research company. \n",
+                        "\n",
+                        "In 2022, Musk acquired Twitter and merged it with X Corp, and also founded xAI, an AI company. Despite his success, he has faced criticism for his controversial statements and management style. Musk was born in South Africa, moved to Canada at 18, and later to the United States to attend Stanford University, but dropped out to start his entrepreneurial journey. He co-founded Zip2 and X.com (later PayPal), which was sold to eBay in 2002. \n",
+                        "\n",
+                        "Musk envisions a future that includes Mars colonization and the development of a high-speed transportation system known as the Hyperloop. As of August 2023, he is the wealthiest person in the world, with a net worth of over $200 billion. Despite various controversies, Musk has made significant contributions to the tech industry. He has been married multiple times, has several children, and is known for his active presence on social media, particularly Twitter.\n",
+                        "**Summary for Richard Branson: Richard Branson, born on 18 July 1950, is a British business magnate, commercial astronaut, and philanthropist. He founded the Virgin Group in the 1970s, which now controls over 400 companies in various fields such as aviation, music, and space travel. His first business venture was a magazine called Student, and he later established a mail-order record business and a chain of record stores known as Virgin Records. The Virgin brand expanded rapidly during the 1980s with the start of Virgin Atlantic airline and the expansion of the Virgin Records music label. In 1997, he founded the Virgin Rail Group, and in 2004, he founded Virgin Galactic. Branson was knighted in 2000 for his services to entrepreneurship. He has a net worth of US$3 billion as of June 2023. Branson has also been involved in numerous philanthropic activities and has launched initiatives like Virgin Startup. Despite his success, he has faced criticism and legal issues, including a brief jail term for tax evasion in 1971. He is married to Joan Templeman, with whom he has two children.\n",
+                        "**Summary for Rihanna: Rihanna, whose real name is Robyn Rihanna Fenty, is a renowned Barbadian singer, songwriter, actress, and businesswoman. She rose to fame after signing with Def Jam in 2005 and releasing her first two albums, \"Music of the Sun\" and \"A Girl Like Me\". Her third album, \"Good Girl Gone Bad\", solidified her status as a major music icon. Some of her other successful albums include \"Rated R\", \"Loud\", \"Talk That Talk\", and \"Unapologetic\", which was her first to reach number one on the Billboard 200. \n",
+                        "\n",
+                        "Rihanna has sold over 250 million records worldwide, making her one of the best-selling music artists of all time. She has received numerous awards, including nine Grammy Awards, 12 Billboard Music Awards, and 13 American Music Awards. She also holds six Guinness World Records. \n",
+                        "\n",
+                        "In addition to her music career, Rihanna has ventured into business, founding the cosmetics brand Fenty Beauty and the fashion house Fenty under LVMH. She has also acted in several films, including \"Battleship\", \"Home\", \"Valerian and the City of a Thousand Planets\", and \"Ocean's 8\". \n",
+                        "\n",
+                        "Rihanna is also known for her philanthropic work, particularly through her Believe Foundation and the Clara Lionel Foundation. As of 2023, she is the wealthiest female musician, with an estimated net worth of $1.4 billion.\n"
+                    ]
+                }
+            ],
+            "source": [
+                "# define top-level nodes and vector retrievers\n",
+                "nodes = []\n",
+                "vector_query_engines = {}\n",
+                "vector_retrievers = {}\n",
+                "\n",
+                "for wiki_title in wiki_titles:\n",
+                "    # build vector index\n",
+                "    vector_index = VectorStoreIndex.from_documents(\n",
+                "        [docs_dict[wiki_title]], service_context=service_context\n",
+                "    )\n",
+                "    # define query engines\n",
+                "    vector_query_engine = vector_index.as_query_engine()\n",
+                "    vector_query_engines[wiki_title] = vector_query_engine\n",
+                "    vector_retrievers[wiki_title] = vector_index.as_retriever()\n",
+                "\n",
+                "    # save summaries\n",
+                "    out_path = Path(\"summaries\") / f\"{wiki_title}.txt\"\n",
+                "    if not out_path.exists():\n",
+                "        # use LLM-generated summary\n",
+                "        list_index = SummaryIndex.from_documents(\n",
+                "            [docs_dict[wiki_title]], service_context=service_context\n",
+                "        )\n",
+                "\n",
+                "        summarizer = list_index.as_query_engine(response_mode=\"tree_summarize\")\n",
+                "        response = await summarizer.aquery(f\"Give me a summary of {wiki_title}\")\n",
+                "\n",
+                "        wiki_summary = response.response\n",
+                "        Path(\"summaries\").mkdir(exist_ok=True)\n",
+                "        with open(out_path, \"w\") as fp:\n",
+                "            fp.write(wiki_summary)\n",
+                "    else:\n",
+                "        with open(out_path, \"r\") as fp:\n",
+                "            wiki_summary = fp.read()\n",
+                "\n",
+                "    print(f\"**Summary for {wiki_title}: {wiki_summary}\")\n",
+                "    node = IndexNode(text=wiki_summary, index_id=wiki_title)\n",
+                "    nodes.append(node)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 99,
+            "id": "f3f10b9d-2cdc-44d7-9f2c-44c1438fad9e",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# define top-level retriever\n",
+                "top_vector_index = VectorStoreIndex(nodes)\n",
+                "top_vector_retriever = top_vector_index.as_retriever(similarity_top_k=1)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 100,
+            "id": "65547fe1-6d2c-4658-8552-08a34f9c763f",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# define recursive retriever\n",
+                "from llama_index.retrievers import RecursiveRetriever\n",
+                "from llama_index.query_engine import RetrieverQueryEngine\n",
+                "from llama_index.response_synthesizers import get_response_synthesizer"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 102,
+            "id": "bfb2b340",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# note: can also pass `vector_query_engines` as `query_engine_dict` since any query engine can serve as a recursion target\n",
+                "recursive_retriever = RecursiveRetriever(\n",
+                "    \"vector\",\n",
+                "    retriever_dict={\"vector\": top_vector_retriever, **vector_retrievers},\n",
+                "    # query_engine_dict=vector_query_engines,\n",
+                "    verbose=True,\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 103,
+            "id": "3df4a090-c45a-4e3e-8d0a-f2955204bf26",
+            "metadata": {},
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "\u001b[36;1m\u001b[1;3mRetrieving with query id None: Tell me about a celebrity from the United States\n",
+                        "\u001b[0m\u001b[38;5;200m\u001b[1;3mRetrieved node with id, entering: Michael Jordan\n",
+                        "\u001b[0m\u001b[36;1m\u001b[1;3mRetrieving with query id Michael Jordan: Tell me about a celebrity from the United States\n",
+                        "\u001b[0m\u001b[38;5;200m\u001b[1;3mRetrieving text node: He was interviewed at three homes associated with the production and did not want cameras in his home or on his plane, as according to director Jason Hehir \"there are certain aspects of his life that he wants to keep private\".Jordan granted rapper Travis Scott permission to film a music video for his single \"Franchise\" at his home in Highland Park, Illinois.Jordan appeared in the 2022 miniseries The Captain, which follows the life and career of Derek Jeter.\n",
+                        "\n",
+                        "\n",
+                        "=== Books ===\n",
+                        "Jordan has authored several books focusing on his life, basketball career, and world view.\n",
+                        "\n",
+                        "Rare Air: Michael on Michael, with Mark Vancil and Walter Iooss (Harper San Francisco, 1993).\n",
+                        "I Can't Accept Not Trying: Michael Jordan on the Pursuit of Excellence, with Mark Vancil and Sandro Miller (Harper San Francisco, 1994).\n",
+                        "For the Love of the Game: My Story, with Mark Vancil (Crown Publishers, 1998).\n",
+                        "Driven from Within, with Mark Vancil (Atria Books, 2005).\n",
+                        "\u001b[0m\u001b[38;5;200m\u001b[1;3mRetrieving text node: In the September 1996 issue of Sport, which was the publication's 50th-anniversary issue, Jordan was named the greatest athlete of the past 50 years.Jordan's athletic leaping ability, highlighted in his back-to-back Slam Dunk Contest championships in 1987 and 1988, is credited by many people with having influenced a generation of young players.Several NBA players, including James and Dwyane Wade, have stated that they considered Jordan their role model while they were growing up.In addition, commentators have dubbed a number of next-generation players \"the next Michael Jordan\" upon their entry to the NBA, including Penny Hardaway, Grant Hill, Allen Iverson, Bryant, Vince Carter, James, and Wade.Some analysts, such as The Ringer's Dan Devine, drew parallels between Jordan's experiment at point guard in the 1988–89 season and the modern NBA; for Devine, it \"inadvertently foreshadowed the modern game's stylistic shift toward monster-usage primary playmakers\", such as Russell Westbrook, James Harden, Luka Dončić, and James.Don Nelson stated: \"I would've been playing him at point guard the day he showed up as a rookie.\n",
+                        "\u001b[0mHe was interviewed at three homes associated with the production and did not want cameras in his home or on his plane, as according to director Jason Hehir \"there are certain aspects of his life that he wants to keep private\".Jordan granted rapper Travis Scott permission to film a music video for his single \"Franchise\" at his home in Highland Park, Illinois.Jordan appeared in the 2022 miniseries The Captain, which follows the life and career of Derek Jeter.\n",
+                        "\n",
+                        "\n",
+                        "=== Books ===\n",
+                        "Jordan has authored several books focusing on his life, basketball career, and world view.\n",
+                        "\n",
+                        "Rare Air: Michael on Michael, with Mark Vancil and Walter Iooss (Harper San Francisco, 1993).\n",
+                        "I Can't Accept Not Trying: Michael Jordan on the Pursuit of Excellence, with Mark Vancil and Sandro Miller (Harper San Francisco, 1994).\n",
+                        "For the Love of the Game: My Story, with Mark Vancil (Crown Publishers, 1998).\n",
+                        "Driven from Within, with Mark Vancil (Atria Books, 2005).\n",
+                        "In the September 1996 issue of Sport, which was the publication's 50th-anniversary issue, Jordan was named the greatest athlete of the past 50 years.Jordan's athletic leaping ability, highlighted in his back-to-back Slam Dunk Contest championships in 1987 and 1988, is credited by many people with having influenced a generation of young players.Several NBA players, including James and Dwyane Wade, have stated that they considered Jordan their role model while they were growing up.In addition, commentators have dubbed a number of next-generation players \"the next Michael Jordan\" upon their entry to the NBA, including Penny Hardaway, Grant Hill, Allen Iverson, Bryant, Vince Carter, James, and Wade.Some analysts, such as The Ringer's Dan Devine, drew parallels between Jordan's experiment at point guard in the 1988–89 season and the modern NBA; for Devine, it \"inadvertently foreshadowed the modern game's stylistic shift toward monster-usage primary playmakers\", such as Russell Westbrook, James Harden, Luka Dončić, and James.Don Nelson stated: \"I would've been playing him at point guard the day he showed up as a rookie.\n"
+                    ]
+                }
+            ],
+            "source": [
+                "# run the recursive retriever on an example query\n",
+                "nodes = recursive_retriever.retrieve(\"Tell me about a celebrity from the United States\")\n",
+                "for node in nodes:\n",
+                "    print(node.node.get_content())"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 104,
+            "id": "7f3a83e8-5872-4d55-9307-f4cd4c79216c",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "\u001b[36;1m\u001b[1;3mRetrieving with query id None: Tell me about the childhood of a billionaire who started at company at the age of 16\n",
+                        "\u001b[0m\u001b[38;5;200m\u001b[1;3mRetrieved node with id, entering: Richard Branson\n",
+                        "\u001b[0m\u001b[36;1m\u001b[1;3mRetrieving with query id Richard Branson: Tell me about the childhood of a billionaire who started at company at the age of 16\n",
+                        "\u001b[0m\u001b[38;5;200m\u001b[1;3mRetrieving text node: Branson has also talked openly about having ADHD.Branson's parents were supportive of his endeavours from an early age.His mother was an entrepreneur; one of her most successful ventures was building and selling wooden tissue boxes and wastepaper bins.In London, he started off squatting from 1967 to 1968.Branson is an atheist.He said in a 2011 interview with CNN's Piers Morgan that he believes in evolution and the importance of humanitarian efforts but not in the existence of God.\"I would love to believe,\" he said.\"It's very comforting to believe\".\n",
+                        "\n",
+                        "\n",
+                        "== Early business career ==\n",
+                        "After failed attempts to grow and sell both Christmas trees and budgerigars, Branson launched a magazine named Student in 1966 with Nik Powell.The first issue of Student appeared in January 1968, and a year later, Branson's net worth was estimated at £50,000.The office for the venture was situated in the crypt of St. John's Church, off Bayswater Road, in London.Though not initially as successful as he hoped, the magazine later became a vital component of the mail-order record business Branson started from the same church he used for Student.\n",
+                        "\u001b[0m\u001b[38;5;200m\u001b[1;3mRetrieving text node: In March 2000, Branson was knighted at Buckingham Palace for \"services to entrepreneurship\".For his work in retail, music and transport (with interests in land, air, sea and space travel), his taste for adventure and for his humanitarian work, he has become a prominent global figure.In 2007, he was placed in the Time 100 Most Influential People in the World list.In June 2023, Forbes listed Branson's estimated net worth at US$3 billion.On 11 July 2021, Branson travelled as a passenger onboard Virgin Galactic Unity 22 at the edge of space, a suborbital test flight for his spaceflight company Virgin Galactic.The mission lasted approximately one hour, reaching a peak altitude of 53.5 miles (86.1 km).At 70, Branson became the third oldest person to fly to space.\n",
+                        "\n",
+                        "\n",
+                        "== Early life ==\n",
+                        "Richard Charles Nicholas Branson was born on 18 July 1950 in Blackheath, London, the son of Edward James Branson (1918–2011), a barrister, and his wife Evette Huntley Branson (née Flindt; 1924–2021), a former ballet dancer and air hostess.\n",
+                        "\u001b[0mBranson has also talked openly about having ADHD.Branson's parents were supportive of his endeavours from an early age.His mother was an entrepreneur; one of her most successful ventures was building and selling wooden tissue boxes and wastepaper bins.In London, he started off squatting from 1967 to 1968.Branson is an atheist.He said in a 2011 interview with CNN's Piers Morgan that he believes in evolution and the importance of humanitarian efforts but not in the existence of God.\"I would love to believe,\" he said.\"It's very comforting to believe\".\n",
+                        "\n",
+                        "\n",
+                        "== Early business career ==\n",
+                        "After failed attempts to grow and sell both Christmas trees and budgerigars, Branson launched a magazine named Student in 1966 with Nik Powell.The first issue of Student appeared in January 1968, and a year later, Branson's net worth was estimated at £50,000.The office for the venture was situated in the crypt of St. John's Church, off Bayswater Road, in London.Though not initially as successful as he hoped, the magazine later became a vital component of the mail-order record business Branson started from the same church he used for Student.\n",
+                        "In March 2000, Branson was knighted at Buckingham Palace for \"services to entrepreneurship\".For his work in retail, music and transport (with interests in land, air, sea and space travel), his taste for adventure and for his humanitarian work, he has become a prominent global figure.In 2007, he was placed in the Time 100 Most Influential People in the World list.In June 2023, Forbes listed Branson's estimated net worth at US$3 billion.On 11 July 2021, Branson travelled as a passenger onboard Virgin Galactic Unity 22 at the edge of space, a suborbital test flight for his spaceflight company Virgin Galactic.The mission lasted approximately one hour, reaching a peak altitude of 53.5 miles (86.1 km).At 70, Branson became the third oldest person to fly to space.\n",
+                        "\n",
+                        "\n",
+                        "== Early life ==\n",
+                        "Richard Charles Nicholas Branson was born on 18 July 1950 in Blackheath, London, the son of Edward James Branson (1918–2011), a barrister, and his wife Evette Huntley Branson (née Flindt; 1924–2021), a former ballet dancer and air hostess.\n"
+                    ]
+                }
+            ],
+            "source": [
+                "nodes = recursive_retriever.retrieve(\n",
+                "    \"Tell me about the childhood of a billionaire who started at company at the age of 16\"\n",
+                ")\n",
+                "for node in nodes:\n",
+                "    print(node.node.get_content())"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "750e4416-644a-42f9-9acf-1bac2fa05748",
+            "metadata": {},
+            "outputs": [],
+            "source": []
+        }
+    ],
+    "metadata": {
+        "kernelspec": {
+            "display_name": "llama_index_v2",
+            "language": "python",
+            "name": "llama_index_v2"
+        },
+        "language_info": {
+            "codemirror_mode": {
+                "name": "ipython",
+                "version": 3
+            },
+            "file_extension": ".py",
+            "mimetype": "text/x-python",
+            "name": "python",
+            "nbconvert_exporter": "python",
+            "pygments_lexer": "ipython3",
+            "version": "3.10.10"
+        }
+    },
+    "nbformat": 4,
+    "nbformat_minor": 5
 }
diff --git a/docs/examples/retrievers/ensemble_retrieval.ipynb b/docs/examples/retrievers/ensemble_retrieval.ipynb
index 0afbb870ce9979978bfd97665f9e1ff98b3a2d0e..a968b28cdd71df15754f8699472f5ab3fa38c427 100644
--- a/docs/examples/retrievers/ensemble_retrieval.ipynb
+++ b/docs/examples/retrievers/ensemble_retrieval.ipynb
@@ -1,816 +1,816 @@
 {
- "cells": [
-  {
-   "cell_type": "markdown",
-   "id": "5bf1de44-4047-46cf-a04c-dbf910d9e179",
-   "metadata": {},
-   "source": [
-    "# Ensemble Query Engine Guide\n",
-    "\n",
-    "Oftentimes when building a RAG applications there are many retreival parameters/strategies to decide from (from chunk size to vector vs. keyword vs. hybrid search, for instance).\n",
-    "\n",
-    "Thought: what if we could try a bunch of strategies at once, and have any AI/reranker/LLM prune the results?\n",
-    "\n",
-    "This achieves two purposes:\n",
-    "- Better (albeit more costly) retrieved results by pooling results from multiple strategies, assuming the reranker is good\n",
-    "- A way to benchmark different retrieval strategies against each other (w.r.t reranker)\n",
-    "\n",
-    "This guide showcases this over the Great Gatsby. We do ensemble retrieval over different chunk sizes and also different indices.\n",
-    "\n",
-    "**NOTE**: A closely related guide is our [Ensemble Retrievers Guide](https://gpt-index.readthedocs.io/en/stable/examples/retrievers/ensemble_retrieval.html) - make sure to check it out! "
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "id": "6e73fead-ec2c-4346-bd08-e183c13c7e29",
-   "metadata": {},
-   "source": [
-    "## Setup"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 18,
-   "id": "a2d59778-4cda-47b5-8cd0-b80fee91d1e4",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "# NOTE: This is ONLY necessary in jupyter notebook.\n",
-    "# Details: Jupyter runs an event-loop behind the scenes.\n",
-    "#          This results in nested event-loops when we start an event-loop to make async queries.\n",
-    "#          This is normally not allowed, we use nest_asyncio to allow it for convenience.\n",
-    "import nest_asyncio\n",
-    "\n",
-    "nest_asyncio.apply()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 2,
-   "id": "c628448c-573c-4eeb-a7e1-707fe8cc575c",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Note: NumExpr detected 12 cores but \"NUMEXPR_MAX_THREADS\" not set, so enforcing safe limit of 8.\n",
-      "NumExpr defaulting to 8 threads.\n"
-     ]
-    }
-   ],
-   "source": [
-    "import logging\n",
-    "import sys\n",
-    "\n",
-    "logging.basicConfig(stream=sys.stdout, level=logging.INFO)\n",
-    "logging.getLogger().handlers = []\n",
-    "logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))\n",
-    "\n",
-    "from llama_index import (\n",
-    "    VectorStoreIndex,\n",
-    "    ListIndex,\n",
-    "    SimpleDirectoryReader,\n",
-    "    ServiceContext,\n",
-    "    StorageContext,\n",
-    "    SimpleKeywordTableIndex,\n",
-    ")\n",
-    "from llama_index.response.notebook_utils import display_response\n",
-    "from llama_index.llms import OpenAI"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "id": "787174ed-10ce-47d7-82fd-9ca7f891eea7",
-   "metadata": {},
-   "source": [
-    "## Load Data\n",
-    "\n",
-    "We first show how to convert a Document into a set of Nodes, and insert into a DocumentStore."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 3,
-   "id": "dd62366b-8a24-40a7-8c47-5859851149fe",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "# try loading great gatsby\n",
-    "\n",
-    "documents = SimpleDirectoryReader(\n",
-    "    input_files=[\"../../../examples/gatsby/gatsby_full.txt\"]\n",
-    ").load_data()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 4,
-   "id": "7081194a-ede7-478e-bff2-23e89e23ef16",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Chunk Size: 128\n",
-      "Chunk Size: 256\n",
-      "Chunk Size: 512\n",
-      "Chunk Size: 1024\n"
-     ]
-    }
-   ],
-   "source": [
-    "# initialize service context (set chunk size)\n",
-    "llm = OpenAI(model=\"gpt-4\")\n",
-    "chunk_sizes = [128, 256, 512, 1024]\n",
-    "service_contexts = []\n",
-    "nodes_list = []\n",
-    "vector_indices = []\n",
-    "query_engines = []\n",
-    "for chunk_size in chunk_sizes:\n",
-    "    print(f\"Chunk Size: {chunk_size}\")\n",
-    "    service_context = ServiceContext.from_defaults(chunk_size=chunk_size, llm=llm)\n",
-    "    service_contexts.append(service_context)\n",
-    "    nodes = service_context.node_parser.get_nodes_from_documents(documents)\n",
-    "\n",
-    "    # add chunk size to nodes to track later\n",
-    "    for node in nodes:\n",
-    "        node.metadata[\"chunk_size\"] = chunk_size\n",
-    "        node.excluded_embed_metadata_keys = [\"chunk_size\"]\n",
-    "        node.excluded_llm_metadata_keys = [\"chunk_size\"]\n",
-    "\n",
-    "    nodes_list.append(nodes)\n",
-    "\n",
-    "    # build vector index\n",
-    "    vector_index = VectorStoreIndex(nodes)\n",
-    "    vector_indices.append(vector_index)\n",
-    "\n",
-    "    # query engines\n",
-    "    query_engines.append(vector_index.as_query_engine())"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 5,
-   "id": "fbca69b4-d8d5-4dcb-af33-f9ed4a91ec05",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "# try ensemble retrieval\n",
-    "\n",
-    "from llama_index.tools import RetrieverTool\n",
-    "\n",
-    "retriever_tools = []\n",
-    "for chunk_size, vector_index in zip(chunk_sizes, vector_indices):\n",
-    "    retriever_tool = RetrieverTool.from_defaults(\n",
-    "        retriever=vector_index.as_retriever(),\n",
-    "        description=f\"Retrieves relevant context from the Great Gatsby (chunk size {chunk_size})\",\n",
-    "    )\n",
-    "    retriever_tools.append(retriever_tool)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 6,
-   "id": "5c9eaa6f-8f11-4380-b3c6-79092f17def3",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "from llama_index.selectors.pydantic_selectors import PydanticMultiSelector\n",
-    "from llama_index.retrievers import RouterRetriever\n",
-    "\n",
-    "\n",
-    "retriever = RouterRetriever(\n",
-    "    selector=PydanticMultiSelector.from_defaults(llm=llm, max_outputs=4),\n",
-    "    retriever_tools=retriever_tools,\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 7,
-   "id": "7c72c61c-d4f7-4159-bb80-1989468ab61c",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Selecting retriever 0: This choice retrieves a moderate amount of context from the Great Gatsby, which could provide a balanced amount of detail for describing and summarizing the interactions between Gatsby and Daisy..\n",
-      "Selecting retriever 1: This choice retrieves a larger amount of context from the Great Gatsby, which could provide more detail for describing and summarizing the interactions between Gatsby and Daisy..\n",
-      "Selecting retriever 2: This choice retrieves an even larger amount of context from the Great Gatsby, which could provide a comprehensive summary of the interactions between Gatsby and Daisy..\n",
-      "Selecting retriever 3: This choice retrieves the largest amount of context from the Great Gatsby, which could provide the most detailed and comprehensive summary of the interactions between Gatsby and Daisy..\n",
-      "message='OpenAI API response' path=https://api.openai.com/v1/embeddings processing_ms=40 request_id=d269f8a582ac9a70cdb6f587a34d5877 response_code=200\n",
-      "message='OpenAI API response' path=https://api.openai.com/v1/embeddings processing_ms=57 request_id=29679c9e6d594d1f96eb077a4049c6fa response_code=200\n",
-      "message='OpenAI API response' path=https://api.openai.com/v1/embeddings processing_ms=36 request_id=3b3139e7ed9480ff7e7791cc860b6bcd response_code=200\n",
-      "message='OpenAI API response' path=https://api.openai.com/v1/embeddings processing_ms=183 request_id=c0faed36112cb2554de278af9fd58f66 response_code=200\n"
-     ]
-    }
-   ],
-   "source": [
-    "nodes = await retriever.aretrieve(\n",
-    "    \"Describe and summarize the interactions between Gatsby and Daisy\"\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 8,
-   "id": "590ed8bc-83ad-4851-9ec6-bfbbdf3ff38d",
-   "metadata": {
-    "scrolled": true,
-    "tags": []
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "128\n",
-      "the beach that morning. Finally we came to Gatsby’s own\n",
-      "apartment, a bedroom and a bath, and an Adam’s study, where we sat\n",
-      "down and drank a glass of some Chartreuse he took from a cupboard in\n",
-      "the wall.\n",
-      "\n",
-      "He hadn’t once ceased looking at Daisy, and I think he revalued\n",
-      "everything in his house according to the measure of response it drew\n",
-      "from her well-loved eyes. Sometimes too, he stared around at his\n",
-      "possessions in a dazed\n",
-      "128\n",
-      "turn out as he had\n",
-      "imagined. He had intended, probably, to take what he could and go—but\n",
-      "now he found that he had committed himself to the following of a\n",
-      "grail. He knew that Daisy was extraordinary, but he didn’t realize\n",
-      "just how extraordinary a “nice” girl could be. She vanished into her\n",
-      "rich house, into her rich, full life, leaving Gatsby—nothing. He felt\n",
-      "married to her, that was all.\n",
-      "\n",
-      "When they met again, two days later, it\n",
-      "256\n",
-      "the\n",
-      "direction. In this heat every extra gesture was an affront to the\n",
-      "common store of life.\n",
-      "\n",
-      "The room, shadowed well with awnings, was dark and cool. Daisy and\n",
-      "Jordan lay upon an enormous couch, like silver idols weighing down\n",
-      "their own white dresses against the singing breeze of the fans.\n",
-      "\n",
-      "“We can’t move,” they said together.\n",
-      "\n",
-      "Jordan’s fingers, powdered white over their tan, rested for a moment\n",
-      "in mine.\n",
-      "\n",
-      "“And Mr. Thomas Buchanan, the athlete?” I inquired.\n",
-      "\n",
-      "Simultaneously I heard his voice, gruff, muffled, husky, at the hall\n",
-      "telephone.\n",
-      "\n",
-      "Gatsby stood in the centre of the crimson carpet and gazed around with\n",
-      "fascinated eyes. Daisy watched him and laughed, her sweet, exciting\n",
-      "laugh; a tiny gust of powder rose from her bosom into the air.\n",
-      "\n",
-      "“The rumour is,” whispered Jordan, “that\n",
-      "256\n",
-      "In the meantime, In between time—”\n",
-      "\n",
-      "As I went over to say goodbye I saw that the expression of\n",
-      "bewilderment had come back into Gatsby’s face, as though a faint doubt\n",
-      "had occurred to him as to the quality of his present happiness. Almost\n",
-      "five years! There must have been moments even that afternoon when\n",
-      "Daisy tumbled short of his dreams—not through her own fault, but\n",
-      "because of the colossal vitality of his illusion. It had gone beyond\n",
-      "her, beyond everything. He had thrown himself into it with a creative\n",
-      "passion, adding to it all the time, decking it out with every bright\n",
-      "feather that drifted his way. No amount of fire or freshness can\n",
-      "challenge what a man can store up in his ghostly heart.\n",
-      "\n",
-      "As I watched him he adjusted himself a little, visibly. His hand took\n",
-      "hold of hers, and as she said something low in his ear he turned\n",
-      "toward her with a rush of emotion. I think that voice held him most,\n",
-      "with its fluctuating,\n",
-      "512\n",
-      "go downstairs,” interrupted Gatsby. He flipped a switch. The\n",
-      "grey windows disappeared as the house glowed full of light.\n",
-      "\n",
-      "In the music-room Gatsby turned on a solitary lamp beside the piano.\n",
-      "He lit Daisy’s cigarette from a trembling match, and sat down with her\n",
-      "on a couch far across the room, where there was no light save what the\n",
-      "gleaming floor bounced in from the hall.\n",
-      "\n",
-      "When Klipspringer had played “The Love Nest” he turned around on the\n",
-      "bench and searched unhappily for Gatsby in the gloom.\n",
-      "\n",
-      "“I’m all out of practice, you see. I told you I couldn’t play. I’m all\n",
-      "out of prac—”\n",
-      "\n",
-      "“Don’t talk so much, old sport,” commanded Gatsby. “Play!”\n",
-      "\n",
-      " “In the morning, In the evening, Ain’t we got fun—”\n",
-      "\n",
-      "Outside the wind was loud and there was a faint flow of thunder along\n",
-      "the Sound. All the lights were going on in West Egg now; the electric\n",
-      "trains, men-carrying, were plunging home through the rain from New\n",
-      "York. It was the hour of a profound human change, and excitement was\n",
-      "generating on the air.\n",
-      "\n",
-      " “One thing’s sure and nothing’s surer The rich get richer and the\n",
-      " poor get—children. In the meantime, In between time—”\n",
-      "\n",
-      "As I went over to say goodbye I saw that the expression of\n",
-      "bewilderment had come back into Gatsby’s face, as though a faint doubt\n",
-      "had occurred to him as to the quality of his present happiness. Almost\n",
-      "five years! There must have been moments even that afternoon when\n",
-      "Daisy tumbled short of his dreams—not through her own fault, but\n",
-      "because of the colossal vitality of his illusion. It had gone beyond\n",
-      "her, beyond everything. He had thrown himself into it with a creative\n",
-      "passion, adding to it all the time, decking it out\n",
-      "512\n",
-      "world complete\n",
-      "in itself, with its own standards and its own great figures, second to\n",
-      "nothing because it had no consciousness of being so, and now I was\n",
-      "looking at it again, through Daisy’s eyes. It is invariably saddening\n",
-      "to look through new eyes at things upon which you have expended your\n",
-      "own powers of adjustment.\n",
-      "\n",
-      "They arrived at twilight, and, as we strolled out among the sparkling\n",
-      "hundreds, Daisy’s voice was playing murmurous tricks in her throat.\n",
-      "\n",
-      "“These things excite me so,” she whispered. “If you want to kiss me\n",
-      "any time during the evening, Nick, just let me know and I’ll be glad\n",
-      "to arrange it for you. Just mention my name. Or present a green card.\n",
-      "I’m giving out green—”\n",
-      "\n",
-      "“Look around,” suggested Gatsby.\n",
-      "\n",
-      "“I’m looking around. I’m having a marvellous—”\n",
-      "\n",
-      "“You must see the faces of many people you’ve heard about.”\n",
-      "\n",
-      "Tom’s arrogant eyes roamed the crowd.\n",
-      "\n",
-      "“We don’t go around very much,” he said; “in fact, I was just thinking\n",
-      "I don’t know a soul here.”\n",
-      "\n",
-      "“Perhaps you know that lady.” Gatsby indicated a gorgeous, scarcely\n",
-      "human orchid of a woman who sat in state under a white-plum tree. Tom\n",
-      "and Daisy stared, with that peculiarly unreal feeling that accompanies\n",
-      "the recognition of a hitherto ghostly celebrity of the movies.\n",
-      "\n",
-      "“She’s lovely,” said Daisy.\n",
-      "\n",
-      "“The man bending over her is her director.”\n",
-      "\n",
-      "He took them ceremoniously from group to group:\n",
-      "\n",
-      "“Mrs. Buchanan … and Mr. Buchanan—” After an instant’s hesitation he\n",
-      "added: “the polo player.”\n",
-      "\n",
-      "“Oh no,” objected Tom quickly,\n",
-      "1024\n",
-      "The\n",
-      "grey windows disappeared as the house glowed full of light.\n",
-      "\n",
-      "In the music-room Gatsby turned on a solitary lamp beside the piano.\n",
-      "He lit Daisy’s cigarette from a trembling match, and sat down with her\n",
-      "on a couch far across the room, where there was no light save what the\n",
-      "gleaming floor bounced in from the hall.\n",
-      "\n",
-      "When Klipspringer had played “The Love Nest” he turned around on the\n",
-      "bench and searched unhappily for Gatsby in the gloom.\n",
-      "\n",
-      "“I’m all out of practice, you see. I told you I couldn’t play. I’m all\n",
-      "out of prac—”\n",
-      "\n",
-      "“Don’t talk so much, old sport,” commanded Gatsby. “Play!”\n",
-      "\n",
-      " “In the morning, In the evening, Ain’t we got fun—”\n",
-      "\n",
-      "Outside the wind was loud and there was a faint flow of thunder along\n",
-      "the Sound. All the lights were going on in West Egg now; the electric\n",
-      "trains, men-carrying, were plunging home through the rain from New\n",
-      "York. It was the hour of a profound human change, and excitement was\n",
-      "generating on the air.\n",
-      "\n",
-      " “One thing’s sure and nothing’s surer The rich get richer and the\n",
-      " poor get—children. In the meantime, In between time—”\n",
-      "\n",
-      "As I went over to say goodbye I saw that the expression of\n",
-      "bewilderment had come back into Gatsby’s face, as though a faint doubt\n",
-      "had occurred to him as to the quality of his present happiness. Almost\n",
-      "five years! There must have been moments even that afternoon when\n",
-      "Daisy tumbled short of his dreams—not through her own fault, but\n",
-      "because of the colossal vitality of his illusion. It had gone beyond\n",
-      "her, beyond everything. He had thrown himself into it with a creative\n",
-      "passion, adding to it all the time, decking it out with every bright\n",
-      "feather that drifted his way. No amount of fire or freshness can\n",
-      "challenge what a man can store up in his ghostly heart.\n",
-      "\n",
-      "As I watched him he adjusted himself a little, visibly. His hand took\n",
-      "hold of hers, and as she said something low in his ear he turned\n",
-      "toward her with a rush of emotion. I think that voice held him most,\n",
-      "with its fluctuating, feverish warmth, because it couldn’t be\n",
-      "over-dreamed—that voice was a deathless song.\n",
-      "\n",
-      "They had forgotten me, but Daisy glanced up and held out her hand;\n",
-      "Gatsby didn’t know me now at all. I looked once more at them and they\n",
-      "looked back at me, remotely, possessed by intense life. Then I went\n",
-      "out of the room and down the marble steps into the rain, leaving them\n",
-      "there together.\n",
-      "\n",
-      "\n",
-      "                                  VI\n",
-      "\n",
-      "About this time an ambitious young reporter from New York arrived one\n",
-      "morning at Gatsby’s door and asked him if he had anything to say.\n",
-      "\n",
-      "“Anything to say about what?” inquired Gatsby politely.\n",
-      "\n",
-      "“Why—any statement to give out.”\n",
-      "\n",
-      "It transpired after a confused five minutes that the man had heard\n",
-      "Gatsby’s name around his office in a connection which he either\n",
-      "wouldn’t reveal or didn’t fully understand. This was his day off and\n",
-      "with laudable initiative he had hurried out “to see.”\n",
-      "\n",
-      "It was a random shot, and yet the reporter’s instinct was right.\n",
-      "Gatsby’s notoriety, spread about by the hundreds who had accepted his\n",
-      "hospitality and so become authorities upon his past, had increased all\n",
-      "summer until he fell just short of being news. Contemporary legends\n",
-      "such as the “underground pipeline to Canada” attached themselves to\n",
-      "him, and there was one persistent story that he didn’t live in a house\n",
-      "at all, but in a boat that looked like a house and was\n",
-      "1024\n",
-      "Daisy insistently. Gatsby’s eyes\n",
-      "floated toward her. “Ah,” she cried, “you look so cool.”\n",
-      "\n",
-      "Their eyes met, and they stared together at each other, alone in\n",
-      "space. With an effort she glanced down at the table.\n",
-      "\n",
-      "“You always look so cool,” she repeated.\n",
-      "\n",
-      "She had told him that she loved him, and Tom Buchanan saw. He was\n",
-      "astounded. His mouth opened a little, and he looked at Gatsby, and\n",
-      "then back at Daisy as if he had just recognized her as someone he knew\n",
-      "a long time ago.\n",
-      "\n",
-      "“You resemble the advertisement of the man,” she went on innocently.\n",
-      "“You know the advertisement of the man—”\n",
-      "\n",
-      "“All right,” broke in Tom quickly, “I’m perfectly willing to go to\n",
-      "town. Come on—we’re all going to town.”\n",
-      "\n",
-      "He got up, his eyes still flashing between Gatsby and his wife. No one\n",
-      "moved.\n",
-      "\n",
-      "“Come on!” His temper cracked a little. “What’s the matter, anyhow?\n",
-      "If we’re going to town, let’s start.”\n",
-      "\n",
-      "His hand, trembling with his effort at self-control, bore to his lips\n",
-      "the last of his glass of ale. Daisy’s voice got us to our feet and out\n",
-      "on to the blazing gravel drive.\n",
-      "\n",
-      "“Are we just going to go?” she objected. “Like this? Aren’t we going\n",
-      "to let anyone smoke a cigarette first?”\n",
-      "\n",
-      "“Everybody smoked all through lunch.”\n",
-      "\n",
-      "“Oh, let’s have fun,” she begged him. “It’s too hot to fuss.”\n",
-      "\n",
-      "He didn’t answer.\n",
-      "\n",
-      "“Have it your own way,” she said. “Come on, Jordan.”\n",
-      "\n",
-      "They went upstairs to get ready while we three men stood there\n",
-      "shuffling the hot pebbles with our feet. A silver curve of the moon\n",
-      "hovered already in the western sky. Gatsby started to speak, changed\n",
-      "his mind, but not before Tom wheeled and faced him expectantly.\n",
-      "\n",
-      "“Have you got your stables here?” asked Gatsby with an effort.\n",
-      "\n",
-      "“About a quarter of a mile down the road.”\n",
-      "\n",
-      "“Oh.”\n",
-      "\n",
-      "A pause.\n",
-      "\n",
-      "“I don’t see the idea of going to town,” broke out Tom savagely.\n",
-      "“Women get these notions in their heads—”\n",
-      "\n",
-      "“Shall we take anything to drink?” called Daisy from an upper window.\n",
-      "\n",
-      "“I’ll get some whisky,” answered Tom. He went inside.\n",
-      "\n",
-      "Gatsby turned to me rigidly:\n",
-      "\n",
-      "“I can’t say anything in his house, old sport.”\n",
-      "\n",
-      "“She’s got an indiscreet voice,” I remarked. “It’s full of—” I\n",
-      "hesitated.\n",
-      "\n",
-      "“Her voice is full of money,” he said suddenly.\n",
-      "\n",
-      "That was it. I’d never understood before. It was full of money—that\n",
-      "was the inexhaustible charm that rose and fell in it, the jingle of\n",
-      "it, the cymbals’ song of it … High in a white palace the king’s\n",
-      "daughter, the golden girl …\n",
-      "\n",
-      "Tom came out of the house wrapping a quart bottle in a towel, followed\n",
-      "by Daisy and Jordan wearing small tight hats of metallic cloth and\n",
-      "carrying light capes over their arms.\n",
-      "\n",
-      "“Shall we all go in my car?” suggested Gatsby. He felt the hot, green\n",
-      "leather of the seat. “I ought to have left it in the shade.”\n",
-      "\n",
-      "“Is it standard shift?” demanded Tom.\n",
-      "\n",
-      "“Yes.”\n",
-      "\n",
-      "“Well, you take my coupé and let me drive your car to town.”\n",
-      "\n",
-      "The suggestion was distasteful to Gatsby.\n",
-      "\n",
-      "“I\n"
-     ]
-    }
-   ],
-   "source": [
-    "for node in nodes:\n",
-    "    print(node.node.metadata[\"chunk_size\"])\n",
-    "    print(node.node.get_text())"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 9,
-   "id": "1f26c527-17d2-4d4e-a6ee-8ea878ef8742",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "# define reranker\n",
-    "from llama_index.indices.postprocessor import (\n",
-    "    LLMRerank,\n",
-    "    SentenceTransformerRerank,\n",
-    "    CohereRerank,\n",
-    ")\n",
-    "\n",
-    "# reranker = LLMRerank()\n",
-    "# reranker = SentenceTransformerRerank(top_n=10)\n",
-    "reranker = CohereRerank(top_n=10)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 10,
-   "id": "828589ef-d062-40dc-8a4b-245190769445",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "# define RetrieverQueryEngine\n",
-    "from llama_index.query_engine import RetrieverQueryEngine\n",
-    "\n",
-    "query_engine = RetrieverQueryEngine(retriever, node_postprocessors=[reranker])"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 11,
-   "id": "53e3c341-e66d-4950-88d5-6411699d064b",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Selecting retriever 0: This choice provides a moderate chunk size that could contain relevant interactions between Gatsby and Daisy without being too overwhelming..\n",
-      "Selecting retriever 1: This choice provides a larger chunk size that could contain more detailed interactions between Gatsby and Daisy..\n",
-      "Selecting retriever 2: This choice provides an even larger chunk size that could contain extensive interactions between Gatsby and Daisy, providing a more comprehensive summary..\n",
-      "Selecting retriever 3: This choice provides the largest chunk size that could contain the most detailed and comprehensive interactions between Gatsby and Daisy, but it might also include a lot of irrelevant information..\n"
-     ]
-    }
-   ],
-   "source": [
-    "response = query_engine.query(\n",
-    "    \"Describe and summarize the interactions between Gatsby and Daisy\"\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "9aa680dd-03a0-4a76-b456-c4ef0136fdc2",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "display_response(\n",
-    "    response, show_source=True, source_length=500, show_source_metadata=True\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 13,
-   "id": "0a7a8303-be94-45c5-8bc5-13ec8c7f1694",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "# compute the average precision for each chunk size based on positioning in combined ranking\n",
-    "from collections import defaultdict\n",
-    "import pandas as pd\n",
-    "\n",
-    "\n",
-    "def mrr_all(metadata_values, metadata_key, source_nodes):\n",
-    "    # source nodes is a ranked list\n",
-    "    # go through each value, find out positioning in source_nodes\n",
-    "    value_to_mrr_dict = {}\n",
-    "    for metadata_value in metadata_values:\n",
-    "        mrr = 0\n",
-    "        for idx, source_node in enumerate(source_nodes):\n",
-    "            if source_node.node.metadata[metadata_key] == metadata_value:\n",
-    "                mrr = 1 / (idx + 1)\n",
-    "                break\n",
-    "            else:\n",
-    "                continue\n",
-    "\n",
-    "        # normalize AP, set in dict\n",
-    "        value_to_mrr_dict[metadata_value] = mrr\n",
-    "\n",
-    "    df = pd.DataFrame(value_to_mrr_dict, index=[\"MRR\"])\n",
-    "    df.style.set_caption(\"Mean Reciprocal Rank\")\n",
-    "    return df"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 14,
-   "id": "adebbb82-764e-4b45-933e-84bf4ad64d40",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Mean Reciprocal Rank for each Chunk Size\n"
-     ]
+    "cells": [
+        {
+            "cell_type": "markdown",
+            "id": "5bf1de44-4047-46cf-a04c-dbf910d9e179",
+            "metadata": {},
+            "source": [
+                "# Ensemble Query Engine Guide\n",
+                "\n",
+                "Oftentimes when building a RAG application there are many retrieval parameters/strategies to decide from (from chunk size to vector vs. keyword vs. hybrid search, for instance).\n",
+                "\n",
+                "Thought: what if we could try a bunch of strategies at once, and have an AI/reranker/LLM prune the results?\n",
+                "\n",
+                "This achieves two purposes:\n",
+                "- Better (albeit more costly) retrieved results by pooling results from multiple strategies, assuming the reranker is good\n",
+                "- A way to benchmark different retrieval strategies against each other (w.r.t. the reranker)\n",
+                "\n",
+                "This guide showcases this over the Great Gatsby. We do ensemble retrieval over different chunk sizes and also different indices.\n",
+                "\n",
+                "**NOTE**: A closely related guide is our [Ensemble Retrievers Guide](https://gpt-index.readthedocs.io/en/stable/examples/retrievers/ensemble_retrieval.html) - make sure to check it out! "
+            ]
+        },
+        {
+            "cell_type": "markdown",
+            "id": "6e73fead-ec2c-4346-bd08-e183c13c7e29",
+            "metadata": {},
+            "source": [
+                "## Setup"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 18,
+            "id": "a2d59778-4cda-47b5-8cd0-b80fee91d1e4",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "# NOTE: This is ONLY necessary in jupyter notebook.\n",
+                "# Details: Jupyter runs an event-loop behind the scenes.\n",
+                "#          This results in nested event-loops when we start an event-loop to make async queries.\n",
+                "#          This is normally not allowed, we use nest_asyncio to allow it for convenience.\n",
+                "import nest_asyncio\n",
+                "\n",
+                "nest_asyncio.apply()"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 2,
+            "id": "c628448c-573c-4eeb-a7e1-707fe8cc575c",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "Note: NumExpr detected 12 cores but \"NUMEXPR_MAX_THREADS\" not set, so enforcing safe limit of 8.\n",
+                        "NumExpr defaulting to 8 threads.\n"
+                    ]
+                }
+            ],
+            "source": [
+                "import logging\n",
+                "import sys\n",
+                "\n",
+                "logging.basicConfig(stream=sys.stdout, level=logging.INFO)\n",
+                "logging.getLogger().handlers = []\n",
+                "logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))\n",
+                "\n",
+                "from llama_index import (\n",
+                "    VectorStoreIndex,\n",
+                "    SummaryIndex,\n",
+                "    SimpleDirectoryReader,\n",
+                "    ServiceContext,\n",
+                "    StorageContext,\n",
+                "    SimpleKeywordTableIndex,\n",
+                ")\n",
+                "from llama_index.response.notebook_utils import display_response\n",
+                "from llama_index.llms import OpenAI"
+            ]
+        },
+        {
+            "cell_type": "markdown",
+            "id": "787174ed-10ce-47d7-82fd-9ca7f891eea7",
+            "metadata": {},
+            "source": [
+                "## Load Data\n",
+                "\n",
+                "We first show how to convert a Document into a set of Nodes, and insert into a DocumentStore."
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 3,
+            "id": "dd62366b-8a24-40a7-8c47-5859851149fe",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "# try loading great gatsby\n",
+                "\n",
+                "documents = SimpleDirectoryReader(\n",
+                "    input_files=[\"../../../examples/gatsby/gatsby_full.txt\"]\n",
+                ").load_data()"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 4,
+            "id": "7081194a-ede7-478e-bff2-23e89e23ef16",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "Chunk Size: 128\n",
+                        "Chunk Size: 256\n",
+                        "Chunk Size: 512\n",
+                        "Chunk Size: 1024\n"
+                    ]
+                }
+            ],
+            "source": [
+                "# initialize service context (set chunk size)\n",
+                "llm = OpenAI(model=\"gpt-4\")\n",
+                "chunk_sizes = [128, 256, 512, 1024]\n",
+                "service_contexts = []\n",
+                "nodes_list = []\n",
+                "vector_indices = []\n",
+                "query_engines = []\n",
+                "for chunk_size in chunk_sizes:\n",
+                "    print(f\"Chunk Size: {chunk_size}\")\n",
+                "    service_context = ServiceContext.from_defaults(chunk_size=chunk_size, llm=llm)\n",
+                "    service_contexts.append(service_context)\n",
+                "    nodes = service_context.node_parser.get_nodes_from_documents(documents)\n",
+                "\n",
+                "    # add chunk size to nodes to track later\n",
+                "    for node in nodes:\n",
+                "        node.metadata[\"chunk_size\"] = chunk_size\n",
+                "        node.excluded_embed_metadata_keys = [\"chunk_size\"]\n",
+                "        node.excluded_llm_metadata_keys = [\"chunk_size\"]\n",
+                "\n",
+                "    nodes_list.append(nodes)\n",
+                "\n",
+                "    # build vector index\n",
+                "    vector_index = VectorStoreIndex(nodes)\n",
+                "    vector_indices.append(vector_index)\n",
+                "\n",
+                "    # query engines\n",
+                "    query_engines.append(vector_index.as_query_engine())"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 5,
+            "id": "fbca69b4-d8d5-4dcb-af33-f9ed4a91ec05",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "# try ensemble retrieval\n",
+                "\n",
+                "from llama_index.tools import RetrieverTool\n",
+                "\n",
+                "retriever_tools = []\n",
+                "for chunk_size, vector_index in zip(chunk_sizes, vector_indices):\n",
+                "    retriever_tool = RetrieverTool.from_defaults(\n",
+                "        retriever=vector_index.as_retriever(),\n",
+                "        description=f\"Retrieves relevant context from the Great Gatsby (chunk size {chunk_size})\",\n",
+                "    )\n",
+                "    retriever_tools.append(retriever_tool)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 6,
+            "id": "5c9eaa6f-8f11-4380-b3c6-79092f17def3",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "from llama_index.selectors.pydantic_selectors import PydanticMultiSelector\n",
+                "from llama_index.retrievers import RouterRetriever\n",
+                "\n",
+                "\n",
+                "retriever = RouterRetriever(\n",
+                "    selector=PydanticMultiSelector.from_defaults(llm=llm, max_outputs=4),\n",
+                "    retriever_tools=retriever_tools,\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 7,
+            "id": "7c72c61c-d4f7-4159-bb80-1989468ab61c",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "Selecting retriever 0: This choice retrieves a moderate amount of context from the Great Gatsby, which could provide a balanced amount of detail for describing and summarizing the interactions between Gatsby and Daisy..\n",
+                        "Selecting retriever 1: This choice retrieves a larger amount of context from the Great Gatsby, which could provide more detail for describing and summarizing the interactions between Gatsby and Daisy..\n",
+                        "Selecting retriever 2: This choice retrieves an even larger amount of context from the Great Gatsby, which could provide a comprehensive summary of the interactions between Gatsby and Daisy..\n",
+                        "Selecting retriever 3: This choice retrieves the largest amount of context from the Great Gatsby, which could provide the most detailed and comprehensive summary of the interactions between Gatsby and Daisy..\n",
+                        "message='OpenAI API response' path=https://api.openai.com/v1/embeddings processing_ms=40 request_id=d269f8a582ac9a70cdb6f587a34d5877 response_code=200\n",
+                        "message='OpenAI API response' path=https://api.openai.com/v1/embeddings processing_ms=57 request_id=29679c9e6d594d1f96eb077a4049c6fa response_code=200\n",
+                        "message='OpenAI API response' path=https://api.openai.com/v1/embeddings processing_ms=36 request_id=3b3139e7ed9480ff7e7791cc860b6bcd response_code=200\n",
+                        "message='OpenAI API response' path=https://api.openai.com/v1/embeddings processing_ms=183 request_id=c0faed36112cb2554de278af9fd58f66 response_code=200\n"
+                    ]
+                }
+            ],
+            "source": [
+                "nodes = await retriever.aretrieve(\n",
+                "    \"Describe and summarize the interactions between Gatsby and Daisy\"\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 8,
+            "id": "590ed8bc-83ad-4851-9ec6-bfbbdf3ff38d",
+            "metadata": {
+                "scrolled": true,
+                "tags": []
+            },
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "128\n",
+                        "the beach that morning. Finally we came to Gatsby’s own\n",
+                        "apartment, a bedroom and a bath, and an Adam’s study, where we sat\n",
+                        "down and drank a glass of some Chartreuse he took from a cupboard in\n",
+                        "the wall.\n",
+                        "\n",
+                        "He hadn’t once ceased looking at Daisy, and I think he revalued\n",
+                        "everything in his house according to the measure of response it drew\n",
+                        "from her well-loved eyes. Sometimes too, he stared around at his\n",
+                        "possessions in a dazed\n",
+                        "128\n",
+                        "turn out as he had\n",
+                        "imagined. He had intended, probably, to take what he could and go—but\n",
+                        "now he found that he had committed himself to the following of a\n",
+                        "grail. He knew that Daisy was extraordinary, but he didn’t realize\n",
+                        "just how extraordinary a “nice” girl could be. She vanished into her\n",
+                        "rich house, into her rich, full life, leaving Gatsby—nothing. He felt\n",
+                        "married to her, that was all.\n",
+                        "\n",
+                        "When they met again, two days later, it\n",
+                        "256\n",
+                        "the\n",
+                        "direction. In this heat every extra gesture was an affront to the\n",
+                        "common store of life.\n",
+                        "\n",
+                        "The room, shadowed well with awnings, was dark and cool. Daisy and\n",
+                        "Jordan lay upon an enormous couch, like silver idols weighing down\n",
+                        "their own white dresses against the singing breeze of the fans.\n",
+                        "\n",
+                        "“We can’t move,” they said together.\n",
+                        "\n",
+                        "Jordan’s fingers, powdered white over their tan, rested for a moment\n",
+                        "in mine.\n",
+                        "\n",
+                        "“And Mr. Thomas Buchanan, the athlete?” I inquired.\n",
+                        "\n",
+                        "Simultaneously I heard his voice, gruff, muffled, husky, at the hall\n",
+                        "telephone.\n",
+                        "\n",
+                        "Gatsby stood in the centre of the crimson carpet and gazed around with\n",
+                        "fascinated eyes. Daisy watched him and laughed, her sweet, exciting\n",
+                        "laugh; a tiny gust of powder rose from her bosom into the air.\n",
+                        "\n",
+                        "“The rumour is,” whispered Jordan, “that\n",
+                        "256\n",
+                        "In the meantime, In between time—”\n",
+                        "\n",
+                        "As I went over to say goodbye I saw that the expression of\n",
+                        "bewilderment had come back into Gatsby’s face, as though a faint doubt\n",
+                        "had occurred to him as to the quality of his present happiness. Almost\n",
+                        "five years! There must have been moments even that afternoon when\n",
+                        "Daisy tumbled short of his dreams—not through her own fault, but\n",
+                        "because of the colossal vitality of his illusion. It had gone beyond\n",
+                        "her, beyond everything. He had thrown himself into it with a creative\n",
+                        "passion, adding to it all the time, decking it out with every bright\n",
+                        "feather that drifted his way. No amount of fire or freshness can\n",
+                        "challenge what a man can store up in his ghostly heart.\n",
+                        "\n",
+                        "As I watched him he adjusted himself a little, visibly. His hand took\n",
+                        "hold of hers, and as she said something low in his ear he turned\n",
+                        "toward her with a rush of emotion. I think that voice held him most,\n",
+                        "with its fluctuating,\n",
+                        "512\n",
+                        "go downstairs,” interrupted Gatsby. He flipped a switch. The\n",
+                        "grey windows disappeared as the house glowed full of light.\n",
+                        "\n",
+                        "In the music-room Gatsby turned on a solitary lamp beside the piano.\n",
+                        "He lit Daisy’s cigarette from a trembling match, and sat down with her\n",
+                        "on a couch far across the room, where there was no light save what the\n",
+                        "gleaming floor bounced in from the hall.\n",
+                        "\n",
+                        "When Klipspringer had played “The Love Nest” he turned around on the\n",
+                        "bench and searched unhappily for Gatsby in the gloom.\n",
+                        "\n",
+                        "“I’m all out of practice, you see. I told you I couldn’t play. I’m all\n",
+                        "out of prac—”\n",
+                        "\n",
+                        "“Don’t talk so much, old sport,” commanded Gatsby. “Play!”\n",
+                        "\n",
+                        " “In the morning, In the evening, Ain’t we got fun—”\n",
+                        "\n",
+                        "Outside the wind was loud and there was a faint flow of thunder along\n",
+                        "the Sound. All the lights were going on in West Egg now; the electric\n",
+                        "trains, men-carrying, were plunging home through the rain from New\n",
+                        "York. It was the hour of a profound human change, and excitement was\n",
+                        "generating on the air.\n",
+                        "\n",
+                        " “One thing’s sure and nothing’s surer The rich get richer and the\n",
+                        " poor get—children. In the meantime, In between time—”\n",
+                        "\n",
+                        "As I went over to say goodbye I saw that the expression of\n",
+                        "bewilderment had come back into Gatsby’s face, as though a faint doubt\n",
+                        "had occurred to him as to the quality of his present happiness. Almost\n",
+                        "five years! There must have been moments even that afternoon when\n",
+                        "Daisy tumbled short of his dreams—not through her own fault, but\n",
+                        "because of the colossal vitality of his illusion. It had gone beyond\n",
+                        "her, beyond everything. He had thrown himself into it with a creative\n",
+                        "passion, adding to it all the time, decking it out\n",
+                        "512\n",
+                        "world complete\n",
+                        "in itself, with its own standards and its own great figures, second to\n",
+                        "nothing because it had no consciousness of being so, and now I was\n",
+                        "looking at it again, through Daisy’s eyes. It is invariably saddening\n",
+                        "to look through new eyes at things upon which you have expended your\n",
+                        "own powers of adjustment.\n",
+                        "\n",
+                        "They arrived at twilight, and, as we strolled out among the sparkling\n",
+                        "hundreds, Daisy’s voice was playing murmurous tricks in her throat.\n",
+                        "\n",
+                        "“These things excite me so,” she whispered. “If you want to kiss me\n",
+                        "any time during the evening, Nick, just let me know and I’ll be glad\n",
+                        "to arrange it for you. Just mention my name. Or present a green card.\n",
+                        "I’m giving out green—”\n",
+                        "\n",
+                        "“Look around,” suggested Gatsby.\n",
+                        "\n",
+                        "“I’m looking around. I’m having a marvellous—”\n",
+                        "\n",
+                        "“You must see the faces of many people you’ve heard about.”\n",
+                        "\n",
+                        "Tom’s arrogant eyes roamed the crowd.\n",
+                        "\n",
+                        "“We don’t go around very much,” he said; “in fact, I was just thinking\n",
+                        "I don’t know a soul here.”\n",
+                        "\n",
+                        "“Perhaps you know that lady.” Gatsby indicated a gorgeous, scarcely\n",
+                        "human orchid of a woman who sat in state under a white-plum tree. Tom\n",
+                        "and Daisy stared, with that peculiarly unreal feeling that accompanies\n",
+                        "the recognition of a hitherto ghostly celebrity of the movies.\n",
+                        "\n",
+                        "“She’s lovely,” said Daisy.\n",
+                        "\n",
+                        "“The man bending over her is her director.”\n",
+                        "\n",
+                        "He took them ceremoniously from group to group:\n",
+                        "\n",
+                        "“Mrs. Buchanan … and Mr. Buchanan—” After an instant’s hesitation he\n",
+                        "added: “the polo player.”\n",
+                        "\n",
+                        "“Oh no,” objected Tom quickly,\n",
+                        "1024\n",
+                        "The\n",
+                        "grey windows disappeared as the house glowed full of light.\n",
+                        "\n",
+                        "In the music-room Gatsby turned on a solitary lamp beside the piano.\n",
+                        "He lit Daisy’s cigarette from a trembling match, and sat down with her\n",
+                        "on a couch far across the room, where there was no light save what the\n",
+                        "gleaming floor bounced in from the hall.\n",
+                        "\n",
+                        "When Klipspringer had played “The Love Nest” he turned around on the\n",
+                        "bench and searched unhappily for Gatsby in the gloom.\n",
+                        "\n",
+                        "“I’m all out of practice, you see. I told you I couldn’t play. I’m all\n",
+                        "out of prac—”\n",
+                        "\n",
+                        "“Don’t talk so much, old sport,” commanded Gatsby. “Play!”\n",
+                        "\n",
+                        " “In the morning, In the evening, Ain’t we got fun—”\n",
+                        "\n",
+                        "Outside the wind was loud and there was a faint flow of thunder along\n",
+                        "the Sound. All the lights were going on in West Egg now; the electric\n",
+                        "trains, men-carrying, were plunging home through the rain from New\n",
+                        "York. It was the hour of a profound human change, and excitement was\n",
+                        "generating on the air.\n",
+                        "\n",
+                        " “One thing’s sure and nothing’s surer The rich get richer and the\n",
+                        " poor get—children. In the meantime, In between time—”\n",
+                        "\n",
+                        "As I went over to say goodbye I saw that the expression of\n",
+                        "bewilderment had come back into Gatsby’s face, as though a faint doubt\n",
+                        "had occurred to him as to the quality of his present happiness. Almost\n",
+                        "five years! There must have been moments even that afternoon when\n",
+                        "Daisy tumbled short of his dreams—not through her own fault, but\n",
+                        "because of the colossal vitality of his illusion. It had gone beyond\n",
+                        "her, beyond everything. He had thrown himself into it with a creative\n",
+                        "passion, adding to it all the time, decking it out with every bright\n",
+                        "feather that drifted his way. No amount of fire or freshness can\n",
+                        "challenge what a man can store up in his ghostly heart.\n",
+                        "\n",
+                        "As I watched him he adjusted himself a little, visibly. His hand took\n",
+                        "hold of hers, and as she said something low in his ear he turned\n",
+                        "toward her with a rush of emotion. I think that voice held him most,\n",
+                        "with its fluctuating, feverish warmth, because it couldn’t be\n",
+                        "over-dreamed—that voice was a deathless song.\n",
+                        "\n",
+                        "They had forgotten me, but Daisy glanced up and held out her hand;\n",
+                        "Gatsby didn’t know me now at all. I looked once more at them and they\n",
+                        "looked back at me, remotely, possessed by intense life. Then I went\n",
+                        "out of the room and down the marble steps into the rain, leaving them\n",
+                        "there together.\n",
+                        "\n",
+                        "\n",
+                        "                                  VI\n",
+                        "\n",
+                        "About this time an ambitious young reporter from New York arrived one\n",
+                        "morning at Gatsby’s door and asked him if he had anything to say.\n",
+                        "\n",
+                        "“Anything to say about what?” inquired Gatsby politely.\n",
+                        "\n",
+                        "“Why—any statement to give out.”\n",
+                        "\n",
+                        "It transpired after a confused five minutes that the man had heard\n",
+                        "Gatsby’s name around his office in a connection which he either\n",
+                        "wouldn’t reveal or didn’t fully understand. This was his day off and\n",
+                        "with laudable initiative he had hurried out “to see.”\n",
+                        "\n",
+                        "It was a random shot, and yet the reporter’s instinct was right.\n",
+                        "Gatsby’s notoriety, spread about by the hundreds who had accepted his\n",
+                        "hospitality and so become authorities upon his past, had increased all\n",
+                        "summer until he fell just short of being news. Contemporary legends\n",
+                        "such as the “underground pipeline to Canada” attached themselves to\n",
+                        "him, and there was one persistent story that he didn’t live in a house\n",
+                        "at all, but in a boat that looked like a house and was\n",
+                        "1024\n",
+                        "Daisy insistently. Gatsby’s eyes\n",
+                        "floated toward her. “Ah,” she cried, “you look so cool.”\n",
+                        "\n",
+                        "Their eyes met, and they stared together at each other, alone in\n",
+                        "space. With an effort she glanced down at the table.\n",
+                        "\n",
+                        "“You always look so cool,” she repeated.\n",
+                        "\n",
+                        "She had told him that she loved him, and Tom Buchanan saw. He was\n",
+                        "astounded. His mouth opened a little, and he looked at Gatsby, and\n",
+                        "then back at Daisy as if he had just recognized her as someone he knew\n",
+                        "a long time ago.\n",
+                        "\n",
+                        "“You resemble the advertisement of the man,” she went on innocently.\n",
+                        "“You know the advertisement of the man—”\n",
+                        "\n",
+                        "“All right,” broke in Tom quickly, “I’m perfectly willing to go to\n",
+                        "town. Come on—we’re all going to town.”\n",
+                        "\n",
+                        "He got up, his eyes still flashing between Gatsby and his wife. No one\n",
+                        "moved.\n",
+                        "\n",
+                        "“Come on!” His temper cracked a little. “What’s the matter, anyhow?\n",
+                        "If we’re going to town, let’s start.”\n",
+                        "\n",
+                        "His hand, trembling with his effort at self-control, bore to his lips\n",
+                        "the last of his glass of ale. Daisy’s voice got us to our feet and out\n",
+                        "on to the blazing gravel drive.\n",
+                        "\n",
+                        "“Are we just going to go?” she objected. “Like this? Aren’t we going\n",
+                        "to let anyone smoke a cigarette first?”\n",
+                        "\n",
+                        "“Everybody smoked all through lunch.”\n",
+                        "\n",
+                        "“Oh, let’s have fun,” she begged him. “It’s too hot to fuss.”\n",
+                        "\n",
+                        "He didn’t answer.\n",
+                        "\n",
+                        "“Have it your own way,” she said. “Come on, Jordan.”\n",
+                        "\n",
+                        "They went upstairs to get ready while we three men stood there\n",
+                        "shuffling the hot pebbles with our feet. A silver curve of the moon\n",
+                        "hovered already in the western sky. Gatsby started to speak, changed\n",
+                        "his mind, but not before Tom wheeled and faced him expectantly.\n",
+                        "\n",
+                        "“Have you got your stables here?” asked Gatsby with an effort.\n",
+                        "\n",
+                        "“About a quarter of a mile down the road.”\n",
+                        "\n",
+                        "“Oh.”\n",
+                        "\n",
+                        "A pause.\n",
+                        "\n",
+                        "“I don’t see the idea of going to town,” broke out Tom savagely.\n",
+                        "“Women get these notions in their heads—”\n",
+                        "\n",
+                        "“Shall we take anything to drink?” called Daisy from an upper window.\n",
+                        "\n",
+                        "“I’ll get some whisky,” answered Tom. He went inside.\n",
+                        "\n",
+                        "Gatsby turned to me rigidly:\n",
+                        "\n",
+                        "“I can’t say anything in his house, old sport.”\n",
+                        "\n",
+                        "“She’s got an indiscreet voice,” I remarked. “It’s full of—” I\n",
+                        "hesitated.\n",
+                        "\n",
+                        "“Her voice is full of money,” he said suddenly.\n",
+                        "\n",
+                        "That was it. I’d never understood before. It was full of money—that\n",
+                        "was the inexhaustible charm that rose and fell in it, the jingle of\n",
+                        "it, the cymbals’ song of it … High in a white palace the king’s\n",
+                        "daughter, the golden girl …\n",
+                        "\n",
+                        "Tom came out of the house wrapping a quart bottle in a towel, followed\n",
+                        "by Daisy and Jordan wearing small tight hats of metallic cloth and\n",
+                        "carrying light capes over their arms.\n",
+                        "\n",
+                        "“Shall we all go in my car?” suggested Gatsby. He felt the hot, green\n",
+                        "leather of the seat. “I ought to have left it in the shade.”\n",
+                        "\n",
+                        "“Is it standard shift?” demanded Tom.\n",
+                        "\n",
+                        "“Yes.”\n",
+                        "\n",
+                        "“Well, you take my coupé and let me drive your car to town.”\n",
+                        "\n",
+                        "The suggestion was distasteful to Gatsby.\n",
+                        "\n",
+                        "“I\n"
+                    ]
+                }
+            ],
+            "source": [
+                "for node in nodes:\n",
+                "    print(node.node.metadata[\"chunk_size\"])\n",
+                "    print(node.node.get_text())"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 9,
+            "id": "1f26c527-17d2-4d4e-a6ee-8ea878ef8742",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "# define reranker\n",
+                "from llama_index.indices.postprocessor import (\n",
+                "    LLMRerank,\n",
+                "    SentenceTransformerRerank,\n",
+                "    CohereRerank,\n",
+                ")\n",
+                "\n",
+                "# reranker = LLMRerank()\n",
+                "# reranker = SentenceTransformerRerank(top_n=10)\n",
+                "reranker = CohereRerank(top_n=10)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 10,
+            "id": "828589ef-d062-40dc-8a4b-245190769445",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "# define RetrieverQueryEngine\n",
+                "from llama_index.query_engine import RetrieverQueryEngine\n",
+                "\n",
+                "query_engine = RetrieverQueryEngine(retriever, node_postprocessors=[reranker])"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 11,
+            "id": "53e3c341-e66d-4950-88d5-6411699d064b",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "Selecting retriever 0: This choice provides a moderate chunk size that could contain relevant interactions between Gatsby and Daisy without being too overwhelming..\n",
+                        "Selecting retriever 1: This choice provides a larger chunk size that could contain more detailed interactions between Gatsby and Daisy..\n",
+                        "Selecting retriever 2: This choice provides an even larger chunk size that could contain extensive interactions between Gatsby and Daisy, providing a more comprehensive summary..\n",
+                        "Selecting retriever 3: This choice provides the largest chunk size that could contain the most detailed and comprehensive interactions between Gatsby and Daisy, but it might also include a lot of irrelevant information..\n"
+                    ]
+                }
+            ],
+            "source": [
+                "response = query_engine.query(\n",
+                "    \"Describe and summarize the interactions between Gatsby and Daisy\"\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "9aa680dd-03a0-4a76-b456-c4ef0136fdc2",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "display_response(\n",
+                "    response, show_source=True, source_length=500, show_source_metadata=True\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 13,
+            "id": "0a7a8303-be94-45c5-8bc5-13ec8c7f1694",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "# compute the mean reciprocal rank (MRR) for each chunk size based on positioning in combined ranking\n",
+                "from collections import defaultdict\n",
+                "import pandas as pd\n",
+                "\n",
+                "\n",
+                "def mrr_all(metadata_values, metadata_key, source_nodes):\n",
+                "    # source nodes is a ranked list\n",
+                "    # go through each value, find out positioning in source_nodes\n",
+                "    value_to_mrr_dict = {}\n",
+                "    for metadata_value in metadata_values:\n",
+                "        mrr = 0\n",
+                "        for idx, source_node in enumerate(source_nodes):\n",
+                "            if source_node.node.metadata[metadata_key] == metadata_value:\n",
+                "                mrr = 1 / (idx + 1)\n",
+                "                break\n",
+                "            else:\n",
+                "                continue\n",
+                "\n",
+                "        # record the MRR for this metadata value in the dict\n",
+                "        value_to_mrr_dict[metadata_value] = mrr\n",
+                "\n",
+                "    df = pd.DataFrame(value_to_mrr_dict, index=[\"MRR\"])\n",
+                "    df.style.set_caption(\"Mean Reciprocal Rank\")\n",
+                "    return df"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 14,
+            "id": "adebbb82-764e-4b45-933e-84bf4ad64d40",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "Mean Reciprocal Rank for each Chunk Size\n"
+                    ]
+                },
+                {
+                    "data": {
+                        "text/html": [
+                            "<div>\n",
+                            "<style scoped>\n",
+                            "    .dataframe tbody tr th:only-of-type {\n",
+                            "        vertical-align: middle;\n",
+                            "    }\n",
+                            "\n",
+                            "    .dataframe tbody tr th {\n",
+                            "        vertical-align: top;\n",
+                            "    }\n",
+                            "\n",
+                            "    .dataframe thead th {\n",
+                            "        text-align: right;\n",
+                            "    }\n",
+                            "</style>\n",
+                            "<table border=\"1\" class=\"dataframe\">\n",
+                            "  <thead>\n",
+                            "    <tr style=\"text-align: right;\">\n",
+                            "      <th></th>\n",
+                            "      <th>128</th>\n",
+                            "      <th>256</th>\n",
+                            "      <th>512</th>\n",
+                            "      <th>1024</th>\n",
+                            "    </tr>\n",
+                            "  </thead>\n",
+                            "  <tbody>\n",
+                            "    <tr>\n",
+                            "      <th>MRR</th>\n",
+                            "      <td>0.2</td>\n",
+                            "      <td>0.166667</td>\n",
+                            "      <td>0.5</td>\n",
+                            "      <td>1.0</td>\n",
+                            "    </tr>\n",
+                            "  </tbody>\n",
+                            "</table>\n",
+                            "</div>"
+                        ],
+                        "text/plain": [
+                            "     128       256   512   1024\n",
+                            "MRR   0.2  0.166667   0.5   1.0"
+                        ]
+                    },
+                    "execution_count": 14,
+                    "metadata": {},
+                    "output_type": "execute_result"
+                }
+            ],
+            "source": [
+                "# Compute the Mean Reciprocal Rank for each chunk size (higher is better)\n",
+                "# we can see that chunk size of 1024 has the highest ranked results.\n",
+                "print(\"Mean Reciprocal Rank for each Chunk Size\")\n",
+                "mrr_all(chunk_sizes, \"chunk_size\", response.source_nodes)"
+            ]
+        },
+        {
+            "cell_type": "markdown",
+            "id": "b27a2f3c-55ce-4fa6-a15a-be539723a967",
+            "metadata": {},
+            "source": [
+                "## Compare Against Baseline\n",
+                "\n",
+                "Compare against a baseline of chunk size 1024 (k=2)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 15,
+            "id": "a4d66b14-4f38-4b61-809c-f603d7e09ef9",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "query_engine_1024 = query_engines[-1]"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 16,
+            "id": "43f3e441-f372-4df2-ae21-71fa7968e606",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "response_1024 = query_engine_1024.query(\n",
+                "    \"Describe and summarize the interactions between Gatsby and Daisy\"\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "4b161e69-da17-4e4e-b8c0-b9c846ce723f",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "display_response(response_1024, show_source=True, source_length=500)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "3e7cea30-b036-4a8b-b16d-823ab6b860be",
+            "metadata": {},
+            "outputs": [],
+            "source": []
+        }
+    ],
+    "metadata": {
+        "kernelspec": {
+            "display_name": "llama_index_v2",
+            "language": "python",
+            "name": "llama_index_v2"
+        },
+        "language_info": {
+            "codemirror_mode": {
+                "name": "ipython",
+                "version": 3
+            },
+            "file_extension": ".py",
+            "mimetype": "text/x-python",
+            "name": "python",
+            "nbconvert_exporter": "python",
+            "pygments_lexer": "ipython3",
+            "version": "3.10.10"
+        }
     },
-    {
-     "data": {
-      "text/html": [
-       "<div>\n",
-       "<style scoped>\n",
-       "    .dataframe tbody tr th:only-of-type {\n",
-       "        vertical-align: middle;\n",
-       "    }\n",
-       "\n",
-       "    .dataframe tbody tr th {\n",
-       "        vertical-align: top;\n",
-       "    }\n",
-       "\n",
-       "    .dataframe thead th {\n",
-       "        text-align: right;\n",
-       "    }\n",
-       "</style>\n",
-       "<table border=\"1\" class=\"dataframe\">\n",
-       "  <thead>\n",
-       "    <tr style=\"text-align: right;\">\n",
-       "      <th></th>\n",
-       "      <th>128</th>\n",
-       "      <th>256</th>\n",
-       "      <th>512</th>\n",
-       "      <th>1024</th>\n",
-       "    </tr>\n",
-       "  </thead>\n",
-       "  <tbody>\n",
-       "    <tr>\n",
-       "      <th>MRR</th>\n",
-       "      <td>0.2</td>\n",
-       "      <td>0.166667</td>\n",
-       "      <td>0.5</td>\n",
-       "      <td>1.0</td>\n",
-       "    </tr>\n",
-       "  </tbody>\n",
-       "</table>\n",
-       "</div>"
-      ],
-      "text/plain": [
-       "     128       256   512   1024\n",
-       "MRR   0.2  0.166667   0.5   1.0"
-      ]
-     },
-     "execution_count": 14,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "# Compute the Mean Reciprocal Rank for each chunk size (higher is better)\n",
-    "# we can see that chunk size of 256 has the highest ranked results.\n",
-    "print(\"Mean Reciprocal Rank for each Chunk Size\")\n",
-    "mrr_all(chunk_sizes, \"chunk_size\", response.source_nodes)"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "id": "b27a2f3c-55ce-4fa6-a15a-be539723a967",
-   "metadata": {},
-   "source": [
-    "## Compare Against Baseline\n",
-    "\n",
-    "Compare against a baseline of chunk size 1024 (k=2)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 15,
-   "id": "a4d66b14-4f38-4b61-809c-f603d7e09ef9",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "query_engine_1024 = query_engines[-1]"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 16,
-   "id": "43f3e441-f372-4df2-ae21-71fa7968e606",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "response_1024 = query_engine_1024.query(\n",
-    "    \"Describe and summarize the interactions between Gatsby and Daisy\"\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "4b161e69-da17-4e4e-b8c0-b9c846ce723f",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "display_response(response_1024, show_source=True, source_length=500)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "3e7cea30-b036-4a8b-b16d-823ab6b860be",
-   "metadata": {},
-   "outputs": [],
-   "source": []
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "llama_index_v2",
-   "language": "python",
-   "name": "llama_index_v2"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 3
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython3",
-   "version": "3.10.10"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 5
-}
+    "nbformat": 4,
+    "nbformat_minor": 5
+}
\ No newline at end of file
diff --git a/docs/examples/retrievers/router_retriever.ipynb b/docs/examples/retrievers/router_retriever.ipynb
index 928b726bc2412ef58a132ad81c83d110577f9dc3..007776bc692dfc4b7274b60c09798bc2bcb68eaa 100644
--- a/docs/examples/retrievers/router_retriever.ipynb
+++ b/docs/examples/retrievers/router_retriever.ipynb
@@ -1,881 +1,881 @@
 {
- "cells": [
-  {
-   "cell_type": "markdown",
-   "id": "5bf1de44-4047-46cf-a04c-dbf910d9e179",
-   "metadata": {},
-   "source": [
-    "# Router Retriever\n",
-    "In this guide, we define a custom router retriever that selects one or more candidate retrievers in order to execute a given query.\n",
-    "\n",
-    "The router (`BaseSelector`) module uses the LLM to dynamically make decisions on which underlying retrieval tools to use. This can be helpful to select one out of a diverse range of data sources. This can also be helpful to aggregate retrieval results across a variety of data sources (if a multi-selector module is used).\n",
-    "\n",
-    "This notebook is very similar to the RouterQueryEngine notebook."
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "id": "6e73fead-ec2c-4346-bd08-e183c13c7e29",
-   "metadata": {},
-   "source": [
-    "### Setup"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 1,
-   "id": "a2d59778-4cda-47b5-8cd0-b80fee91d1e4",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "# NOTE: This is ONLY necessary in jupyter notebook.\n",
-    "# Details: Jupyter runs an event-loop behind the scenes.\n",
-    "#          This results in nested event-loops when we start an event-loop to make async queries.\n",
-    "#          This is normally not allowed, we use nest_asyncio to allow it for convenience.\n",
-    "import nest_asyncio\n",
-    "\n",
-    "nest_asyncio.apply()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 2,
-   "id": "c628448c-573c-4eeb-a7e1-707fe8cc575c",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Note: NumExpr detected 12 cores but \"NUMEXPR_MAX_THREADS\" not set, so enforcing safe limit of 8.\n",
-      "NumExpr defaulting to 8 threads.\n"
-     ]
-    }
-   ],
-   "source": [
-    "import logging\n",
-    "import sys\n",
-    "\n",
-    "logging.basicConfig(stream=sys.stdout, level=logging.INFO)\n",
-    "logging.getLogger().handlers = []\n",
-    "logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))\n",
-    "\n",
-    "from llama_index import (\n",
-    "    VectorStoreIndex,\n",
-    "    ListIndex,\n",
-    "    SimpleDirectoryReader,\n",
-    "    ServiceContext,\n",
-    "    StorageContext,\n",
-    "    SimpleKeywordTableIndex,\n",
-    ")\n",
-    "from llama_index.llms import OpenAI"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "id": "787174ed-10ce-47d7-82fd-9ca7f891eea7",
-   "metadata": {},
-   "source": [
-    "### Load Data\n",
-    "\n",
-    "We first show how to convert a Document into a set of Nodes, and insert into a DocumentStore."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 3,
-   "id": "1fc1b8ac-bf55-4d60-841c-61698663322f",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "# load documents\n",
-    "documents = SimpleDirectoryReader(\"../data/paul_graham\").load_data()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 4,
-   "id": "7081194a-ede7-478e-bff2-23e89e23ef16",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "# initialize service context (set chunk size)\n",
-    "llm = OpenAI(model=\"gpt-4\")\n",
-    "service_context = ServiceContext.from_defaults(chunk_size=1024, llm=llm)\n",
-    "nodes = service_context.node_parser.get_nodes_from_documents(documents)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 5,
-   "id": "8f61bca2-c3b4-4ef0-a8f1-367933aa6d05",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "# initialize storage context (by default it's in-memory)\n",
-    "storage_context = StorageContext.from_defaults()\n",
-    "storage_context.docstore.add_documents(nodes)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 6,
-   "id": "c8f5c44f-11d2-47a2-a566-c6dc0fd5a1c3",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "# define\n",
-    "list_index = ListIndex(nodes, storage_context=storage_context)\n",
-    "vector_index = VectorStoreIndex(nodes, storage_context=storage_context)\n",
-    "keyword_index = SimpleKeywordTableIndex(nodes, storage_context=storage_context)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 7,
-   "id": "0d6162df-9da7-4aad-a2ca-eb318f67daec",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "list_retriever = list_index.as_retriever()\n",
-    "vector_retriever = vector_index.as_retriever()\n",
-    "keyword_retriever = keyword_index.as_retriever()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 8,
-   "id": "ee3f7c3b-69b4-48d5-bf22-ac51a4e3179f",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "from llama_index.tools import RetrieverTool\n",
-    "\n",
-    "list_tool = RetrieverTool.from_defaults(\n",
-    "    retriever=list_retriever,\n",
-    "    description=\"Will retrieve all context from Paul Graham's essay on What I Worked On. Don't use if the question only requires more specific context.\",\n",
-    ")\n",
-    "vector_tool = RetrieverTool.from_defaults(\n",
-    "    retriever=vector_retriever,\n",
-    "    description=\"Useful for retrieving specific context from Paul Graham essay on What I Worked On.\",\n",
-    ")\n",
-    "keyword_tool = RetrieverTool.from_defaults(\n",
-    "    retriever=keyword_retriever,\n",
-    "    description=\"Useful for retrieving specific context from Paul Graham essay on What I Worked On (using entities mentioned in query)\",\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "id": "0bba2d68-13f9-4519-87ec-40511da7abdd",
-   "metadata": {},
-   "source": [
-    "### Define Selector Module for Routing\n",
-    "\n",
-    "There are several selectors available, each with some distinct attributes.\n",
-    "\n",
-    "The LLM selectors use the LLM to output a JSON that is parsed, and the corresponding indexes are queried.\n",
-    "\n",
-    "The Pydantic selectors (currently only supported by `gpt-4-0613` and `gpt-3.5-turbo-0613` (the default)) use the OpenAI Function Call API to produce pydantic selection objects, rather than parsing raw JSON.\n",
-    "\n",
-    "Here we use PydanticSingleSelector/PydanticMultiSelector but you can use the LLM-equivalents as well. "
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 9,
-   "id": "6cb64a55-05b7-4565-949b-025b8d19c375",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "from llama_index.selectors.llm_selectors import LLMSingleSelector, LLMMultiSelector\n",
-    "from llama_index.selectors.pydantic_selectors import (\n",
-    "    PydanticMultiSelector,\n",
-    "    PydanticSingleSelector,\n",
-    ")\n",
-    "from llama_index.retrievers import RouterRetriever\n",
-    "from llama_index.response.notebook_utils import display_source_node"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "id": "3513ca57-bef9-47d3-aa17-3cf72a6eb318",
-   "metadata": {},
-   "source": [
-    "#### PydanticSingleSelector"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 10,
-   "id": "8ecb1c95-0096-4036-ad32-2337d844bf68",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "retriever = RouterRetriever(\n",
-    "    selector=PydanticSingleSelector.from_defaults(llm=llm),\n",
-    "    retriever_tools=[\n",
-    "        list_tool,\n",
-    "        vector_tool,\n",
-    "    ],\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 11,
-   "id": "7b8c4c12-1a30-425e-8312-04be050b2101",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Selecting retriever 0: This choice is most relevant as it mentions retrieving all context from the essay, which could include information about the author's life..\n"
-     ]
-    },
-    {
-     "data": {
-      "text/markdown": [
-       "**Node ID:** 7d07d325-489e-4157-a745-270e2066a643<br>**Similarity:** None<br>**Text:** What I Worked On\n",
-       "\n",
-       "February 2021\n",
-       "\n",
-       "Before college the two main things I worked on, outside of schoo...<br>"
-      ],
-      "text/plain": [
-       "<IPython.core.display.Markdown object>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    },
-    {
-     "data": {
-      "text/markdown": [
-       "**Node ID:** 01f0900b-db83-450b-a088-0473f16882d7<br>**Similarity:** None<br>**Text:** showed Terry Winograd using SHRDLU. I haven't tried rereading The Moon is a Harsh Mistress, so I ...<br>"
-      ],
-      "text/plain": [
-       "<IPython.core.display.Markdown object>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    },
-    {
-     "data": {
-      "text/markdown": [
-       "**Node ID:** b2549a68-5fef-4179-b027-620ebfa6e346<br>**Similarity:** None<br>**Text:** Science is an uneasy alliance between two halves, theory and systems. The theory people prove thi...<br>"
-      ],
-      "text/plain": [
-       "<IPython.core.display.Markdown object>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    },
-    {
-     "data": {
-      "text/markdown": [
-       "**Node ID:** 4f1e9f0d-9bc6-4169-b3b6-4f169bbfa391<br>**Similarity:** None<br>**Text:** been explored. But all I wanted was to get out of grad school, and my rapidly written dissertatio...<br>"
-      ],
-      "text/plain": [
-       "<IPython.core.display.Markdown object>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    },
-    {
-     "data": {
-      "text/markdown": [
-       "**Node ID:** e20c99f9-5e80-4c92-8cc0-03d2a527131e<br>**Similarity:** None<br>**Text:** stop there, of course, or you get merely photographic accuracy, and what makes a still life inter...<br>"
-      ],
-      "text/plain": [
-       "<IPython.core.display.Markdown object>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    },
-    {
-     "data": {
-      "text/markdown": [
-       "**Node ID:** dbdf341a-f340-49f9-961f-16b9a51eea2d<br>**Similarity:** None<br>**Text:** that big, bureaucratic customers are a dangerous source of money, and that there's not much overl...<br>"
-      ],
-      "text/plain": [
-       "<IPython.core.display.Markdown object>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    },
-    {
-     "data": {
-      "text/markdown": [
-       "**Node ID:** ed341d3a-9dda-49c1-8611-0ab40d04f08a<br>**Similarity:** None<br>**Text:** about money, because I could sense that Interleaf was on the way down. Freelance Lisp hacking wor...<br>"
-      ],
-      "text/plain": [
-       "<IPython.core.display.Markdown object>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    },
-    {
-     "data": {
-      "text/markdown": [
-       "**Node ID:** d69e02d3-2732-4567-a360-893c14ae157b<br>**Similarity:** None<br>**Text:** a web app, is common now, but at the time it wasn't clear that it was even possible. To find out,...<br>"
-      ],
-      "text/plain": [
-       "<IPython.core.display.Markdown object>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    },
-    {
-     "data": {
-      "text/markdown": [
-       "**Node ID:** df9e00a5-e795-40a1-9a6b-8184d1b1e7c0<br>**Similarity:** None<br>**Text:** have to integrate with any other software except Robert's and Trevor's, so it was quite fun to wo...<br>"
-      ],
-      "text/plain": [
-       "<IPython.core.display.Markdown object>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    },
-    {
-     "data": {
-      "text/markdown": [
-       "**Node ID:** 38f2699b-0878-499b-90ee-821cb77e387b<br>**Similarity:** None<br>**Text:** all too keenly aware of the near-death experiences we seemed to have every few months. Nor had I ...<br>"
-      ],
-      "text/plain": [
-       "<IPython.core.display.Markdown object>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    },
-    {
-     "data": {
-      "text/markdown": [
-       "**Node ID:** be04d6a9-1fc7-4209-9df2-9c17a453699a<br>**Similarity:** None<br>**Text:** for a second still life, painted from the same objects (which hopefully hadn't rotted yet).\n",
-       "\n",
-       "Mean...<br>"
-      ],
-      "text/plain": [
-       "<IPython.core.display.Markdown object>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    },
-    {
-     "data": {
-      "text/markdown": [
-       "**Node ID:** 42344911-8a7c-4e9b-81a8-0fcf40ab7690<br>**Similarity:** None<br>**Text:** which I'd created years before using Viaweb but had never used for anything. In one day it got 30...<br>"
-      ],
-      "text/plain": [
-       "<IPython.core.display.Markdown object>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    },
-    {
-     "data": {
-      "text/markdown": [
-       "**Node ID:** 9ec3df49-abf9-47f4-b0c2-16687882742a<br>**Similarity:** None<br>**Text:** I didn't know but would turn out to like a lot: a woman called Jessica Livingston. A couple days ...<br>"
-      ],
-      "text/plain": [
-       "<IPython.core.display.Markdown object>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    },
-    {
-     "data": {
-      "text/markdown": [
-       "**Node ID:** d0cf6975-5261-4fb2-aae3-f3230090fb64<br>**Similarity:** None<br>**Text:** of readers, but professional investors are thinking \"Wow, that means they got all the returns.\" B...<br>"
-      ],
-      "text/plain": [
-       "<IPython.core.display.Markdown object>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    },
-    {
-     "data": {
-      "text/markdown": [
-       "**Node ID:** 607d0480-7eee-4fb4-965d-3cb585fda62c<br>**Similarity:** None<br>**Text:** to the \"YC GDP,\" but as YC grows this becomes less and less of a joke. Now lots of startups get t...<br>"
-      ],
-      "text/plain": [
-       "<IPython.core.display.Markdown object>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    },
-    {
-     "data": {
-      "text/markdown": [
-       "**Node ID:** 730a49c9-55f7-4416-ab91-1d0c96e704c8<br>**Similarity:** None<br>**Text:** So this set me thinking. It was true that on my current trajectory, YC would be the last thing I ...<br>"
-      ],
-      "text/plain": [
-       "<IPython.core.display.Markdown object>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    },
-    {
-     "data": {
-      "text/markdown": [
-       "**Node ID:** edbe8c67-e373-42bf-af98-276b559cc08b<br>**Similarity:** None<br>**Text:** operators you need? The Lisp that John McCarthy invented, or more accurately discovered, is an an...<br>"
-      ],
-      "text/plain": [
-       "<IPython.core.display.Markdown object>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    },
-    {
-     "data": {
-      "text/markdown": [
-       "**Node ID:** 175a4375-35ec-45a0-a90c-15611505096b<br>**Similarity:** None<br>**Text:** Like McCarthy's original Lisp, it's a spec rather than an implementation, although like McCarthy'...<br>"
-      ],
-      "text/plain": [
-       "<IPython.core.display.Markdown object>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    },
-    {
-     "data": {
-      "text/markdown": [
-       "**Node ID:** 0cb367f9-0aac-422b-9243-0eaa7be15090<br>**Similarity:** None<br>**Text:** must tell readers things they don't already know, and some people dislike being told such things....<br>"
-      ],
-      "text/plain": [
-       "<IPython.core.display.Markdown object>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    },
-    {
-     "data": {
-      "text/markdown": [
-       "**Node ID:** 67afd4f1-9fa1-4e76-87ac-23b115823e6c<br>**Similarity:** None<br>**Text:** 1960 paper.\n",
-       "\n",
-       "But if so there's no reason to suppose that this is the limit of the language that m...<br>"
-      ],
-      "text/plain": [
-       "<IPython.core.display.Markdown object>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    }
-   ],
-   "source": [
-    "# will retrieve all context from the author's life\n",
-    "nodes = retriever.retrieve(\n",
-    "    \"Can you give me all the context regarding the author's life?\"\n",
-    ")\n",
-    "for node in nodes:\n",
-    "    display_source_node(node)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 11,
-   "id": "2749c34e-97c0-4bd5-8358-377a94b8b2d8",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Selecting retriever 1: The question asks for a specific detail from Paul Graham's essay on 'What I Worked On'. Therefore, the second choice, which is useful for retrieving specific context, is the most relevant..\n"
-     ]
-    },
-    {
-     "data": {
-      "text/markdown": [
-       "**Node ID:** 22d20835-7de6-4cf7-92de-2bee339f3157<br>**Similarity:** 0.8017176790752668<br>**Text:** that big, bureaucratic customers are a dangerous source of money, and that there's not much overl...<br>"
-      ],
-      "text/plain": [
-       "<IPython.core.display.Markdown object>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    },
-    {
-     "data": {
-      "text/markdown": [
-       "**Node ID:** bf818c58-5d5b-4458-acbc-d87cc67a36ca<br>**Similarity:** 0.7935885352785799<br>**Text:** So this set me thinking. It was true that on my current trajectory, YC would be the last thing I ...<br>"
-      ],
-      "text/plain": [
-       "<IPython.core.display.Markdown object>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    }
-   ],
-   "source": [
-    "nodes = retriever.retrieve(\"What did Paul Graham do after RISD?\")\n",
-    "for node in nodes:\n",
-    "    display_source_node(node)"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "id": "fae962a0-55c3-42e4-8f90-8332499952b5",
-   "metadata": {},
-   "source": [
-    "#### PydanticMultiSelector"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 11,
-   "id": "d93cd132-fa4d-431f-9b02-0fc7482f097e",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "retriever = RouterRetriever(\n",
-    "    selector=PydanticMultiSelector.from_defaults(llm=llm),\n",
-    "    retriever_tools=[list_tool, vector_tool, keyword_tool],\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 37,
-   "id": "62b877dc-50d9-4841-9747-d902a60b767f",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Selecting retriever 1: This choice is relevant as it allows for retrieving specific context from the essay, which is needed to answer the question about notable events at Interleaf and YC..\n",
-      "Selecting retriever 2: This choice is also relevant as it allows for retrieving specific context using entities mentioned in the query, which in this case are 'Interleaf' and 'YC'..\n",
-      "> Starting query: What were noteable events from the authors time at Interleaf and YC?\n",
-      "query keywords: ['interleaf', 'events', 'noteable', 'yc']\n",
-      "> Extracted keywords: ['interleaf', 'yc']\n"
-     ]
-    },
-    {
-     "data": {
-      "text/markdown": [
-       "**Node ID:** fbdd25ed-1ecb-4528-88da-34f581c30782<br>**Similarity:** None<br>**Text:** So this set me thinking. It was true that on my current trajectory, YC would be the last thing I ...<br>"
-      ],
-      "text/plain": [
-       "<IPython.core.display.Markdown object>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    },
-    {
-     "data": {
-      "text/markdown": [
-       "**Node ID:** 4ce91b17-131f-4155-b7b5-8917cdc612b1<br>**Similarity:** None<br>**Text:** to the \"YC GDP,\" but as YC grows this becomes less and less of a joke. Now lots of startups get t...<br>"
-      ],
-      "text/plain": [
-       "<IPython.core.display.Markdown object>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    },
-    {
-     "data": {
-      "text/markdown": [
-       "**Node ID:** 9fe6c152-28d4-4006-8a1a-43bb72655438<br>**Similarity:** None<br>**Text:** stop there, of course, or you get merely photographic accuracy, and what makes a still life inter...<br>"
-      ],
-      "text/plain": [
-       "<IPython.core.display.Markdown object>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    },
-    {
-     "data": {
-      "text/markdown": [
-       "**Node ID:** d11cd2e2-1dd2-4c3b-863f-246fe3856f49<br>**Similarity:** None<br>**Text:** of readers, but professional investors are thinking \"Wow, that means they got all the returns.\" B...<br>"
-      ],
-      "text/plain": [
-       "<IPython.core.display.Markdown object>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    },
-    {
-     "data": {
-      "text/markdown": [
-       "**Node ID:** 2bfbab04-cb71-4641-9bd9-52c75b3a9250<br>**Similarity:** None<br>**Text:** must tell readers things they don't already know, and some people dislike being told such things....<br>"
-      ],
-      "text/plain": [
-       "<IPython.core.display.Markdown object>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    }
-   ],
-   "source": [
-    "nodes = retriever.retrieve(\n",
-    "    \"What were noteable events from the authors time at Interleaf and YC?\"\n",
-    ")\n",
-    "for node in nodes:\n",
-    "    display_source_node(node)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 13,
-   "id": "af51424b-d0b1-4c07-acf3-53e398a7d783",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Selecting retriever 1: This choice is relevant as it allows for retrieving specific context from the essay, which is needed to answer the question about notable events at Interleaf and YC..\n",
-      "Selecting retriever 2: This choice is also relevant as it allows for retrieving specific context using entities mentioned in the query, which in this case are 'Interleaf' and 'YC'..\n",
-      "> Starting query: What were noteable events from the authors time at Interleaf and YC?\n",
-      "query keywords: ['interleaf', 'yc', 'events', 'noteable']\n",
-      "> Extracted keywords: ['interleaf', 'yc']\n"
-     ]
-    },
-    {
-     "data": {
-      "text/markdown": [
-       "**Node ID:** 49882a2c-bb95-4ff3-9df1-2a40ddaea408<br>**Similarity:** None<br>**Text:** So this set me thinking. It was true that on my current trajectory, YC would be the last thing I ...<br>"
-      ],
-      "text/plain": [
-       "<IPython.core.display.Markdown object>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    },
-    {
-     "data": {
-      "text/markdown": [
-       "**Node ID:** d11aced1-e630-4109-8ec8-194e975b9851<br>**Similarity:** None<br>**Text:** to the \"YC GDP,\" but as YC grows this becomes less and less of a joke. Now lots of startups get t...<br>"
-      ],
-      "text/plain": [
-       "<IPython.core.display.Markdown object>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    },
-    {
-     "data": {
-      "text/markdown": [
-       "**Node ID:** 8aa6cc91-8e9c-4470-b6d5-4360ed13fefd<br>**Similarity:** None<br>**Text:** stop there, of course, or you get merely photographic accuracy, and what makes a still life inter...<br>"
-      ],
-      "text/plain": [
-       "<IPython.core.display.Markdown object>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    },
-    {
-     "data": {
-      "text/markdown": [
-       "**Node ID:** e37465de-c79a-4714-a402-fbd5f52800a2<br>**Similarity:** None<br>**Text:** must tell readers things they don't already know, and some people dislike being told such things....<br>"
-      ],
-      "text/plain": [
-       "<IPython.core.display.Markdown object>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    },
-    {
-     "data": {
-      "text/markdown": [
-       "**Node ID:** e0ac7fb6-84fc-4763-bca6-b68f300ec7b7<br>**Similarity:** None<br>**Text:** of readers, but professional investors are thinking \"Wow, that means they got all the returns.\" B...<br>"
-      ],
-      "text/plain": [
-       "<IPython.core.display.Markdown object>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    }
-   ],
-   "source": [
-    "nodes = retriever.retrieve(\n",
-    "    \"What were noteable events from the authors time at Interleaf and YC?\"\n",
-    ")\n",
-    "for node in nodes:\n",
-    "    display_source_node(node)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 12,
-   "id": "26e1398d-cc34-44d3-a8a1-fc521e3ba009",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Selecting retriever 1: This choice is relevant as it allows for retrieving specific context from the essay, which is needed to answer the question about notable events at Interleaf and YC..\n",
-      "Selecting retriever 2: This choice is also relevant as it allows for retrieving specific context using entities mentioned in the query, which in this case are 'Interleaf' and 'YC'..\n",
-      "> Starting query: What were noteable events from the authors time at Interleaf and YC?\n",
-      "query keywords: ['events', 'interleaf', 'yc', 'noteable']\n",
-      "> Extracted keywords: ['interleaf', 'yc']\n",
-      "message='OpenAI API response' path=https://api.openai.com/v1/embeddings processing_ms=25 request_id=95c73e9360e6473daab85cde93ca4c42 response_code=200\n"
-     ]
-    },
-    {
-     "data": {
-      "text/markdown": [
-       "**Node ID:** 76d76348-52fb-49e6-95b8-2f7a3900fa1a<br>**Similarity:** None<br>**Text:** So this set me thinking. It was true that on my current trajectory, YC would be the last thing I ...<br>"
-      ],
-      "text/plain": [
-       "<IPython.core.display.Markdown object>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    },
-    {
-     "data": {
-      "text/markdown": [
-       "**Node ID:** 61e1908a-79d2-426b-840e-926df469ac49<br>**Similarity:** None<br>**Text:** to the \"YC GDP,\" but as YC grows this becomes less and less of a joke. Now lots of startups get t...<br>"
-      ],
-      "text/plain": [
-       "<IPython.core.display.Markdown object>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    },
-    {
-     "data": {
-      "text/markdown": [
-       "**Node ID:** cac03004-5c02-4145-8e92-c320b1803847<br>**Similarity:** None<br>**Text:** stop there, of course, or you get merely photographic accuracy, and what makes a still life inter...<br>"
-      ],
-      "text/plain": [
-       "<IPython.core.display.Markdown object>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    },
-    {
-     "data": {
-      "text/markdown": [
-       "**Node ID:** f0d55e5e-5349-4243-ab01-d9dd7b12cd0a<br>**Similarity:** None<br>**Text:** of readers, but professional investors are thinking \"Wow, that means they got all the returns.\" B...<br>"
-      ],
-      "text/plain": [
-       "<IPython.core.display.Markdown object>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    },
-    {
-     "data": {
-      "text/markdown": [
-       "**Node ID:** 1516923c-0dee-4af2-b042-3e1f38de7e86<br>**Similarity:** None<br>**Text:** must tell readers things they don't already know, and some people dislike being told such things....<br>"
-      ],
-      "text/plain": [
-       "<IPython.core.display.Markdown object>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    }
-   ],
-   "source": [
-    "nodes = await retriever.aretrieve(\n",
-    "    \"What were noteable events from the authors time at Interleaf and YC?\"\n",
-    ")\n",
-    "for node in nodes:\n",
-    "    display_source_node(node)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "719bdc86-7015-4350-af63-8699a1949394",
-   "metadata": {},
-   "outputs": [],
-   "source": []
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "llama_index_v2",
-   "language": "python",
-   "name": "llama_index_v2"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 3
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython3",
-   "version": "3.10.10"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 5
+    "cells": [
+        {
+            "cell_type": "markdown",
+            "id": "5bf1de44-4047-46cf-a04c-dbf910d9e179",
+            "metadata": {},
+            "source": [
+                "# Router Retriever\n",
+                "In this guide, we define a custom router retriever that selects one or more candidate retrievers in order to execute a given query.\n",
+                "\n",
+                "The router (`BaseSelector`) module uses the LLM to dynamically make decisions on which underlying retrieval tools to use. This can be helpful to select one out of a diverse range of data sources. This can also be helpful to aggregate retrieval results across a variety of data sources (if a multi-selector module is used).\n",
+                "\n",
+                "This notebook is very similar to the RouterQueryEngine notebook."
+            ]
+        },
+        {
+            "cell_type": "markdown",
+            "id": "6e73fead-ec2c-4346-bd08-e183c13c7e29",
+            "metadata": {},
+            "source": [
+                "### Setup"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 1,
+            "id": "a2d59778-4cda-47b5-8cd0-b80fee91d1e4",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "# NOTE: This is ONLY necessary in jupyter notebook.\n",
+                "# Details: Jupyter runs an event-loop behind the scenes.\n",
+                "#          This results in nested event-loops when we start an event-loop to make async queries.\n",
+                "#          This is normally not allowed, we use nest_asyncio to allow it for convenience.\n",
+                "import nest_asyncio\n",
+                "\n",
+                "nest_asyncio.apply()"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 2,
+            "id": "c628448c-573c-4eeb-a7e1-707fe8cc575c",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "Note: NumExpr detected 12 cores but \"NUMEXPR_MAX_THREADS\" not set, so enforcing safe limit of 8.\n",
+                        "NumExpr defaulting to 8 threads.\n"
+                    ]
+                }
+            ],
+            "source": [
+                "import logging\n",
+                "import sys\n",
+                "\n",
+                "logging.basicConfig(stream=sys.stdout, level=logging.INFO)\n",
+                "logging.getLogger().handlers = []\n",
+                "logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))\n",
+                "\n",
+                "from llama_index import (\n",
+                "    VectorStoreIndex,\n",
+                "    SummaryIndex,\n",
+                "    SimpleDirectoryReader,\n",
+                "    ServiceContext,\n",
+                "    StorageContext,\n",
+                "    SimpleKeywordTableIndex,\n",
+                ")\n",
+                "from llama_index.llms import OpenAI"
+            ]
+        },
+        {
+            "cell_type": "markdown",
+            "id": "787174ed-10ce-47d7-82fd-9ca7f891eea7",
+            "metadata": {},
+            "source": [
+                "### Load Data\n",
+                "\n",
+                "We first show how to convert a Document into a set of Nodes, and insert into a DocumentStore."
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 3,
+            "id": "1fc1b8ac-bf55-4d60-841c-61698663322f",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "# load documents\n",
+                "documents = SimpleDirectoryReader(\"../data/paul_graham\").load_data()"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 4,
+            "id": "7081194a-ede7-478e-bff2-23e89e23ef16",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "# initialize service context (set chunk size)\n",
+                "llm = OpenAI(model=\"gpt-4\")\n",
+                "service_context = ServiceContext.from_defaults(chunk_size=1024, llm=llm)\n",
+                "nodes = service_context.node_parser.get_nodes_from_documents(documents)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 5,
+            "id": "8f61bca2-c3b4-4ef0-a8f1-367933aa6d05",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "# initialize storage context (by default it's in-memory)\n",
+                "storage_context = StorageContext.from_defaults()\n",
+                "storage_context.docstore.add_documents(nodes)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 6,
+            "id": "c8f5c44f-11d2-47a2-a566-c6dc0fd5a1c3",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "# define\n",
+                "list_index = SummaryIndex(nodes, storage_context=storage_context)\n",
+                "vector_index = VectorStoreIndex(nodes, storage_context=storage_context)\n",
+                "keyword_index = SimpleKeywordTableIndex(nodes, storage_context=storage_context)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 7,
+            "id": "0d6162df-9da7-4aad-a2ca-eb318f67daec",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "list_retriever = list_index.as_retriever()\n",
+                "vector_retriever = vector_index.as_retriever()\n",
+                "keyword_retriever = keyword_index.as_retriever()"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 8,
+            "id": "ee3f7c3b-69b4-48d5-bf22-ac51a4e3179f",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "from llama_index.tools import RetrieverTool\n",
+                "\n",
+                "list_tool = RetrieverTool.from_defaults(\n",
+                "    retriever=list_retriever,\n",
+                "    description=\"Will retrieve all context from Paul Graham's essay on What I Worked On. Don't use if the question only requires more specific context.\",\n",
+                ")\n",
+                "vector_tool = RetrieverTool.from_defaults(\n",
+                "    retriever=vector_retriever,\n",
+                "    description=\"Useful for retrieving specific context from Paul Graham essay on What I Worked On.\",\n",
+                ")\n",
+                "keyword_tool = RetrieverTool.from_defaults(\n",
+                "    retriever=keyword_retriever,\n",
+                "    description=\"Useful for retrieving specific context from Paul Graham essay on What I Worked On (using entities mentioned in query)\",\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "markdown",
+            "id": "0bba2d68-13f9-4519-87ec-40511da7abdd",
+            "metadata": {},
+            "source": [
+                "### Define Selector Module for Routing\n",
+                "\n",
+                "There are several selectors available, each with some distinct attributes.\n",
+                "\n",
+                "The LLM selectors use the LLM to output a JSON that is parsed, and the corresponding indexes are queried.\n",
+                "\n",
+                "The Pydantic selectors (currently only supported by `gpt-4-0613` and `gpt-3.5-turbo-0613` (the default)) use the OpenAI Function Call API to produce pydantic selection objects, rather than parsing raw JSON.\n",
+                "\n",
+                "Here we use PydanticSingleSelector/PydanticMultiSelector but you can use the LLM-equivalents as well. "
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 9,
+            "id": "6cb64a55-05b7-4565-949b-025b8d19c375",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "from llama_index.selectors.llm_selectors import LLMSingleSelector, LLMMultiSelector\n",
+                "from llama_index.selectors.pydantic_selectors import (\n",
+                "    PydanticMultiSelector,\n",
+                "    PydanticSingleSelector,\n",
+                ")\n",
+                "from llama_index.retrievers import RouterRetriever\n",
+                "from llama_index.response.notebook_utils import display_source_node"
+            ]
+        },
+        {
+            "cell_type": "markdown",
+            "id": "3513ca57-bef9-47d3-aa17-3cf72a6eb318",
+            "metadata": {},
+            "source": [
+                "#### PydanticSingleSelector"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 10,
+            "id": "8ecb1c95-0096-4036-ad32-2337d844bf68",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "retriever = RouterRetriever(\n",
+                "    selector=PydanticSingleSelector.from_defaults(llm=llm),\n",
+                "    retriever_tools=[\n",
+                "        list_tool,\n",
+                "        vector_tool,\n",
+                "    ],\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 11,
+            "id": "7b8c4c12-1a30-425e-8312-04be050b2101",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "Selecting retriever 0: This choice is most relevant as it mentions retrieving all context from the essay, which could include information about the author's life..\n"
+                    ]
+                },
+                {
+                    "data": {
+                        "text/markdown": [
+                            "**Node ID:** 7d07d325-489e-4157-a745-270e2066a643<br>**Similarity:** None<br>**Text:** What I Worked On\n",
+                            "\n",
+                            "February 2021\n",
+                            "\n",
+                            "Before college the two main things I worked on, outside of schoo...<br>"
+                        ],
+                        "text/plain": [
+                            "<IPython.core.display.Markdown object>"
+                        ]
+                    },
+                    "metadata": {},
+                    "output_type": "display_data"
+                },
+                {
+                    "data": {
+                        "text/markdown": [
+                            "**Node ID:** 01f0900b-db83-450b-a088-0473f16882d7<br>**Similarity:** None<br>**Text:** showed Terry Winograd using SHRDLU. I haven't tried rereading The Moon is a Harsh Mistress, so I ...<br>"
+                        ],
+                        "text/plain": [
+                            "<IPython.core.display.Markdown object>"
+                        ]
+                    },
+                    "metadata": {},
+                    "output_type": "display_data"
+                },
+                {
+                    "data": {
+                        "text/markdown": [
+                            "**Node ID:** b2549a68-5fef-4179-b027-620ebfa6e346<br>**Similarity:** None<br>**Text:** Science is an uneasy alliance between two halves, theory and systems. The theory people prove thi...<br>"
+                        ],
+                        "text/plain": [
+                            "<IPython.core.display.Markdown object>"
+                        ]
+                    },
+                    "metadata": {},
+                    "output_type": "display_data"
+                },
+                {
+                    "data": {
+                        "text/markdown": [
+                            "**Node ID:** 4f1e9f0d-9bc6-4169-b3b6-4f169bbfa391<br>**Similarity:** None<br>**Text:** been explored. But all I wanted was to get out of grad school, and my rapidly written dissertatio...<br>"
+                        ],
+                        "text/plain": [
+                            "<IPython.core.display.Markdown object>"
+                        ]
+                    },
+                    "metadata": {},
+                    "output_type": "display_data"
+                },
+                {
+                    "data": {
+                        "text/markdown": [
+                            "**Node ID:** e20c99f9-5e80-4c92-8cc0-03d2a527131e<br>**Similarity:** None<br>**Text:** stop there, of course, or you get merely photographic accuracy, and what makes a still life inter...<br>"
+                        ],
+                        "text/plain": [
+                            "<IPython.core.display.Markdown object>"
+                        ]
+                    },
+                    "metadata": {},
+                    "output_type": "display_data"
+                },
+                {
+                    "data": {
+                        "text/markdown": [
+                            "**Node ID:** dbdf341a-f340-49f9-961f-16b9a51eea2d<br>**Similarity:** None<br>**Text:** that big, bureaucratic customers are a dangerous source of money, and that there's not much overl...<br>"
+                        ],
+                        "text/plain": [
+                            "<IPython.core.display.Markdown object>"
+                        ]
+                    },
+                    "metadata": {},
+                    "output_type": "display_data"
+                },
+                {
+                    "data": {
+                        "text/markdown": [
+                            "**Node ID:** ed341d3a-9dda-49c1-8611-0ab40d04f08a<br>**Similarity:** None<br>**Text:** about money, because I could sense that Interleaf was on the way down. Freelance Lisp hacking wor...<br>"
+                        ],
+                        "text/plain": [
+                            "<IPython.core.display.Markdown object>"
+                        ]
+                    },
+                    "metadata": {},
+                    "output_type": "display_data"
+                },
+                {
+                    "data": {
+                        "text/markdown": [
+                            "**Node ID:** d69e02d3-2732-4567-a360-893c14ae157b<br>**Similarity:** None<br>**Text:** a web app, is common now, but at the time it wasn't clear that it was even possible. To find out,...<br>"
+                        ],
+                        "text/plain": [
+                            "<IPython.core.display.Markdown object>"
+                        ]
+                    },
+                    "metadata": {},
+                    "output_type": "display_data"
+                },
+                {
+                    "data": {
+                        "text/markdown": [
+                            "**Node ID:** df9e00a5-e795-40a1-9a6b-8184d1b1e7c0<br>**Similarity:** None<br>**Text:** have to integrate with any other software except Robert's and Trevor's, so it was quite fun to wo...<br>"
+                        ],
+                        "text/plain": [
+                            "<IPython.core.display.Markdown object>"
+                        ]
+                    },
+                    "metadata": {},
+                    "output_type": "display_data"
+                },
+                {
+                    "data": {
+                        "text/markdown": [
+                            "**Node ID:** 38f2699b-0878-499b-90ee-821cb77e387b<br>**Similarity:** None<br>**Text:** all too keenly aware of the near-death experiences we seemed to have every few months. Nor had I ...<br>"
+                        ],
+                        "text/plain": [
+                            "<IPython.core.display.Markdown object>"
+                        ]
+                    },
+                    "metadata": {},
+                    "output_type": "display_data"
+                },
+                {
+                    "data": {
+                        "text/markdown": [
+                            "**Node ID:** be04d6a9-1fc7-4209-9df2-9c17a453699a<br>**Similarity:** None<br>**Text:** for a second still life, painted from the same objects (which hopefully hadn't rotted yet).\n",
+                            "\n",
+                            "Mean...<br>"
+                        ],
+                        "text/plain": [
+                            "<IPython.core.display.Markdown object>"
+                        ]
+                    },
+                    "metadata": {},
+                    "output_type": "display_data"
+                },
+                {
+                    "data": {
+                        "text/markdown": [
+                            "**Node ID:** 42344911-8a7c-4e9b-81a8-0fcf40ab7690<br>**Similarity:** None<br>**Text:** which I'd created years before using Viaweb but had never used for anything. In one day it got 30...<br>"
+                        ],
+                        "text/plain": [
+                            "<IPython.core.display.Markdown object>"
+                        ]
+                    },
+                    "metadata": {},
+                    "output_type": "display_data"
+                },
+                {
+                    "data": {
+                        "text/markdown": [
+                            "**Node ID:** 9ec3df49-abf9-47f4-b0c2-16687882742a<br>**Similarity:** None<br>**Text:** I didn't know but would turn out to like a lot: a woman called Jessica Livingston. A couple days ...<br>"
+                        ],
+                        "text/plain": [
+                            "<IPython.core.display.Markdown object>"
+                        ]
+                    },
+                    "metadata": {},
+                    "output_type": "display_data"
+                },
+                {
+                    "data": {
+                        "text/markdown": [
+                            "**Node ID:** d0cf6975-5261-4fb2-aae3-f3230090fb64<br>**Similarity:** None<br>**Text:** of readers, but professional investors are thinking \"Wow, that means they got all the returns.\" B...<br>"
+                        ],
+                        "text/plain": [
+                            "<IPython.core.display.Markdown object>"
+                        ]
+                    },
+                    "metadata": {},
+                    "output_type": "display_data"
+                },
+                {
+                    "data": {
+                        "text/markdown": [
+                            "**Node ID:** 607d0480-7eee-4fb4-965d-3cb585fda62c<br>**Similarity:** None<br>**Text:** to the \"YC GDP,\" but as YC grows this becomes less and less of a joke. Now lots of startups get t...<br>"
+                        ],
+                        "text/plain": [
+                            "<IPython.core.display.Markdown object>"
+                        ]
+                    },
+                    "metadata": {},
+                    "output_type": "display_data"
+                },
+                {
+                    "data": {
+                        "text/markdown": [
+                            "**Node ID:** 730a49c9-55f7-4416-ab91-1d0c96e704c8<br>**Similarity:** None<br>**Text:** So this set me thinking. It was true that on my current trajectory, YC would be the last thing I ...<br>"
+                        ],
+                        "text/plain": [
+                            "<IPython.core.display.Markdown object>"
+                        ]
+                    },
+                    "metadata": {},
+                    "output_type": "display_data"
+                },
+                {
+                    "data": {
+                        "text/markdown": [
+                            "**Node ID:** edbe8c67-e373-42bf-af98-276b559cc08b<br>**Similarity:** None<br>**Text:** operators you need? The Lisp that John McCarthy invented, or more accurately discovered, is an an...<br>"
+                        ],
+                        "text/plain": [
+                            "<IPython.core.display.Markdown object>"
+                        ]
+                    },
+                    "metadata": {},
+                    "output_type": "display_data"
+                },
+                {
+                    "data": {
+                        "text/markdown": [
+                            "**Node ID:** 175a4375-35ec-45a0-a90c-15611505096b<br>**Similarity:** None<br>**Text:** Like McCarthy's original Lisp, it's a spec rather than an implementation, although like McCarthy'...<br>"
+                        ],
+                        "text/plain": [
+                            "<IPython.core.display.Markdown object>"
+                        ]
+                    },
+                    "metadata": {},
+                    "output_type": "display_data"
+                },
+                {
+                    "data": {
+                        "text/markdown": [
+                            "**Node ID:** 0cb367f9-0aac-422b-9243-0eaa7be15090<br>**Similarity:** None<br>**Text:** must tell readers things they don't already know, and some people dislike being told such things....<br>"
+                        ],
+                        "text/plain": [
+                            "<IPython.core.display.Markdown object>"
+                        ]
+                    },
+                    "metadata": {},
+                    "output_type": "display_data"
+                },
+                {
+                    "data": {
+                        "text/markdown": [
+                            "**Node ID:** 67afd4f1-9fa1-4e76-87ac-23b115823e6c<br>**Similarity:** None<br>**Text:** 1960 paper.\n",
+                            "\n",
+                            "But if so there's no reason to suppose that this is the limit of the language that m...<br>"
+                        ],
+                        "text/plain": [
+                            "<IPython.core.display.Markdown object>"
+                        ]
+                    },
+                    "metadata": {},
+                    "output_type": "display_data"
+                }
+            ],
+            "source": [
+                "# will retrieve all context from the author's life\n",
+                "nodes = retriever.retrieve(\n",
+                "    \"Can you give me all the context regarding the author's life?\"\n",
+                ")\n",
+                "for node in nodes:\n",
+                "    display_source_node(node)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 11,
+            "id": "2749c34e-97c0-4bd5-8358-377a94b8b2d8",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "Selecting retriever 1: The question asks for a specific detail from Paul Graham's essay on 'What I Worked On'. Therefore, the second choice, which is useful for retrieving specific context, is the most relevant..\n"
+                    ]
+                },
+                {
+                    "data": {
+                        "text/markdown": [
+                            "**Node ID:** 22d20835-7de6-4cf7-92de-2bee339f3157<br>**Similarity:** 0.8017176790752668<br>**Text:** that big, bureaucratic customers are a dangerous source of money, and that there's not much overl...<br>"
+                        ],
+                        "text/plain": [
+                            "<IPython.core.display.Markdown object>"
+                        ]
+                    },
+                    "metadata": {},
+                    "output_type": "display_data"
+                },
+                {
+                    "data": {
+                        "text/markdown": [
+                            "**Node ID:** bf818c58-5d5b-4458-acbc-d87cc67a36ca<br>**Similarity:** 0.7935885352785799<br>**Text:** So this set me thinking. It was true that on my current trajectory, YC would be the last thing I ...<br>"
+                        ],
+                        "text/plain": [
+                            "<IPython.core.display.Markdown object>"
+                        ]
+                    },
+                    "metadata": {},
+                    "output_type": "display_data"
+                }
+            ],
+            "source": [
+                "nodes = retriever.retrieve(\"What did Paul Graham do after RISD?\")\n",
+                "for node in nodes:\n",
+                "    display_source_node(node)"
+            ]
+        },
+        {
+            "cell_type": "markdown",
+            "id": "fae962a0-55c3-42e4-8f90-8332499952b5",
+            "metadata": {},
+            "source": [
+                "#### PydanticMultiSelector"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 11,
+            "id": "d93cd132-fa4d-431f-9b02-0fc7482f097e",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "retriever = RouterRetriever(\n",
+                "    selector=PydanticMultiSelector.from_defaults(llm=llm),\n",
+                "    retriever_tools=[list_tool, vector_tool, keyword_tool],\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 37,
+            "id": "62b877dc-50d9-4841-9747-d902a60b767f",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "Selecting retriever 1: This choice is relevant as it allows for retrieving specific context from the essay, which is needed to answer the question about notable events at Interleaf and YC..\n",
+                        "Selecting retriever 2: This choice is also relevant as it allows for retrieving specific context using entities mentioned in the query, which in this case are 'Interleaf' and 'YC'..\n",
+                        "> Starting query: What were notable events from the author's time at Interleaf and YC?\n",
+                        "query keywords: ['interleaf', 'events', 'notable', 'yc']\n",
+                        "> Extracted keywords: ['interleaf', 'yc']\n"
+                    ]
+                },
+                {
+                    "data": {
+                        "text/markdown": [
+                            "**Node ID:** fbdd25ed-1ecb-4528-88da-34f581c30782<br>**Similarity:** None<br>**Text:** So this set me thinking. It was true that on my current trajectory, YC would be the last thing I ...<br>"
+                        ],
+                        "text/plain": [
+                            "<IPython.core.display.Markdown object>"
+                        ]
+                    },
+                    "metadata": {},
+                    "output_type": "display_data"
+                },
+                {
+                    "data": {
+                        "text/markdown": [
+                            "**Node ID:** 4ce91b17-131f-4155-b7b5-8917cdc612b1<br>**Similarity:** None<br>**Text:** to the \"YC GDP,\" but as YC grows this becomes less and less of a joke. Now lots of startups get t...<br>"
+                        ],
+                        "text/plain": [
+                            "<IPython.core.display.Markdown object>"
+                        ]
+                    },
+                    "metadata": {},
+                    "output_type": "display_data"
+                },
+                {
+                    "data": {
+                        "text/markdown": [
+                            "**Node ID:** 9fe6c152-28d4-4006-8a1a-43bb72655438<br>**Similarity:** None<br>**Text:** stop there, of course, or you get merely photographic accuracy, and what makes a still life inter...<br>"
+                        ],
+                        "text/plain": [
+                            "<IPython.core.display.Markdown object>"
+                        ]
+                    },
+                    "metadata": {},
+                    "output_type": "display_data"
+                },
+                {
+                    "data": {
+                        "text/markdown": [
+                            "**Node ID:** d11cd2e2-1dd2-4c3b-863f-246fe3856f49<br>**Similarity:** None<br>**Text:** of readers, but professional investors are thinking \"Wow, that means they got all the returns.\" B...<br>"
+                        ],
+                        "text/plain": [
+                            "<IPython.core.display.Markdown object>"
+                        ]
+                    },
+                    "metadata": {},
+                    "output_type": "display_data"
+                },
+                {
+                    "data": {
+                        "text/markdown": [
+                            "**Node ID:** 2bfbab04-cb71-4641-9bd9-52c75b3a9250<br>**Similarity:** None<br>**Text:** must tell readers things they don't already know, and some people dislike being told such things....<br>"
+                        ],
+                        "text/plain": [
+                            "<IPython.core.display.Markdown object>"
+                        ]
+                    },
+                    "metadata": {},
+                    "output_type": "display_data"
+                }
+            ],
+            "source": [
+                "nodes = retriever.retrieve(\n",
+                "    \"What were notable events from the author's time at Interleaf and YC?\"\n",
+                ")\n",
+                "for node in nodes:\n",
+                "    display_source_node(node)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 13,
+            "id": "af51424b-d0b1-4c07-acf3-53e398a7d783",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "Selecting retriever 1: This choice is relevant as it allows for retrieving specific context from the essay, which is needed to answer the question about notable events at Interleaf and YC..\n",
+                        "Selecting retriever 2: This choice is also relevant as it allows for retrieving specific context using entities mentioned in the query, which in this case are 'Interleaf' and 'YC'..\n",
+                        "> Starting query: What were notable events from the author's time at Interleaf and YC?\n",
+                        "query keywords: ['interleaf', 'yc', 'events', 'notable']\n",
+                        "> Extracted keywords: ['interleaf', 'yc']\n"
+                    ]
+                },
+                {
+                    "data": {
+                        "text/markdown": [
+                            "**Node ID:** 49882a2c-bb95-4ff3-9df1-2a40ddaea408<br>**Similarity:** None<br>**Text:** So this set me thinking. It was true that on my current trajectory, YC would be the last thing I ...<br>"
+                        ],
+                        "text/plain": [
+                            "<IPython.core.display.Markdown object>"
+                        ]
+                    },
+                    "metadata": {},
+                    "output_type": "display_data"
+                },
+                {
+                    "data": {
+                        "text/markdown": [
+                            "**Node ID:** d11aced1-e630-4109-8ec8-194e975b9851<br>**Similarity:** None<br>**Text:** to the \"YC GDP,\" but as YC grows this becomes less and less of a joke. Now lots of startups get t...<br>"
+                        ],
+                        "text/plain": [
+                            "<IPython.core.display.Markdown object>"
+                        ]
+                    },
+                    "metadata": {},
+                    "output_type": "display_data"
+                },
+                {
+                    "data": {
+                        "text/markdown": [
+                            "**Node ID:** 8aa6cc91-8e9c-4470-b6d5-4360ed13fefd<br>**Similarity:** None<br>**Text:** stop there, of course, or you get merely photographic accuracy, and what makes a still life inter...<br>"
+                        ],
+                        "text/plain": [
+                            "<IPython.core.display.Markdown object>"
+                        ]
+                    },
+                    "metadata": {},
+                    "output_type": "display_data"
+                },
+                {
+                    "data": {
+                        "text/markdown": [
+                            "**Node ID:** e37465de-c79a-4714-a402-fbd5f52800a2<br>**Similarity:** None<br>**Text:** must tell readers things they don't already know, and some people dislike being told such things....<br>"
+                        ],
+                        "text/plain": [
+                            "<IPython.core.display.Markdown object>"
+                        ]
+                    },
+                    "metadata": {},
+                    "output_type": "display_data"
+                },
+                {
+                    "data": {
+                        "text/markdown": [
+                            "**Node ID:** e0ac7fb6-84fc-4763-bca6-b68f300ec7b7<br>**Similarity:** None<br>**Text:** of readers, but professional investors are thinking \"Wow, that means they got all the returns.\" B...<br>"
+                        ],
+                        "text/plain": [
+                            "<IPython.core.display.Markdown object>"
+                        ]
+                    },
+                    "metadata": {},
+                    "output_type": "display_data"
+                }
+            ],
+            "source": [
+                "nodes = retriever.retrieve(\n",
+                "    \"What were notable events from the author's time at Interleaf and YC?\"\n",
+                ")\n",
+                "for node in nodes:\n",
+                "    display_source_node(node)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 12,
+            "id": "26e1398d-cc34-44d3-a8a1-fc521e3ba009",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "Selecting retriever 1: This choice is relevant as it allows for retrieving specific context from the essay, which is needed to answer the question about notable events at Interleaf and YC..\n",
+                        "Selecting retriever 2: This choice is also relevant as it allows for retrieving specific context using entities mentioned in the query, which in this case are 'Interleaf' and 'YC'..\n",
+                        "> Starting query: What were notable events from the author's time at Interleaf and YC?\n",
+                        "query keywords: ['events', 'interleaf', 'yc', 'notable']\n",
+                        "> Extracted keywords: ['interleaf', 'yc']\n",
+                        "message='OpenAI API response' path=https://api.openai.com/v1/embeddings processing_ms=25 request_id=95c73e9360e6473daab85cde93ca4c42 response_code=200\n"
+                    ]
+                },
+                {
+                    "data": {
+                        "text/markdown": [
+                            "**Node ID:** 76d76348-52fb-49e6-95b8-2f7a3900fa1a<br>**Similarity:** None<br>**Text:** So this set me thinking. It was true that on my current trajectory, YC would be the last thing I ...<br>"
+                        ],
+                        "text/plain": [
+                            "<IPython.core.display.Markdown object>"
+                        ]
+                    },
+                    "metadata": {},
+                    "output_type": "display_data"
+                },
+                {
+                    "data": {
+                        "text/markdown": [
+                            "**Node ID:** 61e1908a-79d2-426b-840e-926df469ac49<br>**Similarity:** None<br>**Text:** to the \"YC GDP,\" but as YC grows this becomes less and less of a joke. Now lots of startups get t...<br>"
+                        ],
+                        "text/plain": [
+                            "<IPython.core.display.Markdown object>"
+                        ]
+                    },
+                    "metadata": {},
+                    "output_type": "display_data"
+                },
+                {
+                    "data": {
+                        "text/markdown": [
+                            "**Node ID:** cac03004-5c02-4145-8e92-c320b1803847<br>**Similarity:** None<br>**Text:** stop there, of course, or you get merely photographic accuracy, and what makes a still life inter...<br>"
+                        ],
+                        "text/plain": [
+                            "<IPython.core.display.Markdown object>"
+                        ]
+                    },
+                    "metadata": {},
+                    "output_type": "display_data"
+                },
+                {
+                    "data": {
+                        "text/markdown": [
+                            "**Node ID:** f0d55e5e-5349-4243-ab01-d9dd7b12cd0a<br>**Similarity:** None<br>**Text:** of readers, but professional investors are thinking \"Wow, that means they got all the returns.\" B...<br>"
+                        ],
+                        "text/plain": [
+                            "<IPython.core.display.Markdown object>"
+                        ]
+                    },
+                    "metadata": {},
+                    "output_type": "display_data"
+                },
+                {
+                    "data": {
+                        "text/markdown": [
+                            "**Node ID:** 1516923c-0dee-4af2-b042-3e1f38de7e86<br>**Similarity:** None<br>**Text:** must tell readers things they don't already know, and some people dislike being told such things....<br>"
+                        ],
+                        "text/plain": [
+                            "<IPython.core.display.Markdown object>"
+                        ]
+                    },
+                    "metadata": {},
+                    "output_type": "display_data"
+                }
+            ],
+            "source": [
+                "nodes = await retriever.aretrieve(\n",
+                "    \"What were notable events from the author's time at Interleaf and YC?\"\n",
+                ")\n",
+                "for node in nodes:\n",
+                "    display_source_node(node)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "719bdc86-7015-4350-af63-8699a1949394",
+            "metadata": {},
+            "outputs": [],
+            "source": []
+        }
+    ],
+    "metadata": {
+        "kernelspec": {
+            "display_name": "llama_index_v2",
+            "language": "python",
+            "name": "llama_index_v2"
+        },
+        "language_info": {
+            "codemirror_mode": {
+                "name": "ipython",
+                "version": 3
+            },
+            "file_extension": ".py",
+            "mimetype": "text/x-python",
+            "name": "python",
+            "nbconvert_exporter": "python",
+            "pygments_lexer": "ipython3",
+            "version": "3.10.10"
+        }
+    },
+    "nbformat": 4,
+    "nbformat_minor": 5
 }
diff --git a/docs/examples/usecases/10k_graph_agent.ipynb b/docs/examples/usecases/10k_graph_agent.ipynb
index e08f28cf441227e4589ffcfaeed6724efb0cf717..f104130d02468266ec26302a703de71ac78dcb4d 100644
--- a/docs/examples/usecases/10k_graph_agent.ipynb
+++ b/docs/examples/usecases/10k_graph_agent.ipynb
@@ -1,618 +1,618 @@
 {
- "cells": [
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "034e355d-83a0-4bd2-877e-0f493c5f713d",
-   "metadata": {
-    "tags": []
-   },
-   "source": [
-    "# 10K Analysis\n",
-    "In this demo, we answer a compare and contrast query with two different approaches:\n",
-    "1. structured reasoning via index graph\n",
-    "2. free form reasoning via ReAct agent"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 537,
-   "id": "298e7dfc-be9a-49bd-8793-9a1d9fa52c8d",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "from llama_index import (\n",
-    "    SimpleDirectoryReader,\n",
-    "    LLMPredictor,\n",
-    "    ServiceContext,\n",
-    "    VectorStoreIndex,\n",
-    ")\n",
-    "from llama_index.response.pprint_utils import pprint_response\n",
-    "from llama_index.llms import OpenAI"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 419,
-   "id": "35364259-f1c3-4df0-b8c9-79e0afca7436",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "llm = OpenAI(temperature=0, model=\"text-davinci-003\", max_tokens=-1)\n",
-    "\n",
-    "service_context = ServiceContext.from_defaults(llm=llm)"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "9be92b8e-1d3f-4ac9-b88a-10f69e3d6645",
-   "metadata": {
-    "jp-MarkdownHeadingCollapsed": true,
-    "tags": []
-   },
-   "source": [
-    "# Lyft"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 420,
-   "id": "d282214f-69d2-49d4-8ab3-e1b1560f0be6",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "lyft_docs = SimpleDirectoryReader(input_files=[\"../data/10k/lyft_2021.pdf\"]).load_data()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 240,
-   "id": "60826815-3282-4947-aa4f-ee75dcc033c3",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "lyft_index = VectorStoreIndex.from_documents(lyft_docs)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 421,
-   "id": "a0d76160-00fe-4cab-aab9-1f227db6feca",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "lyft_engine = lyft_index.as_query_engine(similarity_top_k=3)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 451,
-   "id": "c57dad40-add8-469a-aaeb-fe19900206fe",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "response = lyft_engine.query(\n",
-    "    \"what is the revenue growth in the last year, show me the reference page number\"\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 452,
-   "id": "fe3ee56e-4209-4e43-9f92-dc0352b1afe5",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Final Response: The revenue growth in the last year is 36%, as\n",
-      "referenced on page 63.\n",
-      "______________________________________________________________________\n",
-      "Source Node 1/3\n",
-      "Document ID: 6a1711e2-51be-46c1-8152-72361c192ed9\n",
-      "Similarity: 0.8045102189652867\n",
-      "Text: page_label: 63  Results of OperationsThe following table summar\n",
-      "izes our historical consolidated statements of operations data:Year\n",
-      "Ended December 31, 2021 2020 2019 (in thousands) Revenue $ 3,208,323 $\n",
-      "2,364,681 $ 3,615,960 Costs and expenses Cost of revenue 1,649,532\n",
-      "1,447,516 2,176,469 Operations and support 402,233 453,963 636,116\n",
-      "Research a...\n",
-      "______________________________________________________________________\n",
-      "Source Node 2/3\n",
-      "Document ID: c15baf48-6db4-4892-88ba-8be2a6f195a5\n",
-      "Similarity: 0.8030623987053549\n",
-      "Text: page_label: 19  changing industries. If our assumptions\n",
-      "regarding these risks and uncertainties, which we use to plan and\n",
-      "operate ourbusiness,  are incorrect or change, or if we do not address\n",
-      "these risks successfully, our results of operations could differ\n",
-      "materially from our expectations and our business,financial condition\n",
-      "and  results of ope...\n",
-      "______________________________________________________________________\n",
-      "Source Node 3/3\n",
-      "Document ID: 21db4c23-fb06-48bb-91c7-2f52d158a123\n",
-      "Similarity: 0.8023439081890719\n",
-      "Text: page_label: 64  second quarter of 2021. These increases were\n",
-      "offset by investments in driver supply by increasing driver incentives\n",
-      "recorded as a reduction to revenue by $942.9 million in2021 as\n",
-      "compared to the prior y ear as rider demand outpaced driver supply\n",
-      "during certain periods of the pandemic recovery in 2021. Revenue in\n",
-      "2020 was also hig...\n"
-     ]
-    }
-   ],
-   "source": [
-    "pprint_response(response, show_source=True)"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "e7113434-0e41-46b6-a74e-284ce211fd38",
-   "metadata": {
-    "jp-MarkdownHeadingCollapsed": true,
-    "tags": []
-   },
-   "source": [
-    "# Uber"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 424,
-   "id": "e35842e1-b058-4e1a-8f51-4c84b42ff412",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "uber_docs = SimpleDirectoryReader(input_files=[\"../data/10k/uber_2021.pdf\"]).load_data()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 425,
-   "id": "4e6c3178-6aab-4fdc-99f6-c820661e7a73",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "uber_index = VectorStoreIndex.from_documents(uber_docs)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 429,
-   "id": "7b8e9d46-7d66-4a22-a9d8-4e2ce21491ce",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "uber_engine = uber_index.as_query_engine(similarity_top_k=3)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 430,
-   "id": "444ff6cf-2eb6-4f5c-905a-5928d0dc8947",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "response = uber_engine.query(\n",
-    "    \"what is the revenue growth in the last year, show me the reference page number\"\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 431,
-   "id": "71de4581-5c03-4a08-8d00-ddee08530a0f",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Final Response: The revenue growth in the last year was 57%, as\n",
-      "referenced on page 57.\n",
-      "______________________________________________________________________\n",
-      "Source Node 1/3\n",
-      "Document ID: 0c8abeea-57cd-41bf-a94c-1c5d195bb3ae\n",
-      "Similarity: 0.8059365413378133\n",
-      "Text: page_label: 57  The following table sets forth the components of\n",
-      "our consolidated statements of operations for each of the periods\n",
-      "presented as a percentage of revenue : Year Ended December 31, 2020\n",
-      "2021 Revenue 100 %100 %Costs and expenses Cost of revenue, exclusive\n",
-      "of dep reciation and amortization shown separately below46 %54\n",
-      "%Operations and ...\n",
-      "______________________________________________________________________\n",
-      "Source Node 2/3\n",
-      "Document ID: 012bb7d8-36ca-42ee-8d9e-9d4754ce599f\n",
-      "Similarity: 0.7980283554264312\n",
-      "Text: page_label: 60  Provision for (Benefit from) Income TaxesYear\n",
-      "Ended December 31, 2020 to 2021 % Change (In millions, except\n",
-      "percentages) 2020 2021 Provision for (benefit fro m) income taxes$\n",
-      "(192) $ (492) (156) % Effective tax rate 2.8 %48.0 % 2021 Compared to\n",
-      "2020 Provision  for (benefit from) income taxes increased by $300\n",
-      "million primarily du...\n",
-      "______________________________________________________________________\n",
-      "Source Node 3/3\n",
-      "Document ID: 697ccb91-0b6e-4dfe-b78a-b3115a3bfbe0\n",
-      "Similarity: 0.792540481338011\n",
-      "Text: page_label: 62  All OtherFor  the  year  ended  December  31,\n",
-      "2021  compared  to  the  same  period  in  2020,  All  Other  revenue\n",
-      "decreased  $127  million,  or  94%  and  All  Otheradjusted EBITDA\n",
-      "loss improved $450 million, or 98%. All  Other revenue and All Other\n",
-      "adjusted EBITDA loss improved primarily due to the  favorable impact\n",
-      "of the s...\n"
-     ]
-    }
-   ],
-   "source": [
-    "pprint_response(response, show_source=True)"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "20884bcd-8120-43ed-a137-1ddb3e14d201",
-   "metadata": {
-    "tags": []
-   },
-   "source": [
-    "# Compare and Contrast - Graph"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 149,
-   "id": "c67111e5-4fd3-4586-b51a-fd905486f247",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "from llama_index import ComposableGraph, ListIndex"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 471,
-   "id": "13134fbb-85da-4580-871f-373f373f60df",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "graph = ComposableGraph.from_indices(\n",
-    "    ListIndex,\n",
-    "    children_indices=[lyft_index, uber_index],\n",
-    "    index_summaries=[\n",
-    "        \"Provides information about Lyft financials for year 2021\",\n",
-    "        \"Provides information about Uber financials for year 2021\",\n",
-    "    ],\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 472,
-   "id": "e01ddd97-3770-44de-a5c6-859d22af9842",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "from llama_index.query_engine.transform_query_engine import TransformQueryEngine\n",
-    "from llama_index.indices.query.query_transform.base import DecomposeQueryTransform\n",
-    "\n",
-    "# define decompose_transform\n",
-    "decompose_transform = DecomposeQueryTransform(verbose=True)\n",
-    "\n",
-    "# define custom query engines\n",
-    "custom_query_engines = {}\n",
-    "for index in [lyft_index, uber_index]:\n",
-    "    query_engine = index.as_query_engine(service_context=service_context)\n",
-    "    query_engine = TransformQueryEngine(\n",
-    "        query_engine,\n",
-    "        query_transform=decompose_transform,\n",
-    "        transform_extra_info={\"index_summary\": index.index_struct.summary},\n",
-    "    )\n",
-    "    custom_query_engines[index.index_id] = query_engine\n",
-    "\n",
-    "custom_query_engines[graph.root_id] = graph.root_index.as_query_engine(\n",
-    "    service_context=service_context,\n",
-    "    streaming=True,\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 473,
-   "id": "63e1f7a3-d792-4103-9351-1a11fe64f6b6",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "# define graph\n",
-    "g_engine = graph.as_query_engine(custom_query_engines=custom_query_engines)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 489,
-   "id": "071ba563-c90d-4984-b111-7fec6687d38d",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "\u001b[33;1m\u001b[1;3m> Current query: Compare Uber and Lyft revenue growth in 2021.\n",
-      "\u001b[0m\u001b[38;5;200m\u001b[1;3m> New query:  What is Lyft's revenue growth in 2021?\n",
-      "\u001b[0m\u001b[33;1m\u001b[1;3m> Current query: Compare Uber and Lyft revenue growth in 2021.\n",
-      "\u001b[0m\u001b[38;5;200m\u001b[1;3m> New query:  What is Lyft's revenue growth in 2021?\n",
-      "\u001b[0m\u001b[33;1m\u001b[1;3m> Current query: Compare Uber and Lyft revenue growth in 2021.\n",
-      "\u001b[0m\u001b[38;5;200m\u001b[1;3m> New query:  What is Uber's revenue growth in 2021?\n",
-      "\u001b[0m\u001b[33;1m\u001b[1;3m> Current query: Compare Uber and Lyft revenue growth in 2021.\n",
-      "\u001b[0m\u001b[38;5;200m\u001b[1;3m> New query:  What is Uber's revenue growth in 2021?\n",
-      "\u001b[0m"
-     ]
-    }
-   ],
-   "source": [
-    "response = g_engine.query(\"Compare Uber and Lyft revenue growth in 2021.\")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 490,
-   "id": "593ccb02-3cab-4f22-9f59-375dc6627090",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "\n",
-      "Uber's revenue growth in 2021 is higher than Lyft's revenue growth in 2021, at 57% compared to 38.2%."
-     ]
-    }
-   ],
-   "source": [
-    "response.print_response_stream()"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "1d79414d-9143-4150-a6c9-f4f59852e839",
-   "metadata": {
-    "tags": []
-   },
-   "source": [
-    "## Compare and Contrast - Agent"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 491,
-   "id": "5ddff61c-8d8e-4686-b944-d879f59d4466",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "llm = OpenAI(temperature=0.0, max_tokens=-1)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 497,
-   "id": "78880292-cdba-4d0c-8e1a-ca9f8f99d562",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "from llama_index.langchain_helpers.agents import (\n",
-    "    IndexToolConfig,\n",
-    "    LlamaIndexTool,\n",
-    "    LlamaToolkit,\n",
-    ")\n",
-    "\n",
-    "uber_config = IndexToolConfig(\n",
-    "    query_engine=uber_engine,\n",
-    "    name=f\"Uber 10K 2021\",\n",
-    "    description=f\"Provides information about Lyft financials for year 2021\",\n",
-    "    tool_kwargs={\"return_direct\": False},\n",
-    ")\n",
-    "\n",
-    "lyft_config = IndexToolConfig(\n",
-    "    query_engine=lyft_engine,\n",
-    "    name=f\"Lyft 10K 2021\",\n",
-    "    description=f\"Provides information about Uber financials for year 2021\",\n",
-    "    tool_kwargs={\"return_direct\": False},\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 498,
-   "id": "07328a05-70d7-47f7-965d-58d4b3a5dbfa",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "toolkit = LlamaToolkit(\n",
-    "    index_configs=[uber_config, lyft_config],\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 499,
-   "id": "8a37d9cb-bbb6-4b91-9c76-14a8db13c90f",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "from llama_index.langchain_helpers.agents import create_llama_agent\n",
-    "\n",
-    "agent_chain = create_llama_agent(toolkit, llm, memory=memory, verbose=True)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 500,
-   "id": "361ece8a-e6b8-491e-9a63-f8d3142dd743",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "\n",
-      "\n",
-      "\u001b[1m> Entering new AgentExecutor chain...\u001b[0m\n",
-      "\u001b[32;1m\u001b[1;3m I need to look at the financials of both companies.\n",
-      "Action: Uber 10K 2021\n",
-      "Action Input: Revenue growth\u001b[0m\n",
-      "Observation: \u001b[36;1m\u001b[1;3min 2021 was primarily driven by what?\n",
-      "\n",
-      "Revenue growth in 2021 was primarily driven by an increase in Delivery Gross Bookings of 71%, or 66% on a constant currency basis, due to an increase in food delivery orders and higher basket sizes as a result of stay-at-home order demand related to COVID-19, as well as continued expansion across U.S. and international markets. Additionally, Mobility Gross Bookings growth of 38%, or 36% on a constant currency basis, due to increases in Trip volumes as the business recovers from the impacts of COVID-19, contributed to the revenue growth.\u001b[0m\n",
-      "Thought:\u001b[32;1m\u001b[1;3m I need to compare this to Lyft's revenue growth.\n",
-      "Action: Lyft 10K 2021\n",
-      "Action Input: Revenue growth\u001b[0m\n",
-      "Observation: \u001b[33;1m\u001b[1;3m\n",
-      "Revenue increased $843.6 million, or 36%, in 2021 as compared to the prior year, driven primarily by the significant increase in the number of Active Riders in 2021 as compared to the prior year, as vaccines became more widely distributed and more communities reopened.\u001b[0m\n",
-      "Thought:\u001b[32;1m\u001b[1;3m I now know the final answer.\n",
-      "Final Answer: Uber's revenue growth in 2021 was primarily driven by an increase in Delivery Gross Bookings of 71%, or 66% on a constant currency basis, while Lyft's revenue increased $843.6 million, or 36%, in 2021 as compared to the prior year, driven primarily by the significant increase in the number of Active Riders in 2021 as compared to the prior year.\u001b[0m\n",
-      "\n",
-      "\u001b[1m> Finished chain.\u001b[0m\n"
-     ]
+    "cells": [
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "034e355d-83a0-4bd2-877e-0f493c5f713d",
+            "metadata": {
+                "tags": []
+            },
+            "source": [
+                "# 10K Analysis\n",
+                "In this demo, we answer a compare and contrast query with two different approaches:\n",
+                "1. structured reasoning via index graph\n",
+                "2. free form reasoning via ReAct agent"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 537,
+            "id": "298e7dfc-be9a-49bd-8793-9a1d9fa52c8d",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "from llama_index import (\n",
+                "    SimpleDirectoryReader,\n",
+                "    LLMPredictor,\n",
+                "    ServiceContext,\n",
+                "    VectorStoreIndex,\n",
+                ")\n",
+                "from llama_index.response.pprint_utils import pprint_response\n",
+                "from llama_index.llms import OpenAI"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 419,
+            "id": "35364259-f1c3-4df0-b8c9-79e0afca7436",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "llm = OpenAI(temperature=0, model=\"text-davinci-003\", max_tokens=-1)\n",
+                "\n",
+                "service_context = ServiceContext.from_defaults(llm=llm)"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "9be92b8e-1d3f-4ac9-b88a-10f69e3d6645",
+            "metadata": {
+                "jp-MarkdownHeadingCollapsed": true,
+                "tags": []
+            },
+            "source": [
+                "# Lyft"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 420,
+            "id": "d282214f-69d2-49d4-8ab3-e1b1560f0be6",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "lyft_docs = SimpleDirectoryReader(input_files=[\"../data/10k/lyft_2021.pdf\"]).load_data()"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 240,
+            "id": "60826815-3282-4947-aa4f-ee75dcc033c3",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "lyft_index = VectorStoreIndex.from_documents(lyft_docs)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 421,
+            "id": "a0d76160-00fe-4cab-aab9-1f227db6feca",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "lyft_engine = lyft_index.as_query_engine(similarity_top_k=3)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 451,
+            "id": "c57dad40-add8-469a-aaeb-fe19900206fe",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "response = lyft_engine.query(\n",
+                "    \"what is the revenue growth in the last year, show me the reference page number\"\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 452,
+            "id": "fe3ee56e-4209-4e43-9f92-dc0352b1afe5",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "Final Response: The revenue growth in the last year is 36%, as\n",
+                        "referenced on page 63.\n",
+                        "______________________________________________________________________\n",
+                        "Source Node 1/3\n",
+                        "Document ID: 6a1711e2-51be-46c1-8152-72361c192ed9\n",
+                        "Similarity: 0.8045102189652867\n",
+                        "Text: page_label: 63  Results of OperationsThe following table summar\n",
+                        "izes our historical consolidated statements of operations data:Year\n",
+                        "Ended December 31, 2021 2020 2019 (in thousands) Revenue $ 3,208,323 $\n",
+                        "2,364,681 $ 3,615,960 Costs and expenses Cost of revenue 1,649,532\n",
+                        "1,447,516 2,176,469 Operations and support 402,233 453,963 636,116\n",
+                        "Research a...\n",
+                        "______________________________________________________________________\n",
+                        "Source Node 2/3\n",
+                        "Document ID: c15baf48-6db4-4892-88ba-8be2a6f195a5\n",
+                        "Similarity: 0.8030623987053549\n",
+                        "Text: page_label: 19  changing industries. If our assumptions\n",
+                        "regarding these risks and uncertainties, which we use to plan and\n",
+                        "operate ourbusiness,  are incorrect or change, or if we do not address\n",
+                        "these risks successfully, our results of operations could differ\n",
+                        "materially from our expectations and our business,financial condition\n",
+                        "and  results of ope...\n",
+                        "______________________________________________________________________\n",
+                        "Source Node 3/3\n",
+                        "Document ID: 21db4c23-fb06-48bb-91c7-2f52d158a123\n",
+                        "Similarity: 0.8023439081890719\n",
+                        "Text: page_label: 64  second quarter of 2021. These increases were\n",
+                        "offset by investments in driver supply by increasing driver incentives\n",
+                        "recorded as a reduction to revenue by $942.9 million in2021 as\n",
+                        "compared to the prior y ear as rider demand outpaced driver supply\n",
+                        "during certain periods of the pandemic recovery in 2021. Revenue in\n",
+                        "2020 was also hig...\n"
+                    ]
+                }
+            ],
+            "source": [
+                "pprint_response(response, show_source=True)"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "e7113434-0e41-46b6-a74e-284ce211fd38",
+            "metadata": {
+                "jp-MarkdownHeadingCollapsed": true,
+                "tags": []
+            },
+            "source": [
+                "# Uber"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 424,
+            "id": "e35842e1-b058-4e1a-8f51-4c84b42ff412",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "uber_docs = SimpleDirectoryReader(input_files=[\"../data/10k/uber_2021.pdf\"]).load_data()"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 425,
+            "id": "4e6c3178-6aab-4fdc-99f6-c820661e7a73",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "uber_index = VectorStoreIndex.from_documents(uber_docs)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 429,
+            "id": "7b8e9d46-7d66-4a22-a9d8-4e2ce21491ce",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "uber_engine = uber_index.as_query_engine(similarity_top_k=3)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 430,
+            "id": "444ff6cf-2eb6-4f5c-905a-5928d0dc8947",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "response = uber_engine.query(\n",
+                "    \"what is the revenue growth in the last year, show me the reference page number\"\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 431,
+            "id": "71de4581-5c03-4a08-8d00-ddee08530a0f",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "Final Response: The revenue growth in the last year was 57%, as\n",
+                        "referenced on page 57.\n",
+                        "______________________________________________________________________\n",
+                        "Source Node 1/3\n",
+                        "Document ID: 0c8abeea-57cd-41bf-a94c-1c5d195bb3ae\n",
+                        "Similarity: 0.8059365413378133\n",
+                        "Text: page_label: 57  The following table sets forth the components of\n",
+                        "our consolidated statements of operations for each of the periods\n",
+                        "presented as a percentage of revenue : Year Ended December 31, 2020\n",
+                        "2021 Revenue 100 %100 %Costs and expenses Cost of revenue, exclusive\n",
+                        "of dep reciation and amortization shown separately below46 %54\n",
+                        "%Operations and ...\n",
+                        "______________________________________________________________________\n",
+                        "Source Node 2/3\n",
+                        "Document ID: 012bb7d8-36ca-42ee-8d9e-9d4754ce599f\n",
+                        "Similarity: 0.7980283554264312\n",
+                        "Text: page_label: 60  Provision for (Benefit from) Income TaxesYear\n",
+                        "Ended December 31, 2020 to 2021 % Change (In millions, except\n",
+                        "percentages) 2020 2021 Provision for (benefit fro m) income taxes$\n",
+                        "(192) $ (492) (156) % Effective tax rate 2.8 %48.0 % 2021 Compared to\n",
+                        "2020 Provision  for (benefit from) income taxes increased by $300\n",
+                        "million primarily du...\n",
+                        "______________________________________________________________________\n",
+                        "Source Node 3/3\n",
+                        "Document ID: 697ccb91-0b6e-4dfe-b78a-b3115a3bfbe0\n",
+                        "Similarity: 0.792540481338011\n",
+                        "Text: page_label: 62  All OtherFor  the  year  ended  December  31,\n",
+                        "2021  compared  to  the  same  period  in  2020,  All  Other  revenue\n",
+                        "decreased  $127  million,  or  94%  and  All  Otheradjusted EBITDA\n",
+                        "loss improved $450 million, or 98%. All  Other revenue and All Other\n",
+                        "adjusted EBITDA loss improved primarily due to the  favorable impact\n",
+                        "of the s...\n"
+                    ]
+                }
+            ],
+            "source": [
+                "pprint_response(response, show_source=True)"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "20884bcd-8120-43ed-a137-1ddb3e14d201",
+            "metadata": {
+                "tags": []
+            },
+            "source": [
+                "# Compare and Contrast - Graph"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 149,
+            "id": "c67111e5-4fd3-4586-b51a-fd905486f247",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "from llama_index import ComposableGraph, SummaryIndex"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 471,
+            "id": "13134fbb-85da-4580-871f-373f373f60df",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "graph = ComposableGraph.from_indices(\n",
+                "    SummaryIndex,\n",
+                "    children_indices=[lyft_index, uber_index],\n",
+                "    index_summaries=[\n",
+                "        \"Provides information about Lyft financials for year 2021\",\n",
+                "        \"Provides information about Uber financials for year 2021\",\n",
+                "    ],\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 472,
+            "id": "e01ddd97-3770-44de-a5c6-859d22af9842",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "from llama_index.query_engine.transform_query_engine import TransformQueryEngine\n",
+                "from llama_index.indices.query.query_transform.base import DecomposeQueryTransform\n",
+                "\n",
+                "# define decompose_transform\n",
+                "decompose_transform = DecomposeQueryTransform(verbose=True)\n",
+                "\n",
+                "# define custom query engines\n",
+                "custom_query_engines = {}\n",
+                "for index in [lyft_index, uber_index]:\n",
+                "    query_engine = index.as_query_engine(service_context=service_context)\n",
+                "    query_engine = TransformQueryEngine(\n",
+                "        query_engine,\n",
+                "        query_transform=decompose_transform,\n",
+                "        transform_extra_info={\"index_summary\": index.index_struct.summary},\n",
+                "    )\n",
+                "    custom_query_engines[index.index_id] = query_engine\n",
+                "\n",
+                "custom_query_engines[graph.root_id] = graph.root_index.as_query_engine(\n",
+                "    service_context=service_context,\n",
+                "    streaming=True,\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 473,
+            "id": "63e1f7a3-d792-4103-9351-1a11fe64f6b6",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "# define graph\n",
+                "g_engine = graph.as_query_engine(custom_query_engines=custom_query_engines)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 489,
+            "id": "071ba563-c90d-4984-b111-7fec6687d38d",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "\u001b[33;1m\u001b[1;3m> Current query: Compare Uber and Lyft revenue growth in 2021.\n",
+                        "\u001b[0m\u001b[38;5;200m\u001b[1;3m> New query:  What is Lyft's revenue growth in 2021?\n",
+                        "\u001b[0m\u001b[33;1m\u001b[1;3m> Current query: Compare Uber and Lyft revenue growth in 2021.\n",
+                        "\u001b[0m\u001b[38;5;200m\u001b[1;3m> New query:  What is Lyft's revenue growth in 2021?\n",
+                        "\u001b[0m\u001b[33;1m\u001b[1;3m> Current query: Compare Uber and Lyft revenue growth in 2021.\n",
+                        "\u001b[0m\u001b[38;5;200m\u001b[1;3m> New query:  What is Uber's revenue growth in 2021?\n",
+                        "\u001b[0m\u001b[33;1m\u001b[1;3m> Current query: Compare Uber and Lyft revenue growth in 2021.\n",
+                        "\u001b[0m\u001b[38;5;200m\u001b[1;3m> New query:  What is Uber's revenue growth in 2021?\n",
+                        "\u001b[0m"
+                    ]
+                }
+            ],
+            "source": [
+                "response = g_engine.query(\"Compare Uber and Lyft revenue growth in 2021.\")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 490,
+            "id": "593ccb02-3cab-4f22-9f59-375dc6627090",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "\n",
+                        "Uber's revenue growth in 2021 is higher than Lyft's revenue growth in 2021, at 57% compared to 38.2%."
+                    ]
+                }
+            ],
+            "source": [
+                "response.print_response_stream()"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "1d79414d-9143-4150-a6c9-f4f59852e839",
+            "metadata": {
+                "tags": []
+            },
+            "source": [
+                "## Compare and Contrast - Agent"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 491,
+            "id": "5ddff61c-8d8e-4686-b944-d879f59d4466",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "llm = OpenAI(temperature=0.0, max_tokens=-1)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 497,
+            "id": "78880292-cdba-4d0c-8e1a-ca9f8f99d562",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "from llama_index.langchain_helpers.agents import (\n",
+                "    IndexToolConfig,\n",
+                "    LlamaIndexTool,\n",
+                "    LlamaToolkit,\n",
+                ")\n",
+                "\n",
+                "uber_config = IndexToolConfig(\n",
+                "    query_engine=uber_engine,\n",
+                "    name=f\"Uber 10K 2021\",\n",
+                "    description=f\"Provides information about Lyft financials for year 2021\",\n",
+                "    tool_kwargs={\"return_direct\": False},\n",
+                ")\n",
+                "\n",
+                "lyft_config = IndexToolConfig(\n",
+                "    query_engine=lyft_engine,\n",
+                "    name=f\"Lyft 10K 2021\",\n",
+                "    description=f\"Provides information about Uber financials for year 2021\",\n",
+                "    tool_kwargs={\"return_direct\": False},\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 498,
+            "id": "07328a05-70d7-47f7-965d-58d4b3a5dbfa",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "toolkit = LlamaToolkit(\n",
+                "    index_configs=[uber_config, lyft_config],\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 499,
+            "id": "8a37d9cb-bbb6-4b91-9c76-14a8db13c90f",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "from llama_index.langchain_helpers.agents import create_llama_agent\n",
+                "\n",
+                "agent_chain = create_llama_agent(toolkit, llm, memory=memory, verbose=True)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 500,
+            "id": "361ece8a-e6b8-491e-9a63-f8d3142dd743",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "\n",
+                        "\n",
+                        "\u001b[1m> Entering new AgentExecutor chain...\u001b[0m\n",
+                        "\u001b[32;1m\u001b[1;3m I need to look at the financials of both companies.\n",
+                        "Action: Uber 10K 2021\n",
+                        "Action Input: Revenue growth\u001b[0m\n",
+                        "Observation: \u001b[36;1m\u001b[1;3min 2021 was primarily driven by what?\n",
+                        "\n",
+                        "Revenue growth in 2021 was primarily driven by an increase in Delivery Gross Bookings of 71%, or 66% on a constant currency basis, due to an increase in food delivery orders and higher basket sizes as a result of stay-at-home order demand related to COVID-19, as well as continued expansion across U.S. and international markets. Additionally, Mobility Gross Bookings growth of 38%, or 36% on a constant currency basis, due to increases in Trip volumes as the business recovers from the impacts of COVID-19, contributed to the revenue growth.\u001b[0m\n",
+                        "Thought:\u001b[32;1m\u001b[1;3m I need to compare this to Lyft's revenue growth.\n",
+                        "Action: Lyft 10K 2021\n",
+                        "Action Input: Revenue growth\u001b[0m\n",
+                        "Observation: \u001b[33;1m\u001b[1;3m\n",
+                        "Revenue increased $843.6 million, or 36%, in 2021 as compared to the prior year, driven primarily by the significant increase in the number of Active Riders in 2021 as compared to the prior year, as vaccines became more widely distributed and more communities reopened.\u001b[0m\n",
+                        "Thought:\u001b[32;1m\u001b[1;3m I now know the final answer.\n",
+                        "Final Answer: Uber's revenue growth in 2021 was primarily driven by an increase in Delivery Gross Bookings of 71%, or 66% on a constant currency basis, while Lyft's revenue increased $843.6 million, or 36%, in 2021 as compared to the prior year, driven primarily by the significant increase in the number of Active Riders in 2021 as compared to the prior year.\u001b[0m\n",
+                        "\n",
+                        "\u001b[1m> Finished chain.\u001b[0m\n"
+                    ]
+                },
+                {
+                    "data": {
+                        "text/plain": [
+                            "\"Uber's revenue growth in 2021 was primarily driven by an increase in Delivery Gross Bookings of 71%, or 66% on a constant currency basis, while Lyft's revenue increased $843.6 million, or 36%, in 2021 as compared to the prior year, driven primarily by the significant increase in the number of Active Riders in 2021 as compared to the prior year.\""
+                        ]
+                    },
+                    "execution_count": 500,
+                    "metadata": {},
+                    "output_type": "execute_result"
+                }
+            ],
+            "source": [
+                "agent_chain.run(input=\"Compare Uber and Lyft revenue growth in 2021.\")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 534,
+            "id": "822515c6-8266-4d55-bafe-f5262103b68a",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "\n",
+                        "\n",
+                        "\u001b[1m> Entering new AgentExecutor chain...\u001b[0m\n",
+                        "\u001b[32;1m\u001b[1;3m I need to look at the financials of both Uber and Lyft\n",
+                        "Action: Uber 10K 2021\n",
+                        "Action Input: Customer segment and geography information\u001b[0m\n",
+                        "Observation: \u001b[36;1m\u001b[1;3m\n",
+                        "The information provided in the context does not include customer segment and geography information. It does, however, provide information about our reportable segments and a reconciliation of the total segment adjusted EBITDA to loss from operations, as well as information about revenue and long-lived assets, net by geographic area.\u001b[0m\n",
+                        "Thought:\u001b[32;1m\u001b[1;3m I need to look at the financials of Lyft\n",
+                        "Action: Lyft 10K 2021\n",
+                        "Action Input: Customer segment and geography information\u001b[0m\n",
+                        "Observation: \u001b[33;1m\u001b[1;3m\n",
+                        "The Company's customer base is diverse, and its employees reflect that diversity. As of December 31, 2021, the ethnicity of its U.S. employees was 44% White, 30% Asian, 11% Hispanic or Latinx, 8% Black, and 5% two or more races, American Indian, Alaska Native, Native Hawaiian or other Pacific Islander. The Company generates its revenue from its multimodal transportation networks that offer access to a variety of transportation options through the Lyft Platform and mobile-based applications, primarily in the United States.\u001b[0m\n",
+                        "Thought:\u001b[32;1m\u001b[1;3m I now know the final answer\n",
+                        "Final Answer: Uber and Lyft both have diverse customer bases and generate revenue from their multimodal transportation networks in the United States. Uber does not provide customer segment and geography information, while Lyft provides information about its U.S. employees' ethnicity and the geographic area in which it generates revenue.\u001b[0m\n",
+                        "\n",
+                        "\u001b[1m> Finished chain.\u001b[0m\n"
+                    ]
+                },
+                {
+                    "data": {
+                        "text/plain": [
+                            "\"Uber and Lyft both have diverse customer bases and generate revenue from their multimodal transportation networks in the United States. Uber does not provide customer segment and geography information, while Lyft provides information about its U.S. employees' ethnicity and the geographic area in which it generates revenue.\""
+                        ]
+                    },
+                    "execution_count": 534,
+                    "metadata": {},
+                    "output_type": "execute_result"
+                }
+            ],
+            "source": [
+                "agent_chain.run(\n",
+                "    input=\"Compare and contrast the customer segments and geographies that grew the fastest\"\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "fce81d02-e07b-467a-9682-a128d508c426",
+            "metadata": {},
+            "outputs": [],
+            "source": []
+        }
+    ],
+    "metadata": {
+        "kernelspec": {
+            "display_name": "Python 3 (ipykernel)",
+            "language": "python",
+            "name": "python3"
+        },
+        "language_info": {
+            "codemirror_mode": {
+                "name": "ipython",
+                "version": 3
+            },
+            "file_extension": ".py",
+            "mimetype": "text/x-python",
+            "name": "python",
+            "nbconvert_exporter": "python",
+            "pygments_lexer": "ipython3",
+            "version": "3.9.16"
+        }
     },
-    {
-     "data": {
-      "text/plain": [
-       "\"Uber's revenue growth in 2021 was primarily driven by an increase in Delivery Gross Bookings of 71%, or 66% on a constant currency basis, while Lyft's revenue increased $843.6 million, or 36%, in 2021 as compared to the prior year, driven primarily by the significant increase in the number of Active Riders in 2021 as compared to the prior year.\""
-      ]
-     },
-     "execution_count": 500,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "agent_chain.run(input=\"Compare Uber and Lyft revenue growth in 2021.\")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 534,
-   "id": "822515c6-8266-4d55-bafe-f5262103b68a",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "\n",
-      "\n",
-      "\u001b[1m> Entering new AgentExecutor chain...\u001b[0m\n",
-      "\u001b[32;1m\u001b[1;3m I need to look at the financials of both Uber and Lyft\n",
-      "Action: Uber 10K 2021\n",
-      "Action Input: Customer segment and geography information\u001b[0m\n",
-      "Observation: \u001b[36;1m\u001b[1;3m\n",
-      "The information provided in the context does not include customer segment and geography information. It does, however, provide information about our reportable segments and a reconciliation of the total segment adjusted EBITDA to loss from operations, as well as information about revenue and long-lived assets, net by geographic area.\u001b[0m\n",
-      "Thought:\u001b[32;1m\u001b[1;3m I need to look at the financials of Lyft\n",
-      "Action: Lyft 10K 2021\n",
-      "Action Input: Customer segment and geography information\u001b[0m\n",
-      "Observation: \u001b[33;1m\u001b[1;3m\n",
-      "The Company's customer base is diverse, and its employees reflect that diversity. As of December 31, 2021, the ethnicity of its U.S. employees was 44% White, 30% Asian, 11% Hispanic or Latinx, 8% Black, and 5% two or more races, American Indian, Alaska Native, Native Hawaiian or other Pacific Islander. The Company generates its revenue from its multimodal transportation networks that offer access to a variety of transportation options through the Lyft Platform and mobile-based applications, primarily in the United States.\u001b[0m\n",
-      "Thought:\u001b[32;1m\u001b[1;3m I now know the final answer\n",
-      "Final Answer: Uber and Lyft both have diverse customer bases and generate revenue from their multimodal transportation networks in the United States. Uber does not provide customer segment and geography information, while Lyft provides information about its U.S. employees' ethnicity and the geographic area in which it generates revenue.\u001b[0m\n",
-      "\n",
-      "\u001b[1m> Finished chain.\u001b[0m\n"
-     ]
-    },
-    {
-     "data": {
-      "text/plain": [
-       "\"Uber and Lyft both have diverse customer bases and generate revenue from their multimodal transportation networks in the United States. Uber does not provide customer segment and geography information, while Lyft provides information about its U.S. employees' ethnicity and the geographic area in which it generates revenue.\""
-      ]
-     },
-     "execution_count": 534,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "agent_chain.run(\n",
-    "    input=\"Compare and contrast the customer segments and geographies that grew the fastest\"\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "fce81d02-e07b-467a-9682-a128d508c426",
-   "metadata": {},
-   "outputs": [],
-   "source": []
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "Python 3 (ipykernel)",
-   "language": "python",
-   "name": "python3"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 3
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython3",
-   "version": "3.9.16"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 5
+    "nbformat": 4,
+    "nbformat_minor": 5
 }
diff --git a/docs/examples/usecases/10q_fn_agent-react-compare.ipynb b/docs/examples/usecases/10q_fn_agent-react-compare.ipynb
index f193edb1a44b7be098409f6c800a5eeea50e4eed..0db3124f7d0da47777ff00978f2878d8ca33c470 100644
--- a/docs/examples/usecases/10q_fn_agent-react-compare.ipynb
+++ b/docs/examples/usecases/10q_fn_agent-react-compare.ipynb
@@ -867,7 +867,7 @@
    },
    "outputs": [],
    "source": [
-    "from llama_index import ComposableGraph, GPTListIndex"
+    "from llama_index import ComposableGraph, SummaryIndex"
    ]
   },
   {
@@ -880,7 +880,7 @@
    "outputs": [],
    "source": [
     "graph = ComposableGraph.from_indices(\n",
-    "    GPTListIndex,\n",
+    "    SummaryIndex,\n",
     "    children_indices=[march_index, june_index, sept_index],\n",
     "    index_summaries=[\n",
     "        \"Provides information about Uber quarterly financials ending March 2022\",\n",
diff --git a/docs/examples/usecases/City_Analysis-Decompose-KeywordTable.ipynb b/docs/examples/usecases/City_Analysis-Decompose-KeywordTable.ipynb
index df495571dfeecf8c1796e324b0b940f5ed3ff499..b860492e756ceb72c8f4b686edc1ba09474189c6 100644
--- a/docs/examples/usecases/City_Analysis-Decompose-KeywordTable.ipynb
+++ b/docs/examples/usecases/City_Analysis-Decompose-KeywordTable.ipynb
@@ -59,7 +59,7 @@
     "from llama_index import (\n",
     "    VectorStoreIndex,\n",
     "    SimpleKeywordTableIndex,\n",
-    "    ListIndex,\n",
+    "    SummaryIndex,\n",
     "    SimpleDirectoryReader,\n",
     "    LLMPredictor,\n",
     "    ServiceContext,\n",
diff --git a/examples/async/AsyncComposableIndicesSEC.ipynb b/examples/async/AsyncComposableIndicesSEC.ipynb
index 96b03f40ecf7eb0d99163d147d7ac19992d79b1f..6f646b5b277300a2e19732dd3dd2138fc5f8cbe1 100644
--- a/examples/async/AsyncComposableIndicesSEC.ipynb
+++ b/examples/async/AsyncComposableIndicesSEC.ipynb
@@ -176,7 +176,7 @@
    },
    "outputs": [],
    "source": [
-    "from llama_index import ListIndex, LLMPredictor\n",
+    "from llama_index import SummaryIndex, LLMPredictor\n",
     "from llama_index.llms import OpenAI\n",
     "from llama_index.composability import ComposableGraph"
    ]
@@ -223,7 +223,7 @@
     "# define a list index over the vector indices\n",
     "# allows us to synthesize information across each index\n",
     "graph = ComposableGraph.from_indices(\n",
-    "    ListIndex,\n",
+    "    SummaryIndex,\n",
     "    children_indices=[index_set[y] for y in years],\n",
     "    index_summaries=index_summaries,\n",
     "    service_context=service_context,\n",
diff --git a/examples/async/AsyncQueryDemo.ipynb b/examples/async/AsyncQueryDemo.ipynb
index b4d5081b47eb22b1f819e445d8564e3050570a69..5c30a226c9ea085cbbbd967f5634ecf5a69d34ac 100644
--- a/examples/async/AsyncQueryDemo.ipynb
+++ b/examples/async/AsyncQueryDemo.ipynb
@@ -36,7 +36,7 @@
    "outputs": [],
    "source": [
     "import time\n",
-    "from llama_index import ListIndex, SimpleDirectoryReader"
+    "from llama_index import SummaryIndex, SimpleDirectoryReader"
    ]
   },
   {
@@ -73,7 +73,7 @@
    },
    "outputs": [],
    "source": [
-    "index = ListIndex.from_documents(documents)"
+    "index = SummaryIndex.from_documents(documents)"
    ]
   },
   {
diff --git a/examples/chatbot/Chatbot_SEC.ipynb b/examples/chatbot/Chatbot_SEC.ipynb
index 2c30694175c3dca3ceb9f18bbdc7bf8e442e769d..39c6dc3c123c5d9014ea01a59a153156a6af9db1 100644
--- a/examples/chatbot/Chatbot_SEC.ipynb
+++ b/examples/chatbot/Chatbot_SEC.ipynb
@@ -203,7 +203,7 @@
    },
    "outputs": [],
    "source": [
-    "from llama_index import ListIndex, LLMPredictor\n",
+    "from llama_index import SummaryIndex, LLMPredictor\n",
     "from llama_index.llms import OpenAI\n",
     "from llama_index.indices.composability import ComposableGraph"
    ]
@@ -248,7 +248,7 @@
     "# define a list index over the vector indices\n",
     "# allows us to synthesize information across each index\n",
     "graph = ComposableGraph.from_indices(\n",
-    "    ListIndex,\n",
+    "    SummaryIndex,\n",
     "    [index_set[y] for y in years],\n",
     "    index_summaries=index_summaries,\n",
     "    service_context=service_context,\n",
diff --git a/examples/chatgpt_plugin/ChatGPTRetrievalPluginReaderDemo.ipynb b/examples/chatgpt_plugin/ChatGPTRetrievalPluginReaderDemo.ipynb
index 5a937c7ad04055e0d457ac606f2bc588cdfe6796..090a5945ba1bed779cf7e258ca17a7fbb386578f 100644
--- a/examples/chatgpt_plugin/ChatGPTRetrievalPluginReaderDemo.ipynb
+++ b/examples/chatgpt_plugin/ChatGPTRetrievalPluginReaderDemo.ipynb
@@ -116,7 +116,7 @@
    },
    "outputs": [],
    "source": [
-    "from llama_index import ListIndex"
+    "from llama_index import SummaryIndex"
    ]
   },
   {
@@ -139,7 +139,7 @@
     }
    ],
    "source": [
-    "index = ListIndex(documents)"
+    "index = SummaryIndex(documents)"
    ]
   },
   {
@@ -245,4 +245,4 @@
  },
  "nbformat": 4,
  "nbformat_minor": 5
-}
\ No newline at end of file
+}
diff --git a/examples/langchain_demo/LangchainDemo.ipynb b/examples/langchain_demo/LangchainDemo.ipynb
index b54e16f5465e37b3dcb455a1c508605b9c9c3813..3f0dee3cab6b94e909a4392da0718c472997960b 100644
--- a/examples/langchain_demo/LangchainDemo.ipynb
+++ b/examples/langchain_demo/LangchainDemo.ipynb
@@ -192,7 +192,7 @@
     "from langchain.llms import OpenAIChat\n",
     "from langchain.agents import initialize_agent\n",
     "\n",
-    "from llama_index import ListIndex\n",
+    "from llama_index import SummaryIndex\n",
     "from llama_index.langchain_helpers.memory_wrapper import GPTIndexChatMemory"
    ]
   },
@@ -212,7 +212,7 @@
     }
    ],
    "source": [
-    "index = ListIndex([])"
+    "index = SummaryIndex([])"
    ]
   },
   {
@@ -320,7 +320,7 @@
     }
    ],
    "source": [
-    "# NOTE: the query now calls the ListIndex memory module.\n",
+    "# NOTE: the query now calls the SummaryIndex memory module.\n",
     "agent_executor.run(input=\"what's my name?\")"
    ]
   },
diff --git a/examples/paul_graham_essay/GPT4Comparison.ipynb b/examples/paul_graham_essay/GPT4Comparison.ipynb
index 475db06e5a53074180a2312784053e1ce1ae864c..f819898115267b1c867f41b82f65ee0a6869ccef 100644
--- a/examples/paul_graham_essay/GPT4Comparison.ipynb
+++ b/examples/paul_graham_essay/GPT4Comparison.ipynb
@@ -7,7 +7,12 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "from llama_index import ListIndex, SimpleDirectoryReader, LLMPredictor, ServiceContext\n",
+    "from llama_index import (\n",
+    "    SummaryIndex,\n",
+    "    SimpleDirectoryReader,\n",
+    "    LLMPredictor,\n",
+    "    ServiceContext,\n",
+    ")\n",
     "from llama_index.response.notebook_utils import display_response\n",
     "from llama_index.llms import OpenAI\n",
     "from IPython.display import Markdown, display"
@@ -59,7 +64,7 @@
     }
    ],
    "source": [
-    "davinci_index = ListIndex.from_documents(documents, service_context=service_context)"
+    "davinci_index = SummaryIndex.from_documents(documents, service_context=service_context)"
    ]
   },
   {
@@ -391,7 +396,7 @@
     }
    ],
    "source": [
-    "gpt4_index = ListIndex.from_documents(documents, service_context=service_context)"
+    "gpt4_index = SummaryIndex.from_documents(documents, service_context=service_context)"
    ]
   },
   {
diff --git a/examples/paul_graham_essay/InsertDemo.ipynb b/examples/paul_graham_essay/InsertDemo.ipynb
index 76ce80d7906403d6210f5fe9117eba6f709930ea..a3cfe058427067b7bff2ee94ab419cd0ba096b63 100644
--- a/examples/paul_graham_essay/InsertDemo.ipynb
+++ b/examples/paul_graham_essay/InsertDemo.ipynb
@@ -1,319 +1,319 @@
 {
- "cells": [
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "46e5110c-ed35-463e-a9f6-cff9cda6221b",
-   "metadata": {},
-   "source": [
-    "This notebook showcases the insert capabilities of different LlamaIndex data structures.\n",
-    "\n",
-    "To see how to build the index during initialization, see `TestEssay.ipynb` instead."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "6ef7a7a6-dc10-4d94-8bdd-22b4954d365a",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# My OpenAI Key\n",
-    "import os\n",
-    "\n",
-    "os.environ[\"OPENAI_API_KEY\"] = \"INSERT OPENAI KEY\""
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "708da88f-d1b0-41d2-ad71-773300bf3ec5",
-   "metadata": {},
-   "source": [
-    "## GPT List Insert"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "5ff6b625-fb53-4885-8fbb-0fa8dcf6ef57",
-   "metadata": {
-    "tags": []
-   },
-   "source": [
-    "#### Data Prep\n",
-    "Chunk up the data into sub documents that we can insert"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "a191e411-f07f-4895-9943-6a8b030abd66",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "from llama_index.text_splitter import TokenTextSplitter\n",
-    "from llama_index import SimpleDirectoryReader, Document"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 15,
-   "id": "0a23c1a8-71ea-4b6d-ae42-5c1cf4014dff",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "document = SimpleDirectoryReader(\"data\").load_data()[0]\n",
-    "text_splitter = TokenTextSplitter(separator=\" \", chunk_size=2048, chunk_overlap=20)\n",
-    "text_chunks = text_splitter.split_text(document.text)\n",
-    "doc_chunks = [Document(text=t) for t in text_chunks]"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "bbdd1b84-d8e4-421a-972c-389a5b0160c6",
-   "metadata": {},
-   "source": [
-    "#### Insert into Index and Query"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 16,
-   "id": "d6dcb65d-1a10-471a-8b80-d1bedf7437dc",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "from llama_index import ListIndex, SimpleDirectoryReader\n",
-    "from IPython.display import Markdown, display"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 17,
-   "id": "52c3839a-aa57-412b-b5cf-78c71d6dae3c",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# initialize blank list index\n",
-    "index = ListIndex([])"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "3abf2203-54b6-44c3-ac98-97503b18d3ef",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# insert new document chunks\n",
-    "for doc_chunk in doc_chunks:\n",
-    "    index.insert(doc_chunk)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 19,
-   "id": "228ccfb8-00c5-49d1-ba6c-61a757dd76eb",
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "> Starting query: What did the author do growing up?\n"
-     ]
-    }
-   ],
-   "source": [
-    "# query\n",
-    "query_engine = index.as_query_engine()\n",
-    "response = query_engine.query(\"What did the author do growing up?\")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 21,
-   "id": "2580025c-7a5a-419e-84f0-b2ad7b62b6c2",
-   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/markdown": [
-       "<b>\n",
-       "\n",
-       "The author worked on writing and programming and also took classes at Harvard and RISD. The author also worked at a company called Interleaf and wrote a book on Lisp. The author decided to write another book on Lisp and also started a company called Viaweb with the goal of putting art galleries online. The company eventually pivoted to focus on creating software to build online stores. The author also worked on making ecommerce software in the second half of the 90s.\n",
-       "\n",
-       "The author also worked on building the infrastructure of the web and wrote essays that were published online. The author also worked on spam filters and bought a building in Cambridge to use as an office. The author also had a dinner party every Thursday night.\n",
-       "\n",
-       "The author also worked on marketing at a Boston VC firm, writing essays, and building the infrastructure of the web. The author also started the Y Combinator program to help fund startups.</b>"
-      ],
-      "text/plain": [
-       "<IPython.core.display.Markdown object>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    }
-   ],
-   "source": [
-    "display(Markdown(f\"<b>{response}</b>\"))"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "dd41721b-0388-4300-b1cd-75c1013795f4",
-   "metadata": {},
-   "source": [
-    "## GPT Tree Insert"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "7d5ecfd2-5913-4484-9619-8825b252406b",
-   "metadata": {},
-   "source": [
-    "#### Data Prep\n",
-    "Chunk up the data into sub documents that we can insert"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 2,
-   "id": "4d55ffef-a309-40ca-bbcf-231e418b6d4c",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "from llama_index.text_splitter import TokenTextSplitter\n",
-    "from llama_index import SimpleDirectoryReader, Document"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 2,
-   "id": "2d850fe2-78e3-46de-a78d-4ef12848e41d",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# NOTE: we truncate to the first 30 nodes to save on cost\n",
-    "document = SimpleDirectoryReader(\"data\").load_data()[0]\n",
-    "text_splitter = TokenTextSplitter(separator=\" \", chunk_size=256, chunk_overlap=20)\n",
-    "text_chunks = text_splitter.split_text(document.get_text())\n",
-    "doc_chunks = [Document(text=t) for t in text_chunks]\n",
-    "\n",
-    "doc_chunks = doc_chunks[:30]"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "d4f13125-5627-48f8-9035-e734849dc9da",
-   "metadata": {},
-   "source": [
-    "#### Insert into Index and Query"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 3,
-   "id": "4d631ba9-e5fe-46d1-ae10-936a4704c95e",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "from llama_index import TreeIndex, SimpleDirectoryReader\n",
-    "from IPython.display import Markdown, display"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 4,
-   "id": "54454e62-4df1-482d-978b-807d4a802033",
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "> Building index from nodes: 0 chunks\n"
-     ]
-    }
-   ],
-   "source": [
-    "# initialize blank tree index\n",
-    "tree_index = TreeIndex([])"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "422dde38-5228-499c-a9d3-49b40fd14d34",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# insert new document chunks\n",
-    "for i, doc_chunk in enumerate(doc_chunks):\n",
-    "    print(f\"Inserting {i}/{len(doc_chunks)}\")\n",
-    "    tree_index.insert(doc_chunk)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "f528cded-7a43-4854-82ce-361a6610bc34",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# query\n",
-    "query_engine = tree_index.as_query_engine()\n",
-    "response_tree = query_engine.query(\"What did the author do growing up?\")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 9,
-   "id": "c753016d-b21e-47f3-b9f3-a70943f407c5",
-   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/markdown": [
-       "<b>The author wrote stories and tried to program computers.</b>"
-      ],
-      "text/plain": [
-       "<IPython.core.display.Markdown object>"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    }
-   ],
-   "source": [
-    "display(Markdown(f\"<b>{response_tree}</b>\"))"
-   ]
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "llama_index",
-   "language": "python",
-   "name": "llama_index"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 3
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython3",
-   "version": "3.10.10"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 5
+    "cells": [
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "46e5110c-ed35-463e-a9f6-cff9cda6221b",
+            "metadata": {},
+            "source": [
+                "This notebook showcases the insert capabilities of different LlamaIndex data structures.\n",
+                "\n",
+                "To see how to build the index during initialization, see `TestEssay.ipynb` instead."
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "6ef7a7a6-dc10-4d94-8bdd-22b4954d365a",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# My OpenAI Key\n",
+                "import os\n",
+                "\n",
+                "os.environ[\"OPENAI_API_KEY\"] = \"INSERT OPENAI KEY\""
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "708da88f-d1b0-41d2-ad71-773300bf3ec5",
+            "metadata": {},
+            "source": [
+                "## GPT List Insert"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "5ff6b625-fb53-4885-8fbb-0fa8dcf6ef57",
+            "metadata": {
+                "tags": []
+            },
+            "source": [
+                "#### Data Prep\n",
+                "Chunk up the data into sub documents that we can insert"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "a191e411-f07f-4895-9943-6a8b030abd66",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "from llama_index.text_splitter import TokenTextSplitter\n",
+                "from llama_index import SimpleDirectoryReader, Document"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 15,
+            "id": "0a23c1a8-71ea-4b6d-ae42-5c1cf4014dff",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "document = SimpleDirectoryReader(\"data\").load_data()[0]\n",
+                "text_splitter = TokenTextSplitter(separator=\" \", chunk_size=2048, chunk_overlap=20)\n",
+                "text_chunks = text_splitter.split_text(document.text)\n",
+                "doc_chunks = [Document(text=t) for t in text_chunks]"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "bbdd1b84-d8e4-421a-972c-389a5b0160c6",
+            "metadata": {},
+            "source": [
+                "#### Insert into Index and Query"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 16,
+            "id": "d6dcb65d-1a10-471a-8b80-d1bedf7437dc",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "from llama_index import SummaryIndex, SimpleDirectoryReader\n",
+                "from IPython.display import Markdown, display"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 17,
+            "id": "52c3839a-aa57-412b-b5cf-78c71d6dae3c",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# initialize blank summary index\n",
+                "index = SummaryIndex([])"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "3abf2203-54b6-44c3-ac98-97503b18d3ef",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# insert new document chunks\n",
+                "for doc_chunk in doc_chunks:\n",
+                "    index.insert(doc_chunk)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 19,
+            "id": "228ccfb8-00c5-49d1-ba6c-61a757dd76eb",
+            "metadata": {},
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "> Starting query: What did the author do growing up?\n"
+                    ]
+                }
+            ],
+            "source": [
+                "# query\n",
+                "query_engine = index.as_query_engine()\n",
+                "response = query_engine.query(\"What did the author do growing up?\")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 21,
+            "id": "2580025c-7a5a-419e-84f0-b2ad7b62b6c2",
+            "metadata": {},
+            "outputs": [
+                {
+                    "data": {
+                        "text/markdown": [
+                            "<b>\n",
+                            "\n",
+                            "The author worked on writing and programming and also took classes at Harvard and RISD. The author also worked at a company called Interleaf and wrote a book on Lisp. The author decided to write another book on Lisp and also started a company called Viaweb with the goal of putting art galleries online. The company eventually pivoted to focus on creating software to build online stores. The author also worked on making ecommerce software in the second half of the 90s.\n",
+                            "\n",
+                            "The author also worked on building the infrastructure of the web and wrote essays that were published online. The author also worked on spam filters and bought a building in Cambridge to use as an office. The author also had a dinner party every Thursday night.\n",
+                            "\n",
+                            "The author also worked on marketing at a Boston VC firm, writing essays, and building the infrastructure of the web. The author also started the Y Combinator program to help fund startups.</b>"
+                        ],
+                        "text/plain": [
+                            "<IPython.core.display.Markdown object>"
+                        ]
+                    },
+                    "metadata": {},
+                    "output_type": "display_data"
+                }
+            ],
+            "source": [
+                "display(Markdown(f\"<b>{response}</b>\"))"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "dd41721b-0388-4300-b1cd-75c1013795f4",
+            "metadata": {},
+            "source": [
+                "## GPT Tree Insert"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "7d5ecfd2-5913-4484-9619-8825b252406b",
+            "metadata": {},
+            "source": [
+                "#### Data Prep\n",
+                "Chunk up the data into sub documents that we can insert"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 2,
+            "id": "4d55ffef-a309-40ca-bbcf-231e418b6d4c",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "from llama_index.text_splitter import TokenTextSplitter\n",
+                "from llama_index import SimpleDirectoryReader, Document"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 2,
+            "id": "2d850fe2-78e3-46de-a78d-4ef12848e41d",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# NOTE: we truncate to the first 30 nodes to save on cost\n",
+                "document = SimpleDirectoryReader(\"data\").load_data()[0]\n",
+                "text_splitter = TokenTextSplitter(separator=\" \", chunk_size=256, chunk_overlap=20)\n",
+                "text_chunks = text_splitter.split_text(document.get_text())\n",
+                "doc_chunks = [Document(text=t) for t in text_chunks]\n",
+                "\n",
+                "doc_chunks = doc_chunks[:30]"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "d4f13125-5627-48f8-9035-e734849dc9da",
+            "metadata": {},
+            "source": [
+                "#### Insert into Index and Query"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 3,
+            "id": "4d631ba9-e5fe-46d1-ae10-936a4704c95e",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "from llama_index import TreeIndex, SimpleDirectoryReader\n",
+                "from IPython.display import Markdown, display"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 4,
+            "id": "54454e62-4df1-482d-978b-807d4a802033",
+            "metadata": {},
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "> Building index from nodes: 0 chunks\n"
+                    ]
+                }
+            ],
+            "source": [
+                "# initialize blank tree index\n",
+                "tree_index = TreeIndex([])"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "422dde38-5228-499c-a9d3-49b40fd14d34",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# insert new document chunks\n",
+                "for i, doc_chunk in enumerate(doc_chunks):\n",
+                "    print(f\"Inserting {i}/{len(doc_chunks)}\")\n",
+                "    tree_index.insert(doc_chunk)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "f528cded-7a43-4854-82ce-361a6610bc34",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# query\n",
+                "query_engine = tree_index.as_query_engine()\n",
+                "response_tree = query_engine.query(\"What did the author do growing up?\")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 9,
+            "id": "c753016d-b21e-47f3-b9f3-a70943f407c5",
+            "metadata": {},
+            "outputs": [
+                {
+                    "data": {
+                        "text/markdown": [
+                            "<b>The author wrote stories and tried to program computers.</b>"
+                        ],
+                        "text/plain": [
+                            "<IPython.core.display.Markdown object>"
+                        ]
+                    },
+                    "metadata": {},
+                    "output_type": "display_data"
+                }
+            ],
+            "source": [
+                "display(Markdown(f\"<b>{response_tree}</b>\"))"
+            ]
+        }
+    ],
+    "metadata": {
+        "kernelspec": {
+            "display_name": "llama_index",
+            "language": "python",
+            "name": "llama_index"
+        },
+        "language_info": {
+            "codemirror_mode": {
+                "name": "ipython",
+                "version": 3
+            },
+            "file_extension": ".py",
+            "mimetype": "text/x-python",
+            "name": "python",
+            "nbconvert_exporter": "python",
+            "pygments_lexer": "ipython3",
+            "version": "3.10.10"
+        }
+    },
+    "nbformat": 4,
+    "nbformat_minor": 5
 }
diff --git a/examples/paul_graham_essay/TestEssay.ipynb b/examples/paul_graham_essay/TestEssay.ipynb
index 404778d020042e5a292ad6926d66b0b5cc01a2e5..965840933e43f3470df1b5f43a955f9b1449988f 100644
--- a/examples/paul_graham_essay/TestEssay.ipynb
+++ b/examples/paul_graham_essay/TestEssay.ipynb
@@ -493,7 +493,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "from llama_index import ListIndex, SimpleDirectoryReader\n",
+    "from llama_index import SummaryIndex, SimpleDirectoryReader\n",
     "from IPython.display import Markdown, display"
    ]
   },
@@ -526,7 +526,7 @@
    "source": [
     "# build list index\n",
     "documents = SimpleDirectoryReader(\"data\").load_data()\n",
-    "index = ListIndex.from_documents(documents)"
+    "index = SummaryIndex.from_documents(documents)"
    ]
   },
   {
diff --git a/examples/test_wiki/TestNYC-Benchmark-GPT4.ipynb b/examples/test_wiki/TestNYC-Benchmark-GPT4.ipynb
index 25bfc407c2df1e555a35b2d5f3199b3fcf250a2a..14f2c6b4d720ab62c7f271db0153b9fa4c11d43e 100644
--- a/examples/test_wiki/TestNYC-Benchmark-GPT4.ipynb
+++ b/examples/test_wiki/TestNYC-Benchmark-GPT4.ipynb
@@ -1,1724 +1,1724 @@
 {
- "cells": [
-  {
-   "cell_type": "code",
-   "execution_count": 17,
-   "id": "9080b39e",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "import logging, sys\n",
-    "\n",
-    "logging.basicConfig(stream=sys.stdout, level=logging.INFO)\n",
-    "logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))\n",
-    "\n",
-    "# Uncomment if you want to temporarily disable logger\n",
-    "logging.disable(sys.maxsize)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 2,
-   "id": "7de92ce3",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# NOTE: only necessary for querying with `use_async=True` in notebook\n",
-    "import nest_asyncio\n",
-    "\n",
-    "nest_asyncio.apply()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 3,
-   "id": "f1a9eb90-335c-4214-8bb6-fd1edbe3ccbd",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# My OpenAI Key\n",
-    "import os\n",
-    "\n",
-    "os.environ[\"OPENAI_API_KEY\"] = \"\""
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 3,
-   "id": "8d0b2364-4806-4656-81e7-3f6e4b910b5b",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "from llama_index import (\n",
-    "    TreeIndex,\n",
-    "    SimpleDirectoryReader,\n",
-    "    LLMPredictor,\n",
-    "    VectorStoreIndex,\n",
-    "    ListIndex,\n",
-    "    PromptTemplate,\n",
-    "    ServiceContext,\n",
-    ")\n",
-    "from llama_index.indices.base import BaseIndex\n",
-    "from llama_index.llms.base import LLM\n",
-    "from llama_index.llms import OpenAI\n",
-    "from llama_index.response.schema import Response\n",
-    "import pandas as pd\n",
-    "from typing import Tuple"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "707662e5",
-   "metadata": {},
-   "source": [
-    "# Setup data"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 2,
-   "id": "b4b4387b-413e-4016-ba1e-88b3d9410a38",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# fetch \"New York City\" page from Wikipedia\n",
-    "from pathlib import Path\n",
-    "\n",
-    "import requests\n",
-    "\n",
-    "response = requests.get(\n",
-    "    \"https://en.wikipedia.org/w/api.php\",\n",
-    "    params={\n",
-    "        \"action\": \"query\",\n",
-    "        \"format\": \"json\",\n",
-    "        \"titles\": \"New York City\",\n",
-    "        \"prop\": \"extracts\",\n",
-    "        # 'exintro': True,\n",
-    "        \"explaintext\": True,\n",
-    "    },\n",
-    ").json()\n",
-    "page = next(iter(response[\"query\"][\"pages\"].values()))\n",
-    "nyc_text = page[\"extract\"]\n",
-    "\n",
-    "data_path = Path(\"data\")\n",
-    "if not data_path.exists():\n",
-    "    Path.mkdir(data_path)\n",
-    "\n",
-    "with open(\"data/nyc_text.txt\", \"w\") as fp:\n",
-    "    fp.write(nyc_text)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 4,
-   "id": "523fbebe-6e79-4d7b-b400-188b711a0e8f",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "DEBUG:llama_index.readers.file.base:> [SimpleDirectoryReader] Total files added: 1\n",
-      "> [SimpleDirectoryReader] Total files added: 1\n"
-     ]
-    }
-   ],
-   "source": [
-    "documents = SimpleDirectoryReader(\"data\").load_data()"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "f4a269bd",
-   "metadata": {},
-   "source": [
-    "# Setup benchmark"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 5,
-   "id": "62f01ddf",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "from dataclasses import dataclass\n",
-    "from typing import List"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 6,
-   "id": "4ff13cd4",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "@dataclass\n",
-    "class TestCase:\n",
-    "    query: str\n",
-    "    must_contain: List[str]"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 7,
-   "id": "9c653b72",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "@dataclass\n",
-    "class TestOutcome:\n",
-    "    test: TestCase\n",
-    "    response: Response\n",
-    "\n",
-    "    @property\n",
-    "    def is_correct_response(self) -> bool:\n",
-    "        is_correct = True\n",
-    "        for answer in self.test.must_contain:\n",
-    "            if answer not in self.response.response:\n",
-    "                is_correct = False\n",
-    "        return is_correct\n",
-    "\n",
-    "    @property\n",
-    "    def is_correct_source(self) -> bool:\n",
-    "        is_correct = True\n",
-    "        for answer in self.test.must_contain:\n",
-    "            if all(\n",
-    "                answer not in node.source_text for node in self.response.source_nodes\n",
-    "            ):\n",
-    "                is_correct = False\n",
-    "        return is_correct"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 8,
-   "id": "b9cd18ae",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "class Benchmark:\n",
-    "    def __init__(self, tests: List[TestCase]) -> None:\n",
-    "        self._tests = tests\n",
-    "\n",
-    "    def test(self, index: BaseIndex, llm: LLM, **kwargs) -> List[TestOutcome]:\n",
-    "        outcomes: List[TestOutcome] = []\n",
-    "        service_context = ServiceContext.from_defaults(llm=llm)\n",
-    "        for test in self._tests:\n",
-    "            query_engine = index.as_query_engine(\n",
-    "                service_context=service_context, **kwargs\n",
-    "            )\n",
-    "            response = query_engine.query(\n",
-    "                test.query,\n",
-    "            )\n",
-    "            outcome = TestOutcome(test=test, response=response)\n",
-    "            outcomes.append(outcome)\n",
-    "        return outcomes"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 9,
-   "id": "8edad985",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "def analyze_outcome(outcomes: List[TestOutcome]) -> None:\n",
-    "    rows = []\n",
-    "    for outcome in outcomes:\n",
-    "        row = [\n",
-    "            outcome.test.query,\n",
-    "            outcome.is_correct_response,\n",
-    "            outcome.is_correct_source,\n",
-    "        ]\n",
-    "        rows.append(row)\n",
-    "    df = pd.DataFrame(\n",
-    "        rows, columns=[\"Test Query\", \"Correct Response\", \"Correct Source\"]\n",
-    "    )\n",
-    "    return df"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 10,
-   "id": "4bc38077",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "test_battle = TestCase(\n",
-    "    query=\"What battles took place in New York City in the American Revolution?\",\n",
-    "    must_contain=[\"Battle of Long Island\"],\n",
-    ")\n",
-    "\n",
-    "test_mayor = TestCase(\n",
-    "    query=\"Who was elected as the mayor after the Great Depression?\",\n",
-    "    must_contain=[\"Fiorello La Guardia\"],\n",
-    ")\n",
-    "\n",
-    "test_tourists = TestCase(\n",
-    "    query=\"How many tourists visited New York City in 2019?\",\n",
-    "    must_contain=[\"66.6 million\"],\n",
-    ")\n",
-    "test_airport = TestCase(\n",
-    "    query=\"What are the airports in New York City?\", must_contain=[\"LaGuardia Airport\"]\n",
-    ")\n",
-    "test_visit = TestCase(\n",
-    "    query=\"When was the first documented visit into New York Harbor?\",\n",
-    "    must_contain=[\"1524\"],\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 11,
-   "id": "f159dadb",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "bm = Benchmark(\n",
-    "    [\n",
-    "        test_battle,\n",
-    "        test_mayor,\n",
-    "        test_tourists,\n",
-    "        test_airport,\n",
-    "        test_visit,\n",
-    "    ]\n",
-    ")"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "65ddbd56",
-   "metadata": {},
-   "source": [
-    "# LLM based evaluation"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 592,
-   "id": "ed175de5",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "EVAL_PROMPT_TMPL = (\n",
-    "    \"Given the question below. \\n\"\n",
-    "    \"---------------------\\n\"\n",
-    "    \"{query_str}\"\n",
-    "    \"\\n---------------------\\n\"\n",
-    "    \"Decide if the following retreived context is relevant. \\n\"\n",
-    "    \"\\n---------------------\\n\"\n",
-    "    \"{context_str}\"\n",
-    "    \"\\n---------------------\\n\"\n",
-    "    \"Then decide if the answer is correct. \\n\"\n",
-    "    \"\\n---------------------\\n\"\n",
-    "    \"{answer_str}\"\n",
-    "    \"\\n---------------------\\n\"\n",
-    "    \"Answer in the following format:\\n\"\n",
-    "    \"'Context is relevant: <True>\\nAnswer is correct: <True>' \"\n",
-    "    \"and explain why.\"\n",
-    ")\n",
-    "\n",
-    "DEFAULT_EVAL_PROMPT = PromptTemplate(EVAL_PROMPT_TMPL)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 593,
-   "id": "93c498b6",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "import re\n",
-    "\n",
-    "\n",
-    "def extract_eval_result(result_str: str):\n",
-    "    boolean_pattern = r\"(True|False)\"\n",
-    "    matches = re.findall(boolean_pattern, result_str)\n",
-    "    return [match == \"True\" for match in matches]"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 594,
-   "id": "4c8109c3",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "def analyze_outcome_llm_single(\n",
-    "    outcome: TestOutcome, llm_predictor: LLMPredictor\n",
-    ") -> Tuple[bool, bool]:\n",
-    "    try:\n",
-    "        source_text = outcome.response.source_nodes[0].source_text\n",
-    "    except:\n",
-    "        source_text = \"Failed to retrieve any context\"\n",
-    "    result_str, _ = llm_predictor.predict(\n",
-    "        DEFAULT_EVAL_PROMPT,\n",
-    "        query_str=outcome.test.query,\n",
-    "        context_str=source_text,\n",
-    "        answer_str=outcome.response.response,\n",
-    "    )\n",
-    "    is_context_relevant, is_answer_correct = extract_eval_result(result_str)\n",
-    "    return is_answer_correct, is_context_relevant, result_str\n",
-    "\n",
-    "\n",
-    "def analyze_outcome_llm(\n",
-    "    outcomes: List[TestOutcome], llm_predictor: LLMPredictor\n",
-    ") -> None:\n",
-    "    rows = []\n",
-    "    for outcome in outcomes:\n",
-    "        is_correct_response, is_correct_source, result_str = analyze_outcome_llm_single(\n",
-    "            outcome, llm_predictor\n",
-    "        )\n",
-    "        row = [outcome.test.query, is_correct_response, is_correct_source, result_str]\n",
-    "        rows.append(row)\n",
-    "    df = pd.DataFrame(\n",
-    "        rows,\n",
-    "        columns=[\n",
-    "            \"Test Query\",\n",
-    "            \"Correct Response (LLM)\",\n",
-    "            \"Correct Source (LLM)\",\n",
-    "            \"Eval (LLM)\",\n",
-    "        ],\n",
-    "    )\n",
-    "    return df"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "5a9f43a6",
-   "metadata": {},
-   "source": [
-    "# Build Indices"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 643,
-   "id": "790bad05",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "vector_index = VectorStoreIndex.from_documents(\n",
-    "    documents,\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 473,
-   "id": "64c970e0",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "list_index = ListIndex.from_documents(\n",
-    "    documents,\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 468,
-   "id": "bacc4f1c",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "tree_index = TreeIndex.from_documents(documents)"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "5b2e7fdd",
-   "metadata": {},
-   "source": [
-    "# Create LLMPredictors"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 12,
-   "id": "4766ac56-ac8d-4f33-b994-6901964241ea",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "# gpt-4\n",
-    "gpt4 = OpenAI(temperature=0, model=\"gpt-4\")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 169,
-   "id": "c8692cf6",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# gpt-3 (text-davinci-003)\n",
-    "gpt3 = OpenAI(temperature=0, model=\"text-davinci-003\")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 22,
-   "id": "fb74ec62",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# chatgpt (gpt-3.5-turbo)\n",
-    "chatgpt = OpenAI(temperature=0, model=\"gpt-3.5-turbo\")"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "1354f668",
-   "metadata": {},
-   "source": [
-    "# Benchmarking "
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "01124a3f",
-   "metadata": {},
-   "source": [
-    "### Tree Index + GPT4"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 583,
-   "id": "6f418554",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "outcomes_tree_gpt4 = bm.test(tree_index, gpt4)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 584,
-   "id": "de98ceba",
-   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/html": [
-       "<div>\n",
-       "<style scoped>\n",
-       "    .dataframe tbody tr th:only-of-type {\n",
-       "        vertical-align: middle;\n",
-       "    }\n",
-       "\n",
-       "    .dataframe tbody tr th {\n",
-       "        vertical-align: top;\n",
-       "    }\n",
-       "\n",
-       "    .dataframe thead th {\n",
-       "        text-align: right;\n",
-       "    }\n",
-       "</style>\n",
-       "<table border=\"1\" class=\"dataframe\">\n",
-       "  <thead>\n",
-       "    <tr style=\"text-align: right;\">\n",
-       "      <th></th>\n",
-       "      <th>Test Query</th>\n",
-       "      <th>Correct Response</th>\n",
-       "      <th>Correct Source</th>\n",
-       "    </tr>\n",
-       "  </thead>\n",
-       "  <tbody>\n",
-       "    <tr>\n",
-       "      <th>0</th>\n",
-       "      <td>What battles took place in New York City in th...</td>\n",
-       "      <td>True</td>\n",
-       "      <td>True</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>1</th>\n",
-       "      <td>Who was elected as the mayor after the Great D...</td>\n",
-       "      <td>False</td>\n",
-       "      <td>False</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>2</th>\n",
-       "      <td>How many tourists visited New York City in 2019?</td>\n",
-       "      <td>False</td>\n",
-       "      <td>False</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>3</th>\n",
-       "      <td>What are the airports in New York City?</td>\n",
-       "      <td>False</td>\n",
-       "      <td>False</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>4</th>\n",
-       "      <td>When was the first documented visit into New Y...</td>\n",
-       "      <td>False</td>\n",
-       "      <td>False</td>\n",
-       "    </tr>\n",
-       "  </tbody>\n",
-       "</table>\n",
-       "</div>"
-      ],
-      "text/plain": [
-       "                                          Test Query  Correct Response  \\\n",
-       "0  What battles took place in New York City in th...              True   \n",
-       "1  Who was elected as the mayor after the Great D...             False   \n",
-       "2   How many tourists visited New York City in 2019?             False   \n",
-       "3            What are the airports in New York City?             False   \n",
-       "4  When was the first documented visit into New Y...             False   \n",
-       "\n",
-       "   Correct Source  \n",
-       "0            True  \n",
-       "1           False  \n",
-       "2           False  \n",
-       "3           False  \n",
-       "4           False  "
-      ]
-     },
-     "execution_count": 584,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "analyze_outcome(outcomes_tree_gpt4)"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "f5ef33a0",
-   "metadata": {},
-   "source": [
-    "### Tree Index + GPT3"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 549,
-   "id": "ba871d2a",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "outcomes_tree_gpt3 = bm.test(tree_index, gpt3)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 550,
-   "id": "7d4c6930",
-   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/html": [
-       "<div>\n",
-       "<style scoped>\n",
-       "    .dataframe tbody tr th:only-of-type {\n",
-       "        vertical-align: middle;\n",
-       "    }\n",
-       "\n",
-       "    .dataframe tbody tr th {\n",
-       "        vertical-align: top;\n",
-       "    }\n",
-       "\n",
-       "    .dataframe thead th {\n",
-       "        text-align: right;\n",
-       "    }\n",
-       "</style>\n",
-       "<table border=\"1\" class=\"dataframe\">\n",
-       "  <thead>\n",
-       "    <tr style=\"text-align: right;\">\n",
-       "      <th></th>\n",
-       "      <th>Test Query</th>\n",
-       "      <th>Correct Response</th>\n",
-       "      <th>Correct Source</th>\n",
-       "    </tr>\n",
-       "  </thead>\n",
-       "  <tbody>\n",
-       "    <tr>\n",
-       "      <th>0</th>\n",
-       "      <td>What battles took place in New York City in th...</td>\n",
-       "      <td>True</td>\n",
-       "      <td>False</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>1</th>\n",
-       "      <td>Who was elected as the mayor after the Great D...</td>\n",
-       "      <td>False</td>\n",
-       "      <td>False</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>2</th>\n",
-       "      <td>How many tourists visited New York City in 2019?</td>\n",
-       "      <td>False</td>\n",
-       "      <td>False</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>3</th>\n",
-       "      <td>What are the airports in New York City?</td>\n",
-       "      <td>True</td>\n",
-       "      <td>False</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>4</th>\n",
-       "      <td>When was the first documented visit into New Y...</td>\n",
-       "      <td>True</td>\n",
-       "      <td>False</td>\n",
-       "    </tr>\n",
-       "  </tbody>\n",
-       "</table>\n",
-       "</div>"
-      ],
-      "text/plain": [
-       "                                          Test Query  Correct Response  \\\n",
-       "0  What battles took place in New York City in th...              True   \n",
-       "1  Who was elected as the mayor after the Great D...             False   \n",
-       "2   How many tourists visited New York City in 2019?             False   \n",
-       "3            What are the airports in New York City?              True   \n",
-       "4  When was the first documented visit into New Y...              True   \n",
-       "\n",
-       "   Correct Source  \n",
-       "0           False  \n",
-       "1           False  \n",
-       "2           False  \n",
-       "3           False  \n",
-       "4           False  "
-      ]
-     },
-     "execution_count": 550,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "analyze_outcome(outcomes_tree_gpt3)"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "30a9ba34",
-   "metadata": {},
-   "source": [
-    "### List Index + GPT4"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 18,
-   "id": "bc0f05d1",
-   "metadata": {
-    "scrolled": true
-   },
-   "outputs": [],
-   "source": [
-    "outcomes_list_gpt4 = bm.test(\n",
-    "    list_index, gpt4, response_mode=\"tree_summarize\", use_async=True\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 19,
-   "id": "2d2e879d",
-   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/html": [
-       "<div>\n",
-       "<style scoped>\n",
-       "    .dataframe tbody tr th:only-of-type {\n",
-       "        vertical-align: middle;\n",
-       "    }\n",
-       "\n",
-       "    .dataframe tbody tr th {\n",
-       "        vertical-align: top;\n",
-       "    }\n",
-       "\n",
-       "    .dataframe thead th {\n",
-       "        text-align: right;\n",
-       "    }\n",
-       "</style>\n",
-       "<table border=\"1\" class=\"dataframe\">\n",
-       "  <thead>\n",
-       "    <tr style=\"text-align: right;\">\n",
-       "      <th></th>\n",
-       "      <th>Test Query</th>\n",
-       "      <th>Correct Response</th>\n",
-       "      <th>Correct Source</th>\n",
-       "    </tr>\n",
-       "  </thead>\n",
-       "  <tbody>\n",
-       "    <tr>\n",
-       "      <th>0</th>\n",
-       "      <td>What battles took place in New York City in th...</td>\n",
-       "      <td>False</td>\n",
-       "      <td>True</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>1</th>\n",
-       "      <td>Who was elected as the mayor after the Great D...</td>\n",
-       "      <td>False</td>\n",
-       "      <td>True</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>2</th>\n",
-       "      <td>How many tourists visited New York City in 2019?</td>\n",
-       "      <td>True</td>\n",
-       "      <td>True</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>3</th>\n",
-       "      <td>What are the airports in New York City?</td>\n",
-       "      <td>True</td>\n",
-       "      <td>True</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>4</th>\n",
-       "      <td>When was the first documented visit into New Y...</td>\n",
-       "      <td>True</td>\n",
-       "      <td>True</td>\n",
-       "    </tr>\n",
-       "  </tbody>\n",
-       "</table>\n",
-       "</div>"
-      ],
-      "text/plain": [
-       "                                          Test Query  Correct Response  \\\n",
-       "0  What battles took place in New York City in th...             False   \n",
-       "1  Who was elected as the mayor after the Great D...             False   \n",
-       "2   How many tourists visited New York City in 2019?              True   \n",
-       "3            What are the airports in New York City?              True   \n",
-       "4  When was the first documented visit into New Y...              True   \n",
-       "\n",
-       "   Correct Source  \n",
-       "0            True  \n",
-       "1            True  \n",
-       "2            True  \n",
-       "3            True  \n",
-       "4            True  "
-      ]
-     },
-     "execution_count": 19,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "analyze_outcome(outcomes_list_gpt4)"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "8cba793c",
-   "metadata": {},
-   "source": [
-    "### List Index + GPT3"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 501,
-   "id": "66cfa3fa",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "outcomes_list_gpt3 = bm.test(\n",
-    "    list_index, gpt3, response_mode=\"tree_summarize\", use_async=True\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 502,
-   "id": "06bc98d8",
-   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/html": [
-       "<div>\n",
-       "<style scoped>\n",
-       "    .dataframe tbody tr th:only-of-type {\n",
-       "        vertical-align: middle;\n",
-       "    }\n",
-       "\n",
-       "    .dataframe tbody tr th {\n",
-       "        vertical-align: top;\n",
-       "    }\n",
-       "\n",
-       "    .dataframe thead th {\n",
-       "        text-align: right;\n",
-       "    }\n",
-       "</style>\n",
-       "<table border=\"1\" class=\"dataframe\">\n",
-       "  <thead>\n",
-       "    <tr style=\"text-align: right;\">\n",
-       "      <th></th>\n",
-       "      <th>Test Query</th>\n",
-       "      <th>Correct Response</th>\n",
-       "      <th>Correct Source</th>\n",
-       "    </tr>\n",
-       "  </thead>\n",
-       "  <tbody>\n",
-       "    <tr>\n",
-       "      <th>0</th>\n",
-       "      <td>What battles took place in New York City in th...</td>\n",
-       "      <td>True</td>\n",
-       "      <td>True</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>1</th>\n",
-       "      <td>Who was elected as the mayor during the Great ...</td>\n",
-       "      <td>True</td>\n",
-       "      <td>True</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>2</th>\n",
-       "      <td>How many tourists visited New York City in 2019?</td>\n",
-       "      <td>False</td>\n",
-       "      <td>True</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>3</th>\n",
-       "      <td>What are the airports in New York City?</td>\n",
-       "      <td>True</td>\n",
-       "      <td>True</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>4</th>\n",
-       "      <td>When was the first documented visit into New Y...</td>\n",
-       "      <td>True</td>\n",
-       "      <td>True</td>\n",
-       "    </tr>\n",
-       "  </tbody>\n",
-       "</table>\n",
-       "</div>"
-      ],
-      "text/plain": [
-       "                                          Test Query  Correct Response  \\\n",
-       "0  What battles took place in New York City in th...              True   \n",
-       "1  Who was elected as the mayor during the Great ...              True   \n",
-       "2   How many tourists visited New York City in 2019?             False   \n",
-       "3            What are the airports in New York City?              True   \n",
-       "4  When was the first documented visit into New Y...              True   \n",
-       "\n",
-       "   Correct Source  \n",
-       "0            True  \n",
-       "1            True  \n",
-       "2            True  \n",
-       "3            True  \n",
-       "4            True  "
-      ]
-     },
-     "execution_count": 502,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "analyze_outcome(outcomes_list_gpt3)"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "c4d0b3eb",
-   "metadata": {},
-   "source": [
-    "### List Index + ChatGPT"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 23,
-   "id": "f146c74e",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "outcomes_list_chatgpt = bm.test(\n",
-    "    list_index, chatgpt, response_mode=\"tree_summarize\", use_async=True\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 24,
-   "id": "8eb9d392",
-   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/html": [
-       "<div>\n",
-       "<style scoped>\n",
-       "    .dataframe tbody tr th:only-of-type {\n",
-       "        vertical-align: middle;\n",
-       "    }\n",
-       "\n",
-       "    .dataframe tbody tr th {\n",
-       "        vertical-align: top;\n",
-       "    }\n",
-       "\n",
-       "    .dataframe thead th {\n",
-       "        text-align: right;\n",
-       "    }\n",
-       "</style>\n",
-       "<table border=\"1\" class=\"dataframe\">\n",
-       "  <thead>\n",
-       "    <tr style=\"text-align: right;\">\n",
-       "      <th></th>\n",
-       "      <th>Test Query</th>\n",
-       "      <th>Correct Response</th>\n",
-       "      <th>Correct Source</th>\n",
-       "    </tr>\n",
-       "  </thead>\n",
-       "  <tbody>\n",
-       "    <tr>\n",
-       "      <th>0</th>\n",
-       "      <td>What battles took place in New York City in th...</td>\n",
-       "      <td>False</td>\n",
-       "      <td>True</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>1</th>\n",
-       "      <td>Who was elected as the mayor after the Great D...</td>\n",
-       "      <td>False</td>\n",
-       "      <td>True</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>2</th>\n",
-       "      <td>How many tourists visited New York City in 2019?</td>\n",
-       "      <td>False</td>\n",
-       "      <td>True</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>3</th>\n",
-       "      <td>What are the airports in New York City?</td>\n",
-       "      <td>True</td>\n",
-       "      <td>True</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>4</th>\n",
-       "      <td>When was the first documented visit into New Y...</td>\n",
-       "      <td>True</td>\n",
-       "      <td>True</td>\n",
-       "    </tr>\n",
-       "  </tbody>\n",
-       "</table>\n",
-       "</div>"
-      ],
-      "text/plain": [
-       "                                          Test Query  Correct Response  \\\n",
-       "0  What battles took place in New York City in th...             False   \n",
-       "1  Who was elected as the mayor after the Great D...             False   \n",
-       "2   How many tourists visited New York City in 2019?             False   \n",
-       "3            What are the airports in New York City?              True   \n",
-       "4  When was the first documented visit into New Y...              True   \n",
-       "\n",
-       "   Correct Source  \n",
-       "0            True  \n",
-       "1            True  \n",
-       "2            True  \n",
-       "3            True  \n",
-       "4            True  "
-      ]
-     },
-     "execution_count": 24,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "analyze_outcome(outcomes_list_chatgpt)"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "38fc1438",
-   "metadata": {},
-   "source": [
-    "### Vector Store Index + GPT4 "
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 487,
-   "id": "5349d1e7",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "outcomes_vector_gpt4 = bm.test(vector_index, gpt4)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 488,
-   "id": "7fc53e19",
-   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/html": [
-       "<div>\n",
-       "<style scoped>\n",
-       "    .dataframe tbody tr th:only-of-type {\n",
-       "        vertical-align: middle;\n",
-       "    }\n",
-       "\n",
-       "    .dataframe tbody tr th {\n",
-       "        vertical-align: top;\n",
-       "    }\n",
-       "\n",
-       "    .dataframe thead th {\n",
-       "        text-align: right;\n",
-       "    }\n",
-       "</style>\n",
-       "<table border=\"1\" class=\"dataframe\">\n",
-       "  <thead>\n",
-       "    <tr style=\"text-align: right;\">\n",
-       "      <th></th>\n",
-       "      <th>Test Query</th>\n",
-       "      <th>Correct Response</th>\n",
-       "      <th>Correct Source</th>\n",
-       "    </tr>\n",
-       "  </thead>\n",
-       "  <tbody>\n",
-       "    <tr>\n",
-       "      <th>0</th>\n",
-       "      <td>What battles took place in New York City in th...</td>\n",
-       "      <td>True</td>\n",
-       "      <td>True</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>1</th>\n",
-       "      <td>Who was elected as the mayor during the Great ...</td>\n",
-       "      <td>True</td>\n",
-       "      <td>True</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>2</th>\n",
-       "      <td>How many tourists visited New York City in 2019?</td>\n",
-       "      <td>False</td>\n",
-       "      <td>False</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>3</th>\n",
-       "      <td>What are the airports in New York City?</td>\n",
-       "      <td>True</td>\n",
-       "      <td>True</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>4</th>\n",
-       "      <td>When was the first documented visit into New Y...</td>\n",
-       "      <td>True</td>\n",
-       "      <td>True</td>\n",
-       "    </tr>\n",
-       "  </tbody>\n",
-       "</table>\n",
-       "</div>"
-      ],
-      "text/plain": [
-       "                                          Test Query  Correct Response  \\\n",
-       "0  What battles took place in New York City in th...              True   \n",
-       "1  Who was elected as the mayor during the Great ...              True   \n",
-       "2   How many tourists visited New York City in 2019?             False   \n",
-       "3            What are the airports in New York City?              True   \n",
-       "4  When was the first documented visit into New Y...              True   \n",
-       "\n",
-       "   Correct Source  \n",
-       "0            True  \n",
-       "1            True  \n",
-       "2           False  \n",
-       "3            True  \n",
-       "4            True  "
-      ]
-     },
-     "execution_count": 488,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "analyze_outcome(outcomes_vector_gpt4)"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "70eb711f",
-   "metadata": {},
-   "source": [
-    "### Vector Store Index + GPT3"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 644,
-   "id": "e35ebdf9",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "outcomes_vector_gpt3 = bm.test(vector_index, gpt3)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 645,
-   "id": "95c49697",
-   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/html": [
-       "<div>\n",
-       "<style scoped>\n",
-       "    .dataframe tbody tr th:only-of-type {\n",
-       "        vertical-align: middle;\n",
-       "    }\n",
-       "\n",
-       "    .dataframe tbody tr th {\n",
-       "        vertical-align: top;\n",
-       "    }\n",
-       "\n",
-       "    .dataframe thead th {\n",
-       "        text-align: right;\n",
-       "    }\n",
-       "</style>\n",
-       "<table border=\"1\" class=\"dataframe\">\n",
-       "  <thead>\n",
-       "    <tr style=\"text-align: right;\">\n",
-       "      <th></th>\n",
-       "      <th>Test Query</th>\n",
-       "      <th>Correct Response</th>\n",
-       "      <th>Correct Source</th>\n",
-       "    </tr>\n",
-       "  </thead>\n",
-       "  <tbody>\n",
-       "    <tr>\n",
-       "      <th>0</th>\n",
-       "      <td>What battles took place in New York City in th...</td>\n",
-       "      <td>True</td>\n",
-       "      <td>True</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>1</th>\n",
-       "      <td>Who was elected as the mayor after the Great D...</td>\n",
-       "      <td>True</td>\n",
-       "      <td>False</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>2</th>\n",
-       "      <td>How many tourists visited New York City in 2019?</td>\n",
-       "      <td>False</td>\n",
-       "      <td>False</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>3</th>\n",
-       "      <td>What are the airports in New York City?</td>\n",
-       "      <td>True</td>\n",
-       "      <td>False</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>4</th>\n",
-       "      <td>When was the first documented visit into New Y...</td>\n",
-       "      <td>True</td>\n",
-       "      <td>False</td>\n",
-       "    </tr>\n",
-       "  </tbody>\n",
-       "</table>\n",
-       "</div>"
-      ],
-      "text/plain": [
-       "                                          Test Query  Correct Response  \\\n",
-       "0  What battles took place in New York City in th...              True   \n",
-       "1  Who was elected as the mayor after the Great D...              True   \n",
-       "2   How many tourists visited New York City in 2019?             False   \n",
-       "3            What are the airports in New York City?              True   \n",
-       "4  When was the first documented visit into New Y...              True   \n",
-       "\n",
-       "   Correct Source  \n",
-       "0            True  \n",
-       "1           False  \n",
-       "2           False  \n",
-       "3           False  \n",
-       "4           False  "
-      ]
-     },
-     "execution_count": 645,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "analyze_outcome(outcomes_vector_gpt3)"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "a36ba2ee",
-   "metadata": {},
-   "source": [
-    "# LLM based Evaluation"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 646,
-   "id": "59ff561c",
-   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/html": [
-       "<div>\n",
-       "<style scoped>\n",
-       "    .dataframe tbody tr th:only-of-type {\n",
-       "        vertical-align: middle;\n",
-       "    }\n",
-       "\n",
-       "    .dataframe tbody tr th {\n",
-       "        vertical-align: top;\n",
-       "    }\n",
-       "\n",
-       "    .dataframe thead th {\n",
-       "        text-align: right;\n",
-       "    }\n",
-       "</style>\n",
-       "<table border=\"1\" class=\"dataframe\">\n",
-       "  <thead>\n",
-       "    <tr style=\"text-align: right;\">\n",
-       "      <th></th>\n",
-       "      <th>Test Query</th>\n",
-       "      <th>Correct Response</th>\n",
-       "      <th>Correct Source</th>\n",
-       "    </tr>\n",
-       "  </thead>\n",
-       "  <tbody>\n",
-       "    <tr>\n",
-       "      <th>0</th>\n",
-       "      <td>What battles took place in New York City in th...</td>\n",
-       "      <td>True</td>\n",
-       "      <td>True</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>1</th>\n",
-       "      <td>Who was elected as the mayor after the Great D...</td>\n",
-       "      <td>True</td>\n",
-       "      <td>False</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>2</th>\n",
-       "      <td>How many tourists visited New York City in 2019?</td>\n",
-       "      <td>False</td>\n",
-       "      <td>False</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>3</th>\n",
-       "      <td>What are the airports in New York City?</td>\n",
-       "      <td>True</td>\n",
-       "      <td>False</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>4</th>\n",
-       "      <td>When was the first documented visit into New Y...</td>\n",
-       "      <td>True</td>\n",
-       "      <td>False</td>\n",
-       "    </tr>\n",
-       "  </tbody>\n",
-       "</table>\n",
-       "</div>"
-      ],
-      "text/plain": [
-       "                                          Test Query  Correct Response  \\\n",
-       "0  What battles took place in New York City in th...              True   \n",
-       "1  Who was elected as the mayor after the Great D...              True   \n",
-       "2   How many tourists visited New York City in 2019?             False   \n",
-       "3            What are the airports in New York City?              True   \n",
-       "4  When was the first documented visit into New Y...              True   \n",
-       "\n",
-       "   Correct Source  \n",
-       "0            True  \n",
-       "1           False  \n",
-       "2           False  \n",
-       "3           False  \n",
-       "4           False  "
-      ]
-     },
-     "execution_count": 646,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "analyze_outcome(outcomes_vector_gpt3)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 647,
-   "id": "e4ffaca6",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "eval_gpt4 = analyze_outcome_llm(outcomes_vector_gpt3, gpt4)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 657,
-   "id": "85c4e415",
-   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/html": [
-       "<div>\n",
-       "<style scoped>\n",
-       "    .dataframe tbody tr th:only-of-type {\n",
-       "        vertical-align: middle;\n",
-       "    }\n",
-       "\n",
-       "    .dataframe tbody tr th {\n",
-       "        vertical-align: top;\n",
-       "    }\n",
-       "\n",
-       "    .dataframe thead th {\n",
-       "        text-align: right;\n",
-       "    }\n",
-       "</style>\n",
-       "<table border=\"1\" class=\"dataframe\">\n",
-       "  <thead>\n",
-       "    <tr style=\"text-align: right;\">\n",
-       "      <th></th>\n",
-       "      <th>Test Query</th>\n",
-       "      <th>Correct Response (LLM)</th>\n",
-       "      <th>Correct Source (LLM)</th>\n",
-       "      <th>Eval (LLM)</th>\n",
-       "    </tr>\n",
-       "  </thead>\n",
-       "  <tbody>\n",
-       "    <tr>\n",
-       "      <th>0</th>\n",
-       "      <td>What battles took place in New York City in th...</td>\n",
-       "      <td>True</td>\n",
-       "      <td>True</td>\n",
-       "      <td>Context is relevant: True\\nAnswer is correct: ...</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>1</th>\n",
-       "      <td>Who was elected as the mayor after the Great D...</td>\n",
-       "      <td>True</td>\n",
-       "      <td>False</td>\n",
-       "      <td>Context is relevant: False\\nAnswer is correct:...</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>2</th>\n",
-       "      <td>How many tourists visited New York City in 2019?</td>\n",
-       "      <td>True</td>\n",
-       "      <td>False</td>\n",
-       "      <td>Context is relevant: False\\nAnswer is correct:...</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>3</th>\n",
-       "      <td>What are the airports in New York City?</td>\n",
-       "      <td>True</td>\n",
-       "      <td>False</td>\n",
-       "      <td>Context is relevant: False\\nAnswer is correct:...</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>4</th>\n",
-       "      <td>When was the first documented visit into New Y...</td>\n",
-       "      <td>True</td>\n",
-       "      <td>False</td>\n",
-       "      <td>Context is relevant: False\\nAnswer is correct:...</td>\n",
-       "    </tr>\n",
-       "  </tbody>\n",
-       "</table>\n",
-       "</div>"
-      ],
-      "text/plain": [
-       "                                          Test Query  Correct Response (LLM)  \\\n",
-       "0  What battles took place in New York City in th...                    True   \n",
-       "1  Who was elected as the mayor after the Great D...                    True   \n",
-       "2   How many tourists visited New York City in 2019?                    True   \n",
-       "3            What are the airports in New York City?                    True   \n",
-       "4  When was the first documented visit into New Y...                    True   \n",
-       "\n",
-       "   Correct Source (LLM)                                         Eval (LLM)  \n",
-       "0                  True  Context is relevant: True\\nAnswer is correct: ...  \n",
-       "1                 False  Context is relevant: False\\nAnswer is correct:...  \n",
-       "2                 False  Context is relevant: False\\nAnswer is correct:...  \n",
-       "3                 False  Context is relevant: False\\nAnswer is correct:...  \n",
-       "4                 False  Context is relevant: False\\nAnswer is correct:...  "
-      ]
-     },
-     "execution_count": 657,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "eval_gpt4"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 651,
-   "id": "3efb66d6",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "eval_chatgpt = analyze_outcome_llm(outcomes_vector_gpt3, chatgpt)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 652,
-   "id": "4c452767",
-   "metadata": {
-    "scrolled": true
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/html": [
-       "<div>\n",
-       "<style scoped>\n",
-       "    .dataframe tbody tr th:only-of-type {\n",
-       "        vertical-align: middle;\n",
-       "    }\n",
-       "\n",
-       "    .dataframe tbody tr th {\n",
-       "        vertical-align: top;\n",
-       "    }\n",
-       "\n",
-       "    .dataframe thead th {\n",
-       "        text-align: right;\n",
-       "    }\n",
-       "</style>\n",
-       "<table border=\"1\" class=\"dataframe\">\n",
-       "  <thead>\n",
-       "    <tr style=\"text-align: right;\">\n",
-       "      <th></th>\n",
-       "      <th>Test Query</th>\n",
-       "      <th>Correct Response (LLM)</th>\n",
-       "      <th>Correct Source (LLM)</th>\n",
-       "      <th>Eval (LLM)</th>\n",
-       "    </tr>\n",
-       "  </thead>\n",
-       "  <tbody>\n",
-       "    <tr>\n",
-       "      <th>0</th>\n",
-       "      <td>What battles took place in New York City in th...</td>\n",
-       "      <td>True</td>\n",
-       "      <td>True</td>\n",
-       "      <td>\\n\\nContext is relevant: True\\nAnswer is corre...</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>1</th>\n",
-       "      <td>Who was elected as the mayor after the Great D...</td>\n",
-       "      <td>True</td>\n",
-       "      <td>True</td>\n",
-       "      <td>\\n\\nContext is relevant: True\\nAnswer is corre...</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>2</th>\n",
-       "      <td>How many tourists visited New York City in 2019?</td>\n",
-       "      <td>False</td>\n",
-       "      <td>False</td>\n",
-       "      <td>\\n\\nContext is relevant: False\\nAnswer is corr...</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>3</th>\n",
-       "      <td>What are the airports in New York City?</td>\n",
-       "      <td>True</td>\n",
-       "      <td>False</td>\n",
-       "      <td>\\n\\nContext is relevant: False\\nAnswer is corr...</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>4</th>\n",
-       "      <td>When was the first documented visit into New Y...</td>\n",
-       "      <td>False</td>\n",
-       "      <td>True</td>\n",
-       "      <td>\\n\\nContext is relevant: True\\nAnswer is corre...</td>\n",
-       "    </tr>\n",
-       "  </tbody>\n",
-       "</table>\n",
-       "</div>"
-      ],
-      "text/plain": [
-       "                                          Test Query  Correct Response (LLM)  \\\n",
-       "0  What battles took place in New York City in th...                    True   \n",
-       "1  Who was elected as the mayor after the Great D...                    True   \n",
-       "2   How many tourists visited New York City in 2019?                   False   \n",
-       "3            What are the airports in New York City?                    True   \n",
-       "4  When was the first documented visit into New Y...                   False   \n",
-       "\n",
-       "   Correct Source (LLM)                                         Eval (LLM)  \n",
-       "0                  True  \\n\\nContext is relevant: True\\nAnswer is corre...  \n",
-       "1                  True  \\n\\nContext is relevant: True\\nAnswer is corre...  \n",
-       "2                 False  \\n\\nContext is relevant: False\\nAnswer is corr...  \n",
-       "3                 False  \\n\\nContext is relevant: False\\nAnswer is corr...  \n",
-       "4                  True  \\n\\nContext is relevant: True\\nAnswer is corre...  "
-      ]
-     },
-     "execution_count": 652,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "eval_chatgpt"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 649,
-   "id": "61e8dad2",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "eval_gpt3 = analyze_outcome_llm(outcomes_vector_gpt3, gpt3)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 650,
-   "id": "170400c3",
-   "metadata": {
-    "scrolled": true
-   },
-   "outputs": [
-    {
-     "data": {
-      "text/html": [
-       "<div>\n",
-       "<style scoped>\n",
-       "    .dataframe tbody tr th:only-of-type {\n",
-       "        vertical-align: middle;\n",
-       "    }\n",
-       "\n",
-       "    .dataframe tbody tr th {\n",
-       "        vertical-align: top;\n",
-       "    }\n",
-       "\n",
-       "    .dataframe thead th {\n",
-       "        text-align: right;\n",
-       "    }\n",
-       "</style>\n",
-       "<table border=\"1\" class=\"dataframe\">\n",
-       "  <thead>\n",
-       "    <tr style=\"text-align: right;\">\n",
-       "      <th></th>\n",
-       "      <th>Test Query</th>\n",
-       "      <th>Correct Response (LLM)</th>\n",
-       "      <th>Correct Source (LLM)</th>\n",
-       "      <th>Eval (LLM)</th>\n",
-       "    </tr>\n",
-       "  </thead>\n",
-       "  <tbody>\n",
-       "    <tr>\n",
-       "      <th>0</th>\n",
-       "      <td>What battles took place in New York City in th...</td>\n",
-       "      <td>True</td>\n",
-       "      <td>True</td>\n",
-       "      <td>\\n\\nContext is relevant: True\\nAnswer is corre...</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>1</th>\n",
-       "      <td>Who was elected as the mayor after the Great D...</td>\n",
-       "      <td>True</td>\n",
-       "      <td>True</td>\n",
-       "      <td>\\n\\nContext is relevant: True\\nAnswer is corre...</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>2</th>\n",
-       "      <td>How many tourists visited New York City in 2019?</td>\n",
-       "      <td>False</td>\n",
-       "      <td>False</td>\n",
-       "      <td>\\n\\nContext is relevant: False\\nAnswer is corr...</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>3</th>\n",
-       "      <td>What are the airports in New York City?</td>\n",
-       "      <td>True</td>\n",
-       "      <td>True</td>\n",
-       "      <td>\\n\\nContext is relevant: True\\nAnswer is corre...</td>\n",
-       "    </tr>\n",
-       "    <tr>\n",
-       "      <th>4</th>\n",
-       "      <td>When was the first documented visit into New Y...</td>\n",
-       "      <td>True</td>\n",
-       "      <td>True</td>\n",
-       "      <td>\\n\\nContext is relevant: True\\nAnswer is corre...</td>\n",
-       "    </tr>\n",
-       "  </tbody>\n",
-       "</table>\n",
-       "</div>"
-      ],
-      "text/plain": [
-       "                                          Test Query  Correct Response (LLM)  \\\n",
-       "0  What battles took place in New York City in th...                    True   \n",
-       "1  Who was elected as the mayor after the Great D...                    True   \n",
-       "2   How many tourists visited New York City in 2019?                   False   \n",
-       "3            What are the airports in New York City?                    True   \n",
-       "4  When was the first documented visit into New Y...                    True   \n",
-       "\n",
-       "   Correct Source (LLM)                                         Eval (LLM)  \n",
-       "0                  True  \\n\\nContext is relevant: True\\nAnswer is corre...  \n",
-       "1                  True  \\n\\nContext is relevant: True\\nAnswer is corre...  \n",
-       "2                 False  \\n\\nContext is relevant: False\\nAnswer is corr...  \n",
-       "3                  True  \\n\\nContext is relevant: True\\nAnswer is corre...  \n",
-       "4                  True  \\n\\nContext is relevant: True\\nAnswer is corre...  "
-      ]
-     },
-     "execution_count": 650,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "eval_gpt3"
-   ]
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "Python 3 (ipykernel)",
-   "language": "python",
-   "name": "python3"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 3
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython3",
-   "version": "3.10.9"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 5
+    "cells": [
+        {
+            "cell_type": "code",
+            "execution_count": 17,
+            "id": "9080b39e",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "import logging, sys\n",
+                "\n",
+                "logging.basicConfig(stream=sys.stdout, level=logging.INFO)\n",
+                "logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))\n",
+                "\n",
+                "# NOTE: logging is disabled below; comment out the next line to re-enable it\n",
+                "logging.disable(sys.maxsize)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 2,
+            "id": "7de92ce3",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# NOTE: only necessary for querying with `use_async=True` in notebook\n",
+                "import nest_asyncio\n",
+                "\n",
+                "nest_asyncio.apply()"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 3,
+            "id": "f1a9eb90-335c-4214-8bb6-fd1edbe3ccbd",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# My OpenAI Key\n",
+                "import os\n",
+                "\n",
+                "os.environ[\"OPENAI_API_KEY\"] = \"\""
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 3,
+            "id": "8d0b2364-4806-4656-81e7-3f6e4b910b5b",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "from llama_index import (\n",
+                "    TreeIndex,\n",
+                "    SimpleDirectoryReader,\n",
+                "    LLMPredictor,\n",
+                "    VectorStoreIndex,\n",
+                "    SummaryIndex,\n",
+                "    PromptTemplate,\n",
+                "    ServiceContext,\n",
+                ")\n",
+                "from llama_index.indices.base import BaseIndex\n",
+                "from llama_index.llms.base import LLM\n",
+                "from llama_index.llms import OpenAI\n",
+                "from llama_index.response.schema import Response\n",
+                "import pandas as pd\n",
+                "from typing import Tuple"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "707662e5",
+            "metadata": {},
+            "source": [
+                "# Setup data"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 2,
+            "id": "b4b4387b-413e-4016-ba1e-88b3d9410a38",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# fetch \"New York City\" page from Wikipedia\n",
+                "from pathlib import Path\n",
+                "\n",
+                "import requests\n",
+                "\n",
+                "response = requests.get(\n",
+                "    \"https://en.wikipedia.org/w/api.php\",\n",
+                "    params={\n",
+                "        \"action\": \"query\",\n",
+                "        \"format\": \"json\",\n",
+                "        \"titles\": \"New York City\",\n",
+                "        \"prop\": \"extracts\",\n",
+                "        # 'exintro': True,\n",
+                "        \"explaintext\": True,\n",
+                "    },\n",
+                ").json()\n",
+                "page = next(iter(response[\"query\"][\"pages\"].values()))\n",
+                "nyc_text = page[\"extract\"]\n",
+                "\n",
+                "data_path = Path(\"data\")\n",
+                "if not data_path.exists():\n",
+                "    Path.mkdir(data_path)\n",
+                "\n",
+                "with open(\"data/nyc_text.txt\", \"w\") as fp:\n",
+                "    fp.write(nyc_text)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 4,
+            "id": "523fbebe-6e79-4d7b-b400-188b711a0e8f",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "DEBUG:llama_index.readers.file.base:> [SimpleDirectoryReader] Total files added: 1\n",
+                        "> [SimpleDirectoryReader] Total files added: 1\n"
+                    ]
+                }
+            ],
+            "source": [
+                "documents = SimpleDirectoryReader(\"data\").load_data()"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "f4a269bd",
+            "metadata": {},
+            "source": [
+                "# Setup benchmark"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 5,
+            "id": "62f01ddf",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "from dataclasses import dataclass\n",
+                "from typing import List"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 6,
+            "id": "4ff13cd4",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "@dataclass\n",
+                "class TestCase:\n",
+                "    query: str\n",
+                "    must_contain: List[str]"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 7,
+            "id": "9c653b72",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "@dataclass\n",
+                "class TestOutcome:\n",
+                "    test: TestCase\n",
+                "    response: Response\n",
+                "\n",
+                "    @property\n",
+                "    def is_correct_response(self) -> bool:\n",
+                "        is_correct = True\n",
+                "        for answer in self.test.must_contain:\n",
+                "            if answer not in self.response.response:\n",
+                "                is_correct = False\n",
+                "        return is_correct\n",
+                "\n",
+                "    @property\n",
+                "    def is_correct_source(self) -> bool:\n",
+                "        is_correct = True\n",
+                "        for answer in self.test.must_contain:\n",
+                "            if all(\n",
+                "                answer not in node.source_text for node in self.response.source_nodes\n",
+                "            ):\n",
+                "                is_correct = False\n",
+                "        return is_correct"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 8,
+            "id": "b9cd18ae",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "class Benchmark:\n",
+                "    def __init__(self, tests: List[TestCase]) -> None:\n",
+                "        self._tests = tests\n",
+                "\n",
+                "    def test(self, index: BaseIndex, llm: LLM, **kwargs) -> List[TestOutcome]:\n",
+                "        outcomes: List[TestOutcome] = []\n",
+                "        service_context = ServiceContext.from_defaults(llm=llm)\n",
+                "        for test in self._tests:\n",
+                "            query_engine = index.as_query_engine(\n",
+                "                service_context=service_context, **kwargs\n",
+                "            )\n",
+                "            response = query_engine.query(\n",
+                "                test.query,\n",
+                "            )\n",
+                "            outcome = TestOutcome(test=test, response=response)\n",
+                "            outcomes.append(outcome)\n",
+                "        return outcomes"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 9,
+            "id": "8edad985",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "def analyze_outcome(outcomes: List[TestOutcome]) -> None:\n",
+                "    rows = []\n",
+                "    for outcome in outcomes:\n",
+                "        row = [\n",
+                "            outcome.test.query,\n",
+                "            outcome.is_correct_response,\n",
+                "            outcome.is_correct_source,\n",
+                "        ]\n",
+                "        rows.append(row)\n",
+                "    df = pd.DataFrame(\n",
+                "        rows, columns=[\"Test Query\", \"Correct Response\", \"Correct Source\"]\n",
+                "    )\n",
+                "    return df"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 10,
+            "id": "4bc38077",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "test_battle = TestCase(\n",
+                "    query=\"What battles took place in New York City in the American Revolution?\",\n",
+                "    must_contain=[\"Battle of Long Island\"],\n",
+                ")\n",
+                "\n",
+                "test_mayor = TestCase(\n",
+                "    query=\"Who was elected as the mayor after the Great Depression?\",\n",
+                "    must_contain=[\"Fiorello La Guardia\"],\n",
+                ")\n",
+                "\n",
+                "test_tourists = TestCase(\n",
+                "    query=\"How many tourists visited New York City in 2019?\",\n",
+                "    must_contain=[\"66.6 million\"],\n",
+                ")\n",
+                "test_airport = TestCase(\n",
+                "    query=\"What are the airports in New York City?\", must_contain=[\"LaGuardia Airport\"]\n",
+                ")\n",
+                "test_visit = TestCase(\n",
+                "    query=\"When was the first documented visit into New York Harbor?\",\n",
+                "    must_contain=[\"1524\"],\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 11,
+            "id": "f159dadb",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "bm = Benchmark(\n",
+                "    [\n",
+                "        test_battle,\n",
+                "        test_mayor,\n",
+                "        test_tourists,\n",
+                "        test_airport,\n",
+                "        test_visit,\n",
+                "    ]\n",
+                ")"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "65ddbd56",
+            "metadata": {},
+            "source": [
+                "# LLM based evaluation"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 592,
+            "id": "ed175de5",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "EVAL_PROMPT_TMPL = (\n",
+                "    \"Given the question below. \\n\"\n",
+                "    \"---------------------\\n\"\n",
+                "    \"{query_str}\"\n",
+                "    \"\\n---------------------\\n\"\n",
+                "    \"Decide if the following retrieved context is relevant. \\n\"\n",
+                "    \"\\n---------------------\\n\"\n",
+                "    \"{context_str}\"\n",
+                "    \"\\n---------------------\\n\"\n",
+                "    \"Then decide if the answer is correct. \\n\"\n",
+                "    \"\\n---------------------\\n\"\n",
+                "    \"{answer_str}\"\n",
+                "    \"\\n---------------------\\n\"\n",
+                "    \"Answer in the following format:\\n\"\n",
+                "    \"'Context is relevant: <True>\\nAnswer is correct: <True>' \"\n",
+                "    \"and explain why.\"\n",
+                ")\n",
+                "\n",
+                "DEFAULT_EVAL_PROMPT = PromptTemplate(EVAL_PROMPT_TMPL)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 593,
+            "id": "93c498b6",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "import re\n",
+                "\n",
+                "\n",
+                "def extract_eval_result(result_str: str):\n",
+                "    boolean_pattern = r\"(True|False)\"\n",
+                "    matches = re.findall(boolean_pattern, result_str)\n",
+                "    return [match == \"True\" for match in matches]"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 594,
+            "id": "4c8109c3",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "def analyze_outcome_llm_single(\n",
+                "    outcome: TestOutcome, llm_predictor: LLMPredictor\n",
+                ") -> Tuple[bool, bool, str]:\n",
+                "    try:\n",
+                "        source_text = outcome.response.source_nodes[0].source_text\n",
+                "    except:\n",
+                "        source_text = \"Failed to retrieve any context\"\n",
+                "    result_str, _ = llm_predictor.predict(\n",
+                "        DEFAULT_EVAL_PROMPT,\n",
+                "        query_str=outcome.test.query,\n",
+                "        context_str=source_text,\n",
+                "        answer_str=outcome.response.response,\n",
+                "    )\n",
+                "    is_context_relevant, is_answer_correct = extract_eval_result(result_str)\n",
+                "    return is_answer_correct, is_context_relevant, result_str\n",
+                "\n",
+                "\n",
+                "def analyze_outcome_llm(\n",
+                "    outcomes: List[TestOutcome], llm_predictor: LLMPredictor\n",
+                ") -> None:\n",
+                "    rows = []\n",
+                "    for outcome in outcomes:\n",
+                "        is_correct_response, is_correct_source, result_str = analyze_outcome_llm_single(\n",
+                "            outcome, llm_predictor\n",
+                "        )\n",
+                "        row = [outcome.test.query, is_correct_response, is_correct_source, result_str]\n",
+                "        rows.append(row)\n",
+                "    df = pd.DataFrame(\n",
+                "        rows,\n",
+                "        columns=[\n",
+                "            \"Test Query\",\n",
+                "            \"Correct Response (LLM)\",\n",
+                "            \"Correct Source (LLM)\",\n",
+                "            \"Eval (LLM)\",\n",
+                "        ],\n",
+                "    )\n",
+                "    return df"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "5a9f43a6",
+            "metadata": {},
+            "source": [
+                "# Build Indices"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 643,
+            "id": "790bad05",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "vector_index = VectorStoreIndex.from_documents(\n",
+                "    documents,\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 473,
+            "id": "64c970e0",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "list_index = SummaryIndex.from_documents(\n",
+                "    documents,\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 468,
+            "id": "bacc4f1c",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "tree_index = TreeIndex.from_documents(documents)"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "5b2e7fdd",
+            "metadata": {},
+            "source": [
+                "# Create LLMPredictors"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 12,
+            "id": "4766ac56-ac8d-4f33-b994-6901964241ea",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "# gpt-4\n",
+                "gpt4 = OpenAI(temperature=0, model=\"gpt-4\")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 169,
+            "id": "c8692cf6",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# gpt-3 (text-davinci-003)\n",
+                "gpt3 = OpenAI(temperature=0, model=\"text-davinci-003\")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 22,
+            "id": "fb74ec62",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# chatgpt (gpt-3.5-turbo)\n",
+                "chatgpt = OpenAI(temperature=0, model=\"gpt-3.5-turbo\")"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "1354f668",
+            "metadata": {},
+            "source": [
+                "# Benchmarking "
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "01124a3f",
+            "metadata": {},
+            "source": [
+                "### Tree Index + GPT4"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 583,
+            "id": "6f418554",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "outcomes_tree_gpt4 = bm.test(tree_index, gpt4)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 584,
+            "id": "de98ceba",
+            "metadata": {},
+            "outputs": [
+                {
+                    "data": {
+                        "text/html": [
+                            "<div>\n",
+                            "<style scoped>\n",
+                            "    .dataframe tbody tr th:only-of-type {\n",
+                            "        vertical-align: middle;\n",
+                            "    }\n",
+                            "\n",
+                            "    .dataframe tbody tr th {\n",
+                            "        vertical-align: top;\n",
+                            "    }\n",
+                            "\n",
+                            "    .dataframe thead th {\n",
+                            "        text-align: right;\n",
+                            "    }\n",
+                            "</style>\n",
+                            "<table border=\"1\" class=\"dataframe\">\n",
+                            "  <thead>\n",
+                            "    <tr style=\"text-align: right;\">\n",
+                            "      <th></th>\n",
+                            "      <th>Test Query</th>\n",
+                            "      <th>Correct Response</th>\n",
+                            "      <th>Correct Source</th>\n",
+                            "    </tr>\n",
+                            "  </thead>\n",
+                            "  <tbody>\n",
+                            "    <tr>\n",
+                            "      <th>0</th>\n",
+                            "      <td>What battles took place in New York City in th...</td>\n",
+                            "      <td>True</td>\n",
+                            "      <td>True</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>1</th>\n",
+                            "      <td>Who was elected as the mayor after the Great D...</td>\n",
+                            "      <td>False</td>\n",
+                            "      <td>False</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>2</th>\n",
+                            "      <td>How many tourists visited New York City in 2019?</td>\n",
+                            "      <td>False</td>\n",
+                            "      <td>False</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>3</th>\n",
+                            "      <td>What are the airports in New York City?</td>\n",
+                            "      <td>False</td>\n",
+                            "      <td>False</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>4</th>\n",
+                            "      <td>When was the first documented visit into New Y...</td>\n",
+                            "      <td>False</td>\n",
+                            "      <td>False</td>\n",
+                            "    </tr>\n",
+                            "  </tbody>\n",
+                            "</table>\n",
+                            "</div>"
+                        ],
+                        "text/plain": [
+                            "                                          Test Query  Correct Response  \\\n",
+                            "0  What battles took place in New York City in th...              True   \n",
+                            "1  Who was elected as the mayor after the Great D...             False   \n",
+                            "2   How many tourists visited New York City in 2019?             False   \n",
+                            "3            What are the airports in New York City?             False   \n",
+                            "4  When was the first documented visit into New Y...             False   \n",
+                            "\n",
+                            "   Correct Source  \n",
+                            "0            True  \n",
+                            "1           False  \n",
+                            "2           False  \n",
+                            "3           False  \n",
+                            "4           False  "
+                        ]
+                    },
+                    "execution_count": 584,
+                    "metadata": {},
+                    "output_type": "execute_result"
+                }
+            ],
+            "source": [
+                "analyze_outcome(outcomes_tree_gpt4)"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "f5ef33a0",
+            "metadata": {},
+            "source": [
+                "### Tree Index + GPT3"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 549,
+            "id": "ba871d2a",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "outcomes_tree_gpt3 = bm.test(tree_index, gpt3)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 550,
+            "id": "7d4c6930",
+            "metadata": {},
+            "outputs": [
+                {
+                    "data": {
+                        "text/html": [
+                            "<div>\n",
+                            "<style scoped>\n",
+                            "    .dataframe tbody tr th:only-of-type {\n",
+                            "        vertical-align: middle;\n",
+                            "    }\n",
+                            "\n",
+                            "    .dataframe tbody tr th {\n",
+                            "        vertical-align: top;\n",
+                            "    }\n",
+                            "\n",
+                            "    .dataframe thead th {\n",
+                            "        text-align: right;\n",
+                            "    }\n",
+                            "</style>\n",
+                            "<table border=\"1\" class=\"dataframe\">\n",
+                            "  <thead>\n",
+                            "    <tr style=\"text-align: right;\">\n",
+                            "      <th></th>\n",
+                            "      <th>Test Query</th>\n",
+                            "      <th>Correct Response</th>\n",
+                            "      <th>Correct Source</th>\n",
+                            "    </tr>\n",
+                            "  </thead>\n",
+                            "  <tbody>\n",
+                            "    <tr>\n",
+                            "      <th>0</th>\n",
+                            "      <td>What battles took place in New York City in th...</td>\n",
+                            "      <td>True</td>\n",
+                            "      <td>False</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>1</th>\n",
+                            "      <td>Who was elected as the mayor after the Great D...</td>\n",
+                            "      <td>False</td>\n",
+                            "      <td>False</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>2</th>\n",
+                            "      <td>How many tourists visited New York City in 2019?</td>\n",
+                            "      <td>False</td>\n",
+                            "      <td>False</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>3</th>\n",
+                            "      <td>What are the airports in New York City?</td>\n",
+                            "      <td>True</td>\n",
+                            "      <td>False</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>4</th>\n",
+                            "      <td>When was the first documented visit into New Y...</td>\n",
+                            "      <td>True</td>\n",
+                            "      <td>False</td>\n",
+                            "    </tr>\n",
+                            "  </tbody>\n",
+                            "</table>\n",
+                            "</div>"
+                        ],
+                        "text/plain": [
+                            "                                          Test Query  Correct Response  \\\n",
+                            "0  What battles took place in New York City in th...              True   \n",
+                            "1  Who was elected as the mayor after the Great D...             False   \n",
+                            "2   How many tourists visited New York City in 2019?             False   \n",
+                            "3            What are the airports in New York City?              True   \n",
+                            "4  When was the first documented visit into New Y...              True   \n",
+                            "\n",
+                            "   Correct Source  \n",
+                            "0           False  \n",
+                            "1           False  \n",
+                            "2           False  \n",
+                            "3           False  \n",
+                            "4           False  "
+                        ]
+                    },
+                    "execution_count": 550,
+                    "metadata": {},
+                    "output_type": "execute_result"
+                }
+            ],
+            "source": [
+                "analyze_outcome(outcomes_tree_gpt3)"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "30a9ba34",
+            "metadata": {},
+            "source": [
+                "### List Index + GPT4"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 18,
+            "id": "bc0f05d1",
+            "metadata": {
+                "scrolled": true
+            },
+            "outputs": [],
+            "source": [
+                "outcomes_list_gpt4 = bm.test(\n",
+                "    list_index, gpt4, response_mode=\"tree_summarize\", use_async=True\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 19,
+            "id": "2d2e879d",
+            "metadata": {},
+            "outputs": [
+                {
+                    "data": {
+                        "text/html": [
+                            "<div>\n",
+                            "<style scoped>\n",
+                            "    .dataframe tbody tr th:only-of-type {\n",
+                            "        vertical-align: middle;\n",
+                            "    }\n",
+                            "\n",
+                            "    .dataframe tbody tr th {\n",
+                            "        vertical-align: top;\n",
+                            "    }\n",
+                            "\n",
+                            "    .dataframe thead th {\n",
+                            "        text-align: right;\n",
+                            "    }\n",
+                            "</style>\n",
+                            "<table border=\"1\" class=\"dataframe\">\n",
+                            "  <thead>\n",
+                            "    <tr style=\"text-align: right;\">\n",
+                            "      <th></th>\n",
+                            "      <th>Test Query</th>\n",
+                            "      <th>Correct Response</th>\n",
+                            "      <th>Correct Source</th>\n",
+                            "    </tr>\n",
+                            "  </thead>\n",
+                            "  <tbody>\n",
+                            "    <tr>\n",
+                            "      <th>0</th>\n",
+                            "      <td>What battles took place in New York City in th...</td>\n",
+                            "      <td>False</td>\n",
+                            "      <td>True</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>1</th>\n",
+                            "      <td>Who was elected as the mayor after the Great D...</td>\n",
+                            "      <td>False</td>\n",
+                            "      <td>True</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>2</th>\n",
+                            "      <td>How many tourists visited New York City in 2019?</td>\n",
+                            "      <td>True</td>\n",
+                            "      <td>True</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>3</th>\n",
+                            "      <td>What are the airports in New York City?</td>\n",
+                            "      <td>True</td>\n",
+                            "      <td>True</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>4</th>\n",
+                            "      <td>When was the first documented visit into New Y...</td>\n",
+                            "      <td>True</td>\n",
+                            "      <td>True</td>\n",
+                            "    </tr>\n",
+                            "  </tbody>\n",
+                            "</table>\n",
+                            "</div>"
+                        ],
+                        "text/plain": [
+                            "                                          Test Query  Correct Response  \\\n",
+                            "0  What battles took place in New York City in th...             False   \n",
+                            "1  Who was elected as the mayor after the Great D...             False   \n",
+                            "2   How many tourists visited New York City in 2019?              True   \n",
+                            "3            What are the airports in New York City?              True   \n",
+                            "4  When was the first documented visit into New Y...              True   \n",
+                            "\n",
+                            "   Correct Source  \n",
+                            "0            True  \n",
+                            "1            True  \n",
+                            "2            True  \n",
+                            "3            True  \n",
+                            "4            True  "
+                        ]
+                    },
+                    "execution_count": 19,
+                    "metadata": {},
+                    "output_type": "execute_result"
+                }
+            ],
+            "source": [
+                "analyze_outcome(outcomes_list_gpt4)"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "8cba793c",
+            "metadata": {},
+            "source": [
+                "### List Index + GPT3"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 501,
+            "id": "66cfa3fa",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "outcomes_list_gpt3 = bm.test(\n",
+                "    list_index, gpt3, response_mode=\"tree_summarize\", use_async=True\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 502,
+            "id": "06bc98d8",
+            "metadata": {},
+            "outputs": [
+                {
+                    "data": {
+                        "text/html": [
+                            "<div>\n",
+                            "<style scoped>\n",
+                            "    .dataframe tbody tr th:only-of-type {\n",
+                            "        vertical-align: middle;\n",
+                            "    }\n",
+                            "\n",
+                            "    .dataframe tbody tr th {\n",
+                            "        vertical-align: top;\n",
+                            "    }\n",
+                            "\n",
+                            "    .dataframe thead th {\n",
+                            "        text-align: right;\n",
+                            "    }\n",
+                            "</style>\n",
+                            "<table border=\"1\" class=\"dataframe\">\n",
+                            "  <thead>\n",
+                            "    <tr style=\"text-align: right;\">\n",
+                            "      <th></th>\n",
+                            "      <th>Test Query</th>\n",
+                            "      <th>Correct Response</th>\n",
+                            "      <th>Correct Source</th>\n",
+                            "    </tr>\n",
+                            "  </thead>\n",
+                            "  <tbody>\n",
+                            "    <tr>\n",
+                            "      <th>0</th>\n",
+                            "      <td>What battles took place in New York City in th...</td>\n",
+                            "      <td>True</td>\n",
+                            "      <td>True</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>1</th>\n",
+                            "      <td>Who was elected as the mayor during the Great ...</td>\n",
+                            "      <td>True</td>\n",
+                            "      <td>True</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>2</th>\n",
+                            "      <td>How many tourists visited New York City in 2019?</td>\n",
+                            "      <td>False</td>\n",
+                            "      <td>True</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>3</th>\n",
+                            "      <td>What are the airports in New York City?</td>\n",
+                            "      <td>True</td>\n",
+                            "      <td>True</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>4</th>\n",
+                            "      <td>When was the first documented visit into New Y...</td>\n",
+                            "      <td>True</td>\n",
+                            "      <td>True</td>\n",
+                            "    </tr>\n",
+                            "  </tbody>\n",
+                            "</table>\n",
+                            "</div>"
+                        ],
+                        "text/plain": [
+                            "                                          Test Query  Correct Response  \\\n",
+                            "0  What battles took place in New York City in th...              True   \n",
+                            "1  Who was elected as the mayor during the Great ...              True   \n",
+                            "2   How many tourists visited New York City in 2019?             False   \n",
+                            "3            What are the airports in New York City?              True   \n",
+                            "4  When was the first documented visit into New Y...              True   \n",
+                            "\n",
+                            "   Correct Source  \n",
+                            "0            True  \n",
+                            "1            True  \n",
+                            "2            True  \n",
+                            "3            True  \n",
+                            "4            True  "
+                        ]
+                    },
+                    "execution_count": 502,
+                    "metadata": {},
+                    "output_type": "execute_result"
+                }
+            ],
+            "source": [
+                "analyze_outcome(outcomes_list_gpt3)"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "c4d0b3eb",
+            "metadata": {},
+            "source": [
+                "### List Index + ChatGPT"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 23,
+            "id": "f146c74e",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "outcomes_list_chatgpt = bm.test(\n",
+                "    list_index, chatgpt, response_mode=\"tree_summarize\", use_async=True\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 24,
+            "id": "8eb9d392",
+            "metadata": {},
+            "outputs": [
+                {
+                    "data": {
+                        "text/html": [
+                            "<div>\n",
+                            "<style scoped>\n",
+                            "    .dataframe tbody tr th:only-of-type {\n",
+                            "        vertical-align: middle;\n",
+                            "    }\n",
+                            "\n",
+                            "    .dataframe tbody tr th {\n",
+                            "        vertical-align: top;\n",
+                            "    }\n",
+                            "\n",
+                            "    .dataframe thead th {\n",
+                            "        text-align: right;\n",
+                            "    }\n",
+                            "</style>\n",
+                            "<table border=\"1\" class=\"dataframe\">\n",
+                            "  <thead>\n",
+                            "    <tr style=\"text-align: right;\">\n",
+                            "      <th></th>\n",
+                            "      <th>Test Query</th>\n",
+                            "      <th>Correct Response</th>\n",
+                            "      <th>Correct Source</th>\n",
+                            "    </tr>\n",
+                            "  </thead>\n",
+                            "  <tbody>\n",
+                            "    <tr>\n",
+                            "      <th>0</th>\n",
+                            "      <td>What battles took place in New York City in th...</td>\n",
+                            "      <td>False</td>\n",
+                            "      <td>True</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>1</th>\n",
+                            "      <td>Who was elected as the mayor after the Great D...</td>\n",
+                            "      <td>False</td>\n",
+                            "      <td>True</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>2</th>\n",
+                            "      <td>How many tourists visited New York City in 2019?</td>\n",
+                            "      <td>False</td>\n",
+                            "      <td>True</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>3</th>\n",
+                            "      <td>What are the airports in New York City?</td>\n",
+                            "      <td>True</td>\n",
+                            "      <td>True</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>4</th>\n",
+                            "      <td>When was the first documented visit into New Y...</td>\n",
+                            "      <td>True</td>\n",
+                            "      <td>True</td>\n",
+                            "    </tr>\n",
+                            "  </tbody>\n",
+                            "</table>\n",
+                            "</div>"
+                        ],
+                        "text/plain": [
+                            "                                          Test Query  Correct Response  \\\n",
+                            "0  What battles took place in New York City in th...             False   \n",
+                            "1  Who was elected as the mayor after the Great D...             False   \n",
+                            "2   How many tourists visited New York City in 2019?             False   \n",
+                            "3            What are the airports in New York City?              True   \n",
+                            "4  When was the first documented visit into New Y...              True   \n",
+                            "\n",
+                            "   Correct Source  \n",
+                            "0            True  \n",
+                            "1            True  \n",
+                            "2            True  \n",
+                            "3            True  \n",
+                            "4            True  "
+                        ]
+                    },
+                    "execution_count": 24,
+                    "metadata": {},
+                    "output_type": "execute_result"
+                }
+            ],
+            "source": [
+                "analyze_outcome(outcomes_list_chatgpt)"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "38fc1438",
+            "metadata": {},
+            "source": [
+                "### Vector Store Index + GPT4"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 487,
+            "id": "5349d1e7",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "outcomes_vector_gpt4 = bm.test(vector_index, gpt4)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 488,
+            "id": "7fc53e19",
+            "metadata": {},
+            "outputs": [
+                {
+                    "data": {
+                        "text/html": [
+                            "<div>\n",
+                            "<style scoped>\n",
+                            "    .dataframe tbody tr th:only-of-type {\n",
+                            "        vertical-align: middle;\n",
+                            "    }\n",
+                            "\n",
+                            "    .dataframe tbody tr th {\n",
+                            "        vertical-align: top;\n",
+                            "    }\n",
+                            "\n",
+                            "    .dataframe thead th {\n",
+                            "        text-align: right;\n",
+                            "    }\n",
+                            "</style>\n",
+                            "<table border=\"1\" class=\"dataframe\">\n",
+                            "  <thead>\n",
+                            "    <tr style=\"text-align: right;\">\n",
+                            "      <th></th>\n",
+                            "      <th>Test Query</th>\n",
+                            "      <th>Correct Response</th>\n",
+                            "      <th>Correct Source</th>\n",
+                            "    </tr>\n",
+                            "  </thead>\n",
+                            "  <tbody>\n",
+                            "    <tr>\n",
+                            "      <th>0</th>\n",
+                            "      <td>What battles took place in New York City in th...</td>\n",
+                            "      <td>True</td>\n",
+                            "      <td>True</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>1</th>\n",
+                            "      <td>Who was elected as the mayor during the Great ...</td>\n",
+                            "      <td>True</td>\n",
+                            "      <td>True</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>2</th>\n",
+                            "      <td>How many tourists visited New York City in 2019?</td>\n",
+                            "      <td>False</td>\n",
+                            "      <td>False</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>3</th>\n",
+                            "      <td>What are the airports in New York City?</td>\n",
+                            "      <td>True</td>\n",
+                            "      <td>True</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>4</th>\n",
+                            "      <td>When was the first documented visit into New Y...</td>\n",
+                            "      <td>True</td>\n",
+                            "      <td>True</td>\n",
+                            "    </tr>\n",
+                            "  </tbody>\n",
+                            "</table>\n",
+                            "</div>"
+                        ],
+                        "text/plain": [
+                            "                                          Test Query  Correct Response  \\\n",
+                            "0  What battles took place in New York City in th...              True   \n",
+                            "1  Who was elected as the mayor during the Great ...              True   \n",
+                            "2   How many tourists visited New York City in 2019?             False   \n",
+                            "3            What are the airports in New York City?              True   \n",
+                            "4  When was the first documented visit into New Y...              True   \n",
+                            "\n",
+                            "   Correct Source  \n",
+                            "0            True  \n",
+                            "1            True  \n",
+                            "2           False  \n",
+                            "3            True  \n",
+                            "4            True  "
+                        ]
+                    },
+                    "execution_count": 488,
+                    "metadata": {},
+                    "output_type": "execute_result"
+                }
+            ],
+            "source": [
+                "analyze_outcome(outcomes_vector_gpt4)"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "70eb711f",
+            "metadata": {},
+            "source": [
+                "### Vector Store Index + GPT3"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 644,
+            "id": "e35ebdf9",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "outcomes_vector_gpt3 = bm.test(vector_index, gpt3)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 645,
+            "id": "95c49697",
+            "metadata": {},
+            "outputs": [
+                {
+                    "data": {
+                        "text/html": [
+                            "<div>\n",
+                            "<style scoped>\n",
+                            "    .dataframe tbody tr th:only-of-type {\n",
+                            "        vertical-align: middle;\n",
+                            "    }\n",
+                            "\n",
+                            "    .dataframe tbody tr th {\n",
+                            "        vertical-align: top;\n",
+                            "    }\n",
+                            "\n",
+                            "    .dataframe thead th {\n",
+                            "        text-align: right;\n",
+                            "    }\n",
+                            "</style>\n",
+                            "<table border=\"1\" class=\"dataframe\">\n",
+                            "  <thead>\n",
+                            "    <tr style=\"text-align: right;\">\n",
+                            "      <th></th>\n",
+                            "      <th>Test Query</th>\n",
+                            "      <th>Correct Response</th>\n",
+                            "      <th>Correct Source</th>\n",
+                            "    </tr>\n",
+                            "  </thead>\n",
+                            "  <tbody>\n",
+                            "    <tr>\n",
+                            "      <th>0</th>\n",
+                            "      <td>What battles took place in New York City in th...</td>\n",
+                            "      <td>True</td>\n",
+                            "      <td>True</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>1</th>\n",
+                            "      <td>Who was elected as the mayor after the Great D...</td>\n",
+                            "      <td>True</td>\n",
+                            "      <td>False</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>2</th>\n",
+                            "      <td>How many tourists visited New York City in 2019?</td>\n",
+                            "      <td>False</td>\n",
+                            "      <td>False</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>3</th>\n",
+                            "      <td>What are the airports in New York City?</td>\n",
+                            "      <td>True</td>\n",
+                            "      <td>False</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>4</th>\n",
+                            "      <td>When was the first documented visit into New Y...</td>\n",
+                            "      <td>True</td>\n",
+                            "      <td>False</td>\n",
+                            "    </tr>\n",
+                            "  </tbody>\n",
+                            "</table>\n",
+                            "</div>"
+                        ],
+                        "text/plain": [
+                            "                                          Test Query  Correct Response  \\\n",
+                            "0  What battles took place in New York City in th...              True   \n",
+                            "1  Who was elected as the mayor after the Great D...              True   \n",
+                            "2   How many tourists visited New York City in 2019?             False   \n",
+                            "3            What are the airports in New York City?              True   \n",
+                            "4  When was the first documented visit into New Y...              True   \n",
+                            "\n",
+                            "   Correct Source  \n",
+                            "0            True  \n",
+                            "1           False  \n",
+                            "2           False  \n",
+                            "3           False  \n",
+                            "4           False  "
+                        ]
+                    },
+                    "execution_count": 645,
+                    "metadata": {},
+                    "output_type": "execute_result"
+                }
+            ],
+            "source": [
+                "analyze_outcome(outcomes_vector_gpt3)"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "a36ba2ee",
+            "metadata": {},
+            "source": [
+                "# LLM-based Evaluation"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 646,
+            "id": "59ff561c",
+            "metadata": {},
+            "outputs": [
+                {
+                    "data": {
+                        "text/html": [
+                            "<div>\n",
+                            "<style scoped>\n",
+                            "    .dataframe tbody tr th:only-of-type {\n",
+                            "        vertical-align: middle;\n",
+                            "    }\n",
+                            "\n",
+                            "    .dataframe tbody tr th {\n",
+                            "        vertical-align: top;\n",
+                            "    }\n",
+                            "\n",
+                            "    .dataframe thead th {\n",
+                            "        text-align: right;\n",
+                            "    }\n",
+                            "</style>\n",
+                            "<table border=\"1\" class=\"dataframe\">\n",
+                            "  <thead>\n",
+                            "    <tr style=\"text-align: right;\">\n",
+                            "      <th></th>\n",
+                            "      <th>Test Query</th>\n",
+                            "      <th>Correct Response</th>\n",
+                            "      <th>Correct Source</th>\n",
+                            "    </tr>\n",
+                            "  </thead>\n",
+                            "  <tbody>\n",
+                            "    <tr>\n",
+                            "      <th>0</th>\n",
+                            "      <td>What battles took place in New York City in th...</td>\n",
+                            "      <td>True</td>\n",
+                            "      <td>True</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>1</th>\n",
+                            "      <td>Who was elected as the mayor after the Great D...</td>\n",
+                            "      <td>True</td>\n",
+                            "      <td>False</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>2</th>\n",
+                            "      <td>How many tourists visited New York City in 2019?</td>\n",
+                            "      <td>False</td>\n",
+                            "      <td>False</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>3</th>\n",
+                            "      <td>What are the airports in New York City?</td>\n",
+                            "      <td>True</td>\n",
+                            "      <td>False</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>4</th>\n",
+                            "      <td>When was the first documented visit into New Y...</td>\n",
+                            "      <td>True</td>\n",
+                            "      <td>False</td>\n",
+                            "    </tr>\n",
+                            "  </tbody>\n",
+                            "</table>\n",
+                            "</div>"
+                        ],
+                        "text/plain": [
+                            "                                          Test Query  Correct Response  \\\n",
+                            "0  What battles took place in New York City in th...              True   \n",
+                            "1  Who was elected as the mayor after the Great D...              True   \n",
+                            "2   How many tourists visited New York City in 2019?             False   \n",
+                            "3            What are the airports in New York City?              True   \n",
+                            "4  When was the first documented visit into New Y...              True   \n",
+                            "\n",
+                            "   Correct Source  \n",
+                            "0            True  \n",
+                            "1           False  \n",
+                            "2           False  \n",
+                            "3           False  \n",
+                            "4           False  "
+                        ]
+                    },
+                    "execution_count": 646,
+                    "metadata": {},
+                    "output_type": "execute_result"
+                }
+            ],
+            "source": [
+                "analyze_outcome(outcomes_vector_gpt3)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 647,
+            "id": "e4ffaca6",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "eval_gpt4 = analyze_outcome_llm(outcomes_vector_gpt3, gpt4)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 657,
+            "id": "85c4e415",
+            "metadata": {},
+            "outputs": [
+                {
+                    "data": {
+                        "text/html": [
+                            "<div>\n",
+                            "<style scoped>\n",
+                            "    .dataframe tbody tr th:only-of-type {\n",
+                            "        vertical-align: middle;\n",
+                            "    }\n",
+                            "\n",
+                            "    .dataframe tbody tr th {\n",
+                            "        vertical-align: top;\n",
+                            "    }\n",
+                            "\n",
+                            "    .dataframe thead th {\n",
+                            "        text-align: right;\n",
+                            "    }\n",
+                            "</style>\n",
+                            "<table border=\"1\" class=\"dataframe\">\n",
+                            "  <thead>\n",
+                            "    <tr style=\"text-align: right;\">\n",
+                            "      <th></th>\n",
+                            "      <th>Test Query</th>\n",
+                            "      <th>Correct Response (LLM)</th>\n",
+                            "      <th>Correct Source (LLM)</th>\n",
+                            "      <th>Eval (LLM)</th>\n",
+                            "    </tr>\n",
+                            "  </thead>\n",
+                            "  <tbody>\n",
+                            "    <tr>\n",
+                            "      <th>0</th>\n",
+                            "      <td>What battles took place in New York City in th...</td>\n",
+                            "      <td>True</td>\n",
+                            "      <td>True</td>\n",
+                            "      <td>Context is relevant: True\\nAnswer is correct: ...</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>1</th>\n",
+                            "      <td>Who was elected as the mayor after the Great D...</td>\n",
+                            "      <td>True</td>\n",
+                            "      <td>False</td>\n",
+                            "      <td>Context is relevant: False\\nAnswer is correct:...</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>2</th>\n",
+                            "      <td>How many tourists visited New York City in 2019?</td>\n",
+                            "      <td>True</td>\n",
+                            "      <td>False</td>\n",
+                            "      <td>Context is relevant: False\\nAnswer is correct:...</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>3</th>\n",
+                            "      <td>What are the airports in New York City?</td>\n",
+                            "      <td>True</td>\n",
+                            "      <td>False</td>\n",
+                            "      <td>Context is relevant: False\\nAnswer is correct:...</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>4</th>\n",
+                            "      <td>When was the first documented visit into New Y...</td>\n",
+                            "      <td>True</td>\n",
+                            "      <td>False</td>\n",
+                            "      <td>Context is relevant: False\\nAnswer is correct:...</td>\n",
+                            "    </tr>\n",
+                            "  </tbody>\n",
+                            "</table>\n",
+                            "</div>"
+                        ],
+                        "text/plain": [
+                            "                                          Test Query  Correct Response (LLM)  \\\n",
+                            "0  What battles took place in New York City in th...                    True   \n",
+                            "1  Who was elected as the mayor after the Great D...                    True   \n",
+                            "2   How many tourists visited New York City in 2019?                    True   \n",
+                            "3            What are the airports in New York City?                    True   \n",
+                            "4  When was the first documented visit into New Y...                    True   \n",
+                            "\n",
+                            "   Correct Source (LLM)                                         Eval (LLM)  \n",
+                            "0                  True  Context is relevant: True\\nAnswer is correct: ...  \n",
+                            "1                 False  Context is relevant: False\\nAnswer is correct:...  \n",
+                            "2                 False  Context is relevant: False\\nAnswer is correct:...  \n",
+                            "3                 False  Context is relevant: False\\nAnswer is correct:...  \n",
+                            "4                 False  Context is relevant: False\\nAnswer is correct:...  "
+                        ]
+                    },
+                    "execution_count": 657,
+                    "metadata": {},
+                    "output_type": "execute_result"
+                }
+            ],
+            "source": [
+                "eval_gpt4"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 651,
+            "id": "3efb66d6",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "eval_chatgpt = analyze_outcome_llm(outcomes_vector_gpt3, chatgpt)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 652,
+            "id": "4c452767",
+            "metadata": {
+                "scrolled": true
+            },
+            "outputs": [
+                {
+                    "data": {
+                        "text/html": [
+                            "<div>\n",
+                            "<style scoped>\n",
+                            "    .dataframe tbody tr th:only-of-type {\n",
+                            "        vertical-align: middle;\n",
+                            "    }\n",
+                            "\n",
+                            "    .dataframe tbody tr th {\n",
+                            "        vertical-align: top;\n",
+                            "    }\n",
+                            "\n",
+                            "    .dataframe thead th {\n",
+                            "        text-align: right;\n",
+                            "    }\n",
+                            "</style>\n",
+                            "<table border=\"1\" class=\"dataframe\">\n",
+                            "  <thead>\n",
+                            "    <tr style=\"text-align: right;\">\n",
+                            "      <th></th>\n",
+                            "      <th>Test Query</th>\n",
+                            "      <th>Correct Response (LLM)</th>\n",
+                            "      <th>Correct Source (LLM)</th>\n",
+                            "      <th>Eval (LLM)</th>\n",
+                            "    </tr>\n",
+                            "  </thead>\n",
+                            "  <tbody>\n",
+                            "    <tr>\n",
+                            "      <th>0</th>\n",
+                            "      <td>What battles took place in New York City in th...</td>\n",
+                            "      <td>True</td>\n",
+                            "      <td>True</td>\n",
+                            "      <td>\\n\\nContext is relevant: True\\nAnswer is corre...</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>1</th>\n",
+                            "      <td>Who was elected as the mayor after the Great D...</td>\n",
+                            "      <td>True</td>\n",
+                            "      <td>True</td>\n",
+                            "      <td>\\n\\nContext is relevant: True\\nAnswer is corre...</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>2</th>\n",
+                            "      <td>How many tourists visited New York City in 2019?</td>\n",
+                            "      <td>False</td>\n",
+                            "      <td>False</td>\n",
+                            "      <td>\\n\\nContext is relevant: False\\nAnswer is corr...</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>3</th>\n",
+                            "      <td>What are the airports in New York City?</td>\n",
+                            "      <td>True</td>\n",
+                            "      <td>False</td>\n",
+                            "      <td>\\n\\nContext is relevant: False\\nAnswer is corr...</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>4</th>\n",
+                            "      <td>When was the first documented visit into New Y...</td>\n",
+                            "      <td>False</td>\n",
+                            "      <td>True</td>\n",
+                            "      <td>\\n\\nContext is relevant: True\\nAnswer is corre...</td>\n",
+                            "    </tr>\n",
+                            "  </tbody>\n",
+                            "</table>\n",
+                            "</div>"
+                        ],
+                        "text/plain": [
+                            "                                          Test Query  Correct Response (LLM)  \\\n",
+                            "0  What battles took place in New York City in th...                    True   \n",
+                            "1  Who was elected as the mayor after the Great D...                    True   \n",
+                            "2   How many tourists visited New York City in 2019?                   False   \n",
+                            "3            What are the airports in New York City?                    True   \n",
+                            "4  When was the first documented visit into New Y...                   False   \n",
+                            "\n",
+                            "   Correct Source (LLM)                                         Eval (LLM)  \n",
+                            "0                  True  \\n\\nContext is relevant: True\\nAnswer is corre...  \n",
+                            "1                  True  \\n\\nContext is relevant: True\\nAnswer is corre...  \n",
+                            "2                 False  \\n\\nContext is relevant: False\\nAnswer is corr...  \n",
+                            "3                 False  \\n\\nContext is relevant: False\\nAnswer is corr...  \n",
+                            "4                  True  \\n\\nContext is relevant: True\\nAnswer is corre...  "
+                        ]
+                    },
+                    "execution_count": 652,
+                    "metadata": {},
+                    "output_type": "execute_result"
+                }
+            ],
+            "source": [
+                "eval_chatgpt"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 649,
+            "id": "61e8dad2",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "eval_gpt3 = analyze_outcome_llm(outcomes_vector_gpt3, gpt3)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 650,
+            "id": "170400c3",
+            "metadata": {
+                "scrolled": true
+            },
+            "outputs": [
+                {
+                    "data": {
+                        "text/html": [
+                            "<div>\n",
+                            "<style scoped>\n",
+                            "    .dataframe tbody tr th:only-of-type {\n",
+                            "        vertical-align: middle;\n",
+                            "    }\n",
+                            "\n",
+                            "    .dataframe tbody tr th {\n",
+                            "        vertical-align: top;\n",
+                            "    }\n",
+                            "\n",
+                            "    .dataframe thead th {\n",
+                            "        text-align: right;\n",
+                            "    }\n",
+                            "</style>\n",
+                            "<table border=\"1\" class=\"dataframe\">\n",
+                            "  <thead>\n",
+                            "    <tr style=\"text-align: right;\">\n",
+                            "      <th></th>\n",
+                            "      <th>Test Query</th>\n",
+                            "      <th>Correct Response (LLM)</th>\n",
+                            "      <th>Correct Source (LLM)</th>\n",
+                            "      <th>Eval (LLM)</th>\n",
+                            "    </tr>\n",
+                            "  </thead>\n",
+                            "  <tbody>\n",
+                            "    <tr>\n",
+                            "      <th>0</th>\n",
+                            "      <td>What battles took place in New York City in th...</td>\n",
+                            "      <td>True</td>\n",
+                            "      <td>True</td>\n",
+                            "      <td>\\n\\nContext is relevant: True\\nAnswer is corre...</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>1</th>\n",
+                            "      <td>Who was elected as the mayor after the Great D...</td>\n",
+                            "      <td>True</td>\n",
+                            "      <td>True</td>\n",
+                            "      <td>\\n\\nContext is relevant: True\\nAnswer is corre...</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>2</th>\n",
+                            "      <td>How many tourists visited New York City in 2019?</td>\n",
+                            "      <td>False</td>\n",
+                            "      <td>False</td>\n",
+                            "      <td>\\n\\nContext is relevant: False\\nAnswer is corr...</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>3</th>\n",
+                            "      <td>What are the airports in New York City?</td>\n",
+                            "      <td>True</td>\n",
+                            "      <td>True</td>\n",
+                            "      <td>\\n\\nContext is relevant: True\\nAnswer is corre...</td>\n",
+                            "    </tr>\n",
+                            "    <tr>\n",
+                            "      <th>4</th>\n",
+                            "      <td>When was the first documented visit into New Y...</td>\n",
+                            "      <td>True</td>\n",
+                            "      <td>True</td>\n",
+                            "      <td>\\n\\nContext is relevant: True\\nAnswer is corre...</td>\n",
+                            "    </tr>\n",
+                            "  </tbody>\n",
+                            "</table>\n",
+                            "</div>"
+                        ],
+                        "text/plain": [
+                            "                                          Test Query  Correct Response (LLM)  \\\n",
+                            "0  What battles took place in New York City in th...                    True   \n",
+                            "1  Who was elected as the mayor after the Great D...                    True   \n",
+                            "2   How many tourists visited New York City in 2019?                   False   \n",
+                            "3            What are the airports in New York City?                    True   \n",
+                            "4  When was the first documented visit into New Y...                    True   \n",
+                            "\n",
+                            "   Correct Source (LLM)                                         Eval (LLM)  \n",
+                            "0                  True  \\n\\nContext is relevant: True\\nAnswer is corre...  \n",
+                            "1                  True  \\n\\nContext is relevant: True\\nAnswer is corre...  \n",
+                            "2                 False  \\n\\nContext is relevant: False\\nAnswer is corr...  \n",
+                            "3                  True  \\n\\nContext is relevant: True\\nAnswer is corre...  \n",
+                            "4                  True  \\n\\nContext is relevant: True\\nAnswer is corre...  "
+                        ]
+                    },
+                    "execution_count": 650,
+                    "metadata": {},
+                    "output_type": "execute_result"
+                }
+            ],
+            "source": [
+                "eval_gpt3"
+            ]
+        }
+    ],
+    "metadata": {
+        "kernelspec": {
+            "display_name": "Python 3 (ipykernel)",
+            "language": "python",
+            "name": "python3"
+        },
+        "language_info": {
+            "codemirror_mode": {
+                "name": "ipython",
+                "version": 3
+            },
+            "file_extension": ".py",
+            "mimetype": "text/x-python",
+            "name": "python",
+            "nbconvert_exporter": "python",
+            "pygments_lexer": "ipython3",
+            "version": "3.10.9"
+        }
+    },
+    "nbformat": 4,
+    "nbformat_minor": 5
 }
diff --git a/examples/test_wiki/TestNYC_Embeddings.ipynb b/examples/test_wiki/TestNYC_Embeddings.ipynb
index 1998d3b6dc475c7ef0e8e15e4126600c5be29ad9..96448b1f7fd534438c313ae8b4bcdeba2804a0b9 100644
--- a/examples/test_wiki/TestNYC_Embeddings.ipynb
+++ b/examples/test_wiki/TestNYC_Embeddings.ipynb
@@ -1,408 +1,408 @@
 {
- "cells": [
-  {
-   "cell_type": "markdown",
-   "id": "7a9f093e-e027-405b-ae3d-17dda9e30cd0",
-   "metadata": {},
-   "source": [
-    "# NYC Wikipedia Embeddings Demo"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "cadae9f2",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "import logging\n",
-    "import sys\n",
-    "\n",
-    "logging.basicConfig(stream=sys.stdout, level=logging.INFO)\n",
-    "logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "id": "3e594a62-110e-40b3-ad1e-c99f49a4e537",
-   "metadata": {},
-   "source": [
-    "Demonstrate embedding capabilities in TreeIndex and ListIndex"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "id": "b145f093-afb0-46b8-a81f-466af8478439",
-   "metadata": {},
-   "source": [
-    "### Setup + Data Prep"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "d038dcc1",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "import logging\n",
-    "import sys\n",
-    "\n",
-    "logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)\n",
-    "logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "b4b4387b-413e-4016-ba1e-88b3d9410a38",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# fetch \"New York City\" page from Wikipedia\n",
-    "from pathlib import Path\n",
-    "\n",
-    "import requests\n",
-    "\n",
-    "response = requests.get(\n",
-    "    \"https://en.wikipedia.org/w/api.php\",\n",
-    "    params={\n",
-    "        \"action\": \"query\",\n",
-    "        \"format\": \"json\",\n",
-    "        \"titles\": \"New York City\",\n",
-    "        \"prop\": \"extracts\",\n",
-    "        # 'exintro': True,\n",
-    "        \"explaintext\": True,\n",
-    "    },\n",
-    ").json()\n",
-    "page = next(iter(response[\"query\"][\"pages\"].values()))\n",
-    "nyc_text = page[\"extract\"]\n",
-    "\n",
-    "data_path = Path(\"data\")\n",
-    "if not data_path.exists():\n",
-    "    Path.mkdir(data_path)\n",
-    "\n",
-    "with open(\"data/nyc_text.txt\", \"w\") as fp:\n",
-    "    fp.write(nyc_text)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "f1a9eb90-335c-4214-8bb6-fd1edbe3ccbd",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# My OpenAI Key\n",
-    "import os\n",
-    "\n",
-    "os.environ[\"OPENAI_API_KEY\"] = \"INSERT OPENAI KEY\""
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "id": "def4eca7-ba03-48e2-b18f-fd669b91a5fc",
-   "metadata": {},
-   "source": [
-    "### TreeIndex - Embedding-based Query"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "8d0b2364-4806-4656-81e7-3f6e4b910b5b",
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "None of PyTorch, TensorFlow >= 2.0, or Flax have been found. Models won't be available and only tokenizers, configuration and file/data utilities can be used.\n"
-     ]
-    }
-   ],
-   "source": [
-    "from llama_index import TreeIndex, SimpleDirectoryReader\n",
-    "from IPython.display import Markdown"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "1298bbb4-c99e-431e-93ef-eb32c0a2fc2a",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "documents = SimpleDirectoryReader(\"data\").load_data()\n",
-    "index = TreeIndex.from_documents(documents)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "68c9ebfe-b1b6-4f4e-9278-174346de8c90",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "# set Logging to DEBUG for more detailed outputs\n",
-    "query_engine = index.as_query_engine(retriever_mode=\"embedding\")\n",
-    "response = query_engine.query(\n",
-    "    \"What is the name of the professional women's basketball team in New York City?\"\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "e1000018-18de-410d-b6d9-c66bf37ccf1d",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "display(Markdown(f\"<b>{response}</b>\"))"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "4fc3f18a-0ef9-453c-acf8-7aedd784cdcf",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "response = query_engine.query(\n",
-    "    \"What battles took place in New York City in the American Revolution?\",\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "5588289b-9fdc-4b86-bab9-808c97be05e1",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "display(Markdown(f\"<b>{response}</b>\"))"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "53265fd4-da98-4cf9-abfb-3f76105fd2ff",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# set Logging to DEBUG for more detailed outputs\n",
-    "response = query_engine.query(\"What are the airports in New York City?\")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "bc08060f-b031-4dc5-a980-427dd2407b5d",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "display(Markdown(f\"<b>{response}</b>\"))"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "id": "63009734-deda-4159-9f2b-0af19720e913",
-   "metadata": {},
-   "source": [
-    "### ListIndex - Embedding-based Query"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "fd8920ae-8115-457c-b092-21e50cc3bcc0",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "from llama_index import ListIndex, SimpleDirectoryReader\n",
-    "from IPython.display import Markdown"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "27c8bbee-daf5-494d-ba66-b60142592a96",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "documents = SimpleDirectoryReader(\"data\").load_data()\n",
-    "index = ListIndex.from_documents(documents)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "2cbf24c2-060e-4216-9188-a6746af1830d",
-   "metadata": {
-    "tags": []
-   },
-   "outputs": [],
-   "source": [
-    "# set Logging to DEBUG for more detailed outputs\n",
-    "query_engine = index.as_query_engine(retriever_mode=\"embedding\")\n",
-    "response = query_engine.query(\n",
-    "    \"What is the name of the professional women's basketball team in New York City?\"\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "14e1b19f-fbf7-49fd-a96f-cbb37bafd498",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "display(Markdown(f\"<b>{response}</b>\"))"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "48b86c8d-9149-4395-9d52-6070597c814d",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# set Logging to DEBUG for more detailed outputs\n",
-    "response = query_engine.query(\n",
-    "    \"What battles took place in New York City in the American Revolution?\",\n",
-    "    retriever_mode=\"embedding\",\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "57fbd90c-a8d3-4738-8531-e8f48a953167",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "display(Markdown(f\"<b>{response}</b>\"))"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "7ab01446-9b07-4222-a577-eeb4617ce4fc",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# set Logging to DEBUG for more detailed outputs\n",
-    "response = query_engine.query(\n",
-    "    \"What are the airports in New York City?\", retriever_mode=\"embedding\"\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "091afaea-a61e-4a7c-b2f1-7df387380b8b",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "display(Markdown(f\"<b>{response}</b>\"))"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "id": "aca03087-d6cc-4d87-8ec6-185fa03d9fea",
-   "metadata": {},
-   "source": [
-    "## Try out other embeddings! \n",
-    "(courtesy of langchain)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "27c24411-7049-45c7-862c-0857c03db580",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "from llama_index import ListIndex, SimpleDirectoryReader, ServiceContext\n",
-    "from IPython.display import Markdown"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "b9ff1944-a06a-4b05-adae-a2ef25e74e8b",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# load in HF embedding model from langchain\n",
-    "from langchain.embeddings.huggingface import HuggingFaceEmbeddings\n",
-    "from llama_index import LangchainEmbedding\n",
-    "\n",
-    "embed_model = LangchainEmbedding(HuggingFaceEmbeddings())"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "1494cabb-0123-408a-9d81-8e02db9b3acd",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# configure\n",
-    "service_context = ServiceContext.from_defaults(embed_model=embed_model)\n",
-    "\n",
-    "# set Logging to DEBUG for more detailed outputs\n",
-    "query_engine = index.as_query_engine(\n",
-    "    retriever_mode=\"embedding\",\n",
-    "    service_context=service_context,\n",
-    ")\n",
-    "response = query_engine.query(\n",
-    "    \"What is the name of the professional women's basketball team in New York City?\",\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "4d96a2e7-4eb1-474e-b855-eca3efed1bad",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "response"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "80510d3a-8bf8-47f2-b1d4-3d1bd0d5a1bb",
-   "metadata": {},
-   "outputs": [],
-   "source": []
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "Python 3 (ipykernel)",
-   "language": "python",
-   "name": "python3"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 3
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython3",
-   "version": "3.10.9"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 5
+    "cells": [
+        {
+            "cell_type": "markdown",
+            "id": "7a9f093e-e027-405b-ae3d-17dda9e30cd0",
+            "metadata": {},
+            "source": [
+                "# NYC Wikipedia Embeddings Demo"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "cadae9f2",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "import logging\n",
+                "import sys\n",
+                "\n",
+                "logging.basicConfig(stream=sys.stdout, level=logging.INFO)\n",
+                "logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))"
+            ]
+        },
+        {
+            "cell_type": "markdown",
+            "id": "3e594a62-110e-40b3-ad1e-c99f49a4e537",
+            "metadata": {},
+            "source": [
+                "Demonstrate embedding capabilities in TreeIndex and SummaryIndex"
+            ]
+        },
+        {
+            "cell_type": "markdown",
+            "id": "b145f093-afb0-46b8-a81f-466af8478439",
+            "metadata": {},
+            "source": [
+                "### Setup + Data Prep"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "d038dcc1",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "import logging\n",
+                "import sys\n",
+                "\n",
+                "logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)\n",
+                "logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "b4b4387b-413e-4016-ba1e-88b3d9410a38",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# fetch \"New York City\" page from Wikipedia\n",
+                "from pathlib import Path\n",
+                "\n",
+                "import requests\n",
+                "\n",
+                "response = requests.get(\n",
+                "    \"https://en.wikipedia.org/w/api.php\",\n",
+                "    params={\n",
+                "        \"action\": \"query\",\n",
+                "        \"format\": \"json\",\n",
+                "        \"titles\": \"New York City\",\n",
+                "        \"prop\": \"extracts\",\n",
+                "        # 'exintro': True,\n",
+                "        \"explaintext\": True,\n",
+                "    },\n",
+                ").json()\n",
+                "page = next(iter(response[\"query\"][\"pages\"].values()))\n",
+                "nyc_text = page[\"extract\"]\n",
+                "\n",
+                "data_path = Path(\"data\")\n",
+                "if not data_path.exists():\n",
+                "    Path.mkdir(data_path)\n",
+                "\n",
+                "with open(\"data/nyc_text.txt\", \"w\") as fp:\n",
+                "    fp.write(nyc_text)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "f1a9eb90-335c-4214-8bb6-fd1edbe3ccbd",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# My OpenAI Key\n",
+                "import os\n",
+                "\n",
+                "os.environ[\"OPENAI_API_KEY\"] = \"INSERT OPENAI KEY\""
+            ]
+        },
+        {
+            "cell_type": "markdown",
+            "id": "def4eca7-ba03-48e2-b18f-fd669b91a5fc",
+            "metadata": {},
+            "source": [
+                "### TreeIndex - Embedding-based Query"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "8d0b2364-4806-4656-81e7-3f6e4b910b5b",
+            "metadata": {},
+            "outputs": [
+                {
+                    "name": "stderr",
+                    "output_type": "stream",
+                    "text": [
+                        "None of PyTorch, TensorFlow >= 2.0, or Flax have been found. Models won't be available and only tokenizers, configuration and file/data utilities can be used.\n"
+                    ]
+                }
+            ],
+            "source": [
+                "from llama_index import TreeIndex, SimpleDirectoryReader\n",
+                "from IPython.display import Markdown"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "1298bbb4-c99e-431e-93ef-eb32c0a2fc2a",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "documents = SimpleDirectoryReader(\"data\").load_data()\n",
+                "index = TreeIndex.from_documents(documents)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "68c9ebfe-b1b6-4f4e-9278-174346de8c90",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "# set Logging to DEBUG for more detailed outputs\n",
+                "query_engine = index.as_query_engine(retriever_mode=\"embedding\")\n",
+                "response = query_engine.query(\n",
+                "    \"What is the name of the professional women's basketball team in New York City?\"\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "e1000018-18de-410d-b6d9-c66bf37ccf1d",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "display(Markdown(f\"<b>{response}</b>\"))"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "4fc3f18a-0ef9-453c-acf8-7aedd784cdcf",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "response = query_engine.query(\n",
+                "    \"What battles took place in New York City in the American Revolution?\",\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "5588289b-9fdc-4b86-bab9-808c97be05e1",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "display(Markdown(f\"<b>{response}</b>\"))"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "53265fd4-da98-4cf9-abfb-3f76105fd2ff",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# set Logging to DEBUG for more detailed outputs\n",
+                "response = query_engine.query(\"What are the airports in New York City?\")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "bc08060f-b031-4dc5-a980-427dd2407b5d",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "display(Markdown(f\"<b>{response}</b>\"))"
+            ]
+        },
+        {
+            "cell_type": "markdown",
+            "id": "63009734-deda-4159-9f2b-0af19720e913",
+            "metadata": {},
+            "source": [
+                "### SummaryIndex - Embedding-based Query"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "fd8920ae-8115-457c-b092-21e50cc3bcc0",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "from llama_index import SummaryIndex, SimpleDirectoryReader\n",
+                "from IPython.display import Markdown"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "27c8bbee-daf5-494d-ba66-b60142592a96",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "documents = SimpleDirectoryReader(\"data\").load_data()\n",
+                "index = SummaryIndex.from_documents(documents)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "2cbf24c2-060e-4216-9188-a6746af1830d",
+            "metadata": {
+                "tags": []
+            },
+            "outputs": [],
+            "source": [
+                "# set Logging to DEBUG for more detailed outputs\n",
+                "query_engine = index.as_query_engine(retriever_mode=\"embedding\")\n",
+                "response = query_engine.query(\n",
+                "    \"What is the name of the professional women's basketball team in New York City?\"\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "14e1b19f-fbf7-49fd-a96f-cbb37bafd498",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "display(Markdown(f\"<b>{response}</b>\"))"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "48b86c8d-9149-4395-9d52-6070597c814d",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# set Logging to DEBUG for more detailed outputs\n",
+                "response = query_engine.query(\n",
+                "    \"What battles took place in New York City in the American Revolution?\",\n",
+                "    retriever_mode=\"embedding\",\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "57fbd90c-a8d3-4738-8531-e8f48a953167",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "display(Markdown(f\"<b>{response}</b>\"))"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "7ab01446-9b07-4222-a577-eeb4617ce4fc",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# set Logging to DEBUG for more detailed outputs\n",
+                "response = query_engine.query(\n",
+                "    \"What are the airports in New York City?\", retriever_mode=\"embedding\"\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "091afaea-a61e-4a7c-b2f1-7df387380b8b",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "display(Markdown(f\"<b>{response}</b>\"))"
+            ]
+        },
+        {
+            "cell_type": "markdown",
+            "id": "aca03087-d6cc-4d87-8ec6-185fa03d9fea",
+            "metadata": {},
+            "source": [
+                "## Try out other embeddings! \n",
+                "(courtesy of langchain)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "27c24411-7049-45c7-862c-0857c03db580",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "from llama_index import SummaryIndex, SimpleDirectoryReader, ServiceContext\n",
+                "from IPython.display import Markdown"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "b9ff1944-a06a-4b05-adae-a2ef25e74e8b",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# load in HF embedding model from langchain\n",
+                "from langchain.embeddings.huggingface import HuggingFaceEmbeddings\n",
+                "from llama_index import LangchainEmbedding\n",
+                "\n",
+                "embed_model = LangchainEmbedding(HuggingFaceEmbeddings())"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "1494cabb-0123-408a-9d81-8e02db9b3acd",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# configure\n",
+                "service_context = ServiceContext.from_defaults(embed_model=embed_model)\n",
+                "\n",
+                "# set Logging to DEBUG for more detailed outputs\n",
+                "query_engine = index.as_query_engine(\n",
+                "    retriever_mode=\"embedding\",\n",
+                "    service_context=service_context,\n",
+                ")\n",
+                "response = query_engine.query(\n",
+                "    \"What is the name of the professional women's basketball team in New York City?\",\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "4d96a2e7-4eb1-474e-b855-eca3efed1bad",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "response"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "80510d3a-8bf8-47f2-b1d4-3d1bd0d5a1bb",
+            "metadata": {},
+            "outputs": [],
+            "source": []
+        }
+    ],
+    "metadata": {
+        "kernelspec": {
+            "display_name": "Python 3 (ipykernel)",
+            "language": "python",
+            "name": "python3"
+        },
+        "language_info": {
+            "codemirror_mode": {
+                "name": "ipython",
+                "version": 3
+            },
+            "file_extension": ".py",
+            "mimetype": "text/x-python",
+            "name": "python",
+            "nbconvert_exporter": "python",
+            "pygments_lexer": "ipython3",
+            "version": "3.10.9"
+        }
+    },
+    "nbformat": 4,
+    "nbformat_minor": 5
 }
diff --git a/examples/test_wiki/TestWikiReader.ipynb b/examples/test_wiki/TestWikiReader.ipynb
index 28e71f72bf63013c5acefae708e01094acdd5c10..ee0380278f1c68af0d9cc01c06cc05835a801a62 100644
--- a/examples/test_wiki/TestWikiReader.ipynb
+++ b/examples/test_wiki/TestWikiReader.ipynb
@@ -1,276 +1,276 @@
 {
- "cells": [
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "52295407",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "import logging\n",
-    "import sys\n",
-    "\n",
-    "logging.basicConfig(stream=sys.stdout, level=logging.INFO)\n",
-    "logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "c5d167a5-81f8-4d2c-b42f-0a190577132f",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# My OpenAI Key\n",
-    "import os\n",
-    "\n",
-    "os.environ[\"OPENAI_API_KEY\"] = \"INSERT OPENAI KEY\""
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "id": "575750cc-479f-4b1f-b93f-4b00ed756d52",
-   "metadata": {},
-   "source": [
-    "## Wikipedia Reader + Keyword Table"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 36,
-   "id": "5f60348e-731d-4a95-bae2-426e184a914e",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "from llama_index import KeywordTableIndex, WikipediaReader"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 37,
-   "id": "952c4659-7fbb-447e-8caf-06916412cc37",
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "page: Covid-19\n"
-     ]
-    }
-   ],
-   "source": [
-    "wiki_docs = WikipediaReader().load_data(pages=[\"Covid-19\"])"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "3be202db-a4c7-41d2-ba7d-446d1f934830",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "index = KeywordTableIndex.from_documents(wiki_docs)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 46,
-   "id": "28d7163e-f26f-4ad8-89d5-9cb7662c4d9c",
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "> Starting query: Which country included tocilizumab in treatment for covid-19?\n",
-      "Extracted keywords: ['tocilizumab', 'treatment', 'covid-19', 'covid', '19']\n",
-      "> Querying with idx: 1105763466456338724: of age or older weighing at least 40 kilograms ...\n",
-      "> Querying with idx: 2820318727532393752: Coronavirus disease 2019 (COVID-19) is a contag...\n",
-      "> Querying with idx: 897499143815831368: if the mask includes an exhalation valve, a wea...\n",
-      "> Querying with idx: 8628144746434065339: pulmonary fibrosis, cystic fibrosis. Evidence s...\n"
-     ]
+    "cells": [
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "52295407",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "import logging\n",
+                "import sys\n",
+                "\n",
+                "logging.basicConfig(stream=sys.stdout, level=logging.INFO)\n",
+                "logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "c5d167a5-81f8-4d2c-b42f-0a190577132f",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# My OpenAI Key\n",
+                "import os\n",
+                "\n",
+                "os.environ[\"OPENAI_API_KEY\"] = \"INSERT OPENAI KEY\""
+            ]
+        },
+        {
+            "cell_type": "markdown",
+            "id": "575750cc-479f-4b1f-b93f-4b00ed756d52",
+            "metadata": {},
+            "source": [
+                "## Wikipedia Reader + Keyword Table"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 36,
+            "id": "5f60348e-731d-4a95-bae2-426e184a914e",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "from llama_index import KeywordTableIndex, WikipediaReader"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 37,
+            "id": "952c4659-7fbb-447e-8caf-06916412cc37",
+            "metadata": {},
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "page: Covid-19\n"
+                    ]
+                }
+            ],
+            "source": [
+                "wiki_docs = WikipediaReader().load_data(pages=[\"Covid-19\"])"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "3be202db-a4c7-41d2-ba7d-446d1f934830",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "index = KeywordTableIndex.from_documents(wiki_docs)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 46,
+            "id": "28d7163e-f26f-4ad8-89d5-9cb7662c4d9c",
+            "metadata": {},
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "> Starting query: Which country included tocilizumab in treatment for covid-19?\n",
+                        "Extracted keywords: ['tocilizumab', 'treatment', 'covid-19', 'covid', '19']\n",
+                        "> Querying with idx: 1105763466456338724: of age or older weighing at least 40 kilograms ...\n",
+                        "> Querying with idx: 2820318727532393752: Coronavirus disease 2019 (COVID-19) is a contag...\n",
+                        "> Querying with idx: 897499143815831368: if the mask includes an exhalation valve, a wea...\n",
+                        "> Querying with idx: 8628144746434065339: pulmonary fibrosis, cystic fibrosis. Evidence s...\n"
+                    ]
+                },
+                {
+                    "data": {
+                        "text/plain": [
+                            "'\\n\\nChina'"
+                        ]
+                    },
+                    "execution_count": 46,
+                    "metadata": {},
+                    "output_type": "execute_result"
+                }
+            ],
+            "source": [
+                "# GPT doesn't find the corresponding evidence in the leaf node, but still gives the correct answer\n",
+                "# set Logging to DEBUG for more detailed outputs\n",
+                "query_engine = index.as_query_engine()\n",
+                "query_engine.query(\"Which country included tocilizumab in treatment for covid-19?\")"
+            ]
+        },
+        {
+            "cell_type": "markdown",
+            "id": "addb0c4d-f1ae-40c1-8b69-5a989609672f",
+            "metadata": {},
+            "source": [
+                "## Wikipedia Reader + List"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "a0fc24e1-eca5-4267-a962-f7fe0fc5c7df",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "from llama_index import SummaryIndex, WikipediaReader"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 3,
+            "id": "872a651a-ca4a-43e2-8b29-e4f667f9d3c5",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "wiki_docs = WikipediaReader().load_data(pages=[\"Covid-19\"])"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 4,
+            "id": "37e85af0-b1c3-4c18-b239-6e32a7acf8d6",
+            "metadata": {},
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "> Adding chunk: Coronavirus disease 2019 (COVID-19) is a contag...\n",
+                        "> Adding chunk: people with COVID‑19 and acute respiratory dist...\n",
+                        "> Adding chunk: encourage or mandate the use of face masks or c...\n",
+                        "> Adding chunk: have elevated liver enzymes, reflecting liver i...\n",
+                        "> Adding chunk: insofar as their drug use may have caused lung ...\n",
+                        "> Adding chunk: treatment of mild-to-moderate COVID‑19 in adult...\n"
+                    ]
+                }
+            ],
+            "source": [
+                "index = SummaryIndex.from_documents(wiki_docs)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 13,
+            "id": "ec0119ef-786e-40ea-89af-f1ca0ad26de6",
+            "metadata": {},
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "> Starting query: Which country included tocilizumab in treatment for covid-19?\n"
+                    ]
+                }
+            ],
+            "source": [
+                "# set Logging to DEBUG for more detailed outputs\n",
+                "# with keyword lookup\n",
+                "from llama_index.indices.postprocessor import KeywordNodePostprocessor\n",
+                "\n",
+                "\n",
+                "query_engine = index.as_query_engine(\n",
+                "    node_postprocessors=[KeywordNodePostprocessor(required_keywords=[\"tocilizumab\"])]\n",
+                ")\n",
+                "response = query_engine.query(\n",
+                "    \"Which country included tocilizumab in treatment for covid-19?\",\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 18,
+            "id": "b4087a84-0939-444f-93f2-a1a7aa32db3f",
+            "metadata": {},
+            "outputs": [
+                {
+                    "data": {
+                        "text/plain": [
+                            "'China'"
+                        ]
+                    },
+                    "metadata": {},
+                    "output_type": "display_data"
+                }
+            ],
+            "source": [
+                "display(response.strip())"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 19,
+            "id": "fb155bc7-cb50-47b6-b92b-895852c2d8f4",
+            "metadata": {},
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "> Starting query: Which country included tocilizumab in treatment for covid-19?\n"
+                    ]
+                }
+            ],
+            "source": [
+                "# set Logging to DEBUG for more detailed outputs\n",
+                "# without keyword lookup\n",
+                "query_engine = index.as_query_engine()\n",
+                "response = query_engine.query(\n",
+                "    \"Which country included tocilizumab in treatment for covid-19?\"\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 20,
+            "id": "5b45c07a-4e76-4a45-86b6-6b2df1ef4f7b",
+            "metadata": {},
+            "outputs": [
+                {
+                    "data": {
+                        "text/plain": [
+                            "'There is no definite answer to this question as different countries have different treatment methods for covid-19. However, according to the context information, it is known that the virus SARS-CoV-2 can cause severe damage to various organs in the human body by inducing systemic inflammation. Therefore, it is possible that tocilizumab, which is a drug that inhibits the virus, may be included in treatment for covid-19 in some countries in order to prevent or reduce the severity of a cytokine storm. Additionally, passive antibodies may be used to treat people with active COVID-19 in order to help them recover.'"
+                        ]
+                    },
+                    "metadata": {},
+                    "output_type": "display_data"
+                }
+            ],
+            "source": [
+                "display(response.strip())"
+            ]
+        }
+    ],
+    "metadata": {
+        "kernelspec": {
+            "display_name": "Python 3 (ipykernel)",
+            "language": "python",
+            "name": "python3"
+        },
+        "language_info": {
+            "codemirror_mode": {
+                "name": "ipython",
+                "version": 3
+            },
+            "file_extension": ".py",
+            "mimetype": "text/x-python",
+            "name": "python",
+            "nbconvert_exporter": "python",
+            "pygments_lexer": "ipython3",
+            "version": "3.11.1"
+        }
     },
-    {
-     "data": {
-      "text/plain": [
-       "'\\n\\nChina'"
-      ]
-     },
-     "execution_count": 46,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "# GPT doesn't find the corresponding evidence in the leaf node, but still gives the correct answer\n",
-    "# set Logging to DEBUG for more detailed outputs\n",
-    "query_engine = index.as_query_engine()\n",
-    "query_engine.query(\"Which country included tocilizumab in treatment for covid-19?\")"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "id": "addb0c4d-f1ae-40c1-8b69-5a989609672f",
-   "metadata": {},
-   "source": [
-    "## Wikipedia Reader + List"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "a0fc24e1-eca5-4267-a962-f7fe0fc5c7df",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "from llama_index import ListIndex, WikipediaReader"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 3,
-   "id": "872a651a-ca4a-43e2-8b29-e4f667f9d3c5",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "wiki_docs = WikipediaReader().load_data(pages=[\"Covid-19\"])"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 4,
-   "id": "37e85af0-b1c3-4c18-b239-6e32a7acf8d6",
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "> Adding chunk: Coronavirus disease 2019 (COVID-19) is a contag...\n",
-      "> Adding chunk: people with COVID‑19 and acute respiratory dist...\n",
-      "> Adding chunk: encourage or mandate the use of face masks or c...\n",
-      "> Adding chunk: have elevated liver enzymes, reflecting liver i...\n",
-      "> Adding chunk: insofar as their drug use may have caused lung ...\n",
-      "> Adding chunk: treatment of mild-to-moderate COVID‑19 in adult...\n"
-     ]
-    }
-   ],
-   "source": [
-    "index = ListIndex.from_documents(wiki_docs)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 13,
-   "id": "ec0119ef-786e-40ea-89af-f1ca0ad26de6",
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "> Starting query: Which country included tocilizumab in treatment for covid-19?\n"
-     ]
-    }
-   ],
-   "source": [
-    "# set Logging to DEBUG for more detailed outputs\n",
-    "# with keyword lookup\n",
-    "from llama_index.indices.postprocessor import KeywordNodePostprocessor\n",
-    "\n",
-    "\n",
-    "query_engine = index.as_query_engine(\n",
-    "    node_postprocessors=[KeywordNodePostprocessor(required_keywords=[\"tocilizumab\"])]\n",
-    ")\n",
-    "response = query_engine.query(\n",
-    "    \"Which country included tocilizumab in treatment for covid-19?\",\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 18,
-   "id": "b4087a84-0939-444f-93f2-a1a7aa32db3f",
-   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "'China'"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    }
-   ],
-   "source": [
-    "display(response.strip())"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 19,
-   "id": "fb155bc7-cb50-47b6-b92b-895852c2d8f4",
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "> Starting query: Which country included tocilizumab in treatment for covid-19?\n"
-     ]
-    }
-   ],
-   "source": [
-    "# set Logging to DEBUG for more detailed outputs\n",
-    "# without keyword lookup\n",
-    "query_engine = index.as_query_engine()\n",
-    "response = query_engine.query(\n",
-    "    \"Which country included tocilizumab in treatment for covid-19?\"\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 20,
-   "id": "5b45c07a-4e76-4a45-86b6-6b2df1ef4f7b",
-   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "'There is no definite answer to this question as different countries have different treatment methods for covid-19. However, according to the context information, it is known that the virus SARS-CoV-2 can cause severe damage to various organs in the human body by inducing systemic inflammation. Therefore, it is possible that tocilizumab, which is a drug that inhibits the virus, may be included in treatment for covid-19 in some countries in order to prevent or reduce the severity of a cytokine storm. Additionally, passive antibodies may be used to treat people with active COVID-19 in order to help them recover.'"
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    }
-   ],
-   "source": [
-    "display(response.strip())"
-   ]
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "Python 3 (ipykernel)",
-   "language": "python",
-   "name": "python3"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 3
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython3",
-   "version": "3.11.1"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 5
-}
+    "nbformat": 4,
+    "nbformat_minor": 5
+}
\ No newline at end of file
diff --git a/experimental/classifier/TitanicModel.ipynb b/experimental/classifier/TitanicModel.ipynb
index d78774c03e7ce2440069bb00ef3702387840709f..89c106c3709a9bfce4f3792578a10bb8fb40f8e4 100644
--- a/experimental/classifier/TitanicModel.ipynb
+++ b/experimental/classifier/TitanicModel.ipynb
@@ -1,558 +1,558 @@
 {
- "cells": [
-  {
-   "cell_type": "code",
-   "execution_count": 1,
-   "id": "f445c1d1-acb9-431e-a7ff-50c41f064359",
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "None of PyTorch, TensorFlow >= 2.0, or Flax have been found. Models won't be available and only tokenizers, configuration and file/data utilities can be used.\n",
-      "[nltk_data] Downloading package stopwords to\n",
-      "[nltk_data]     /Users/jerryliu/nltk_data...\n",
-      "[nltk_data]   Package stopwords is already up-to-date!\n"
-     ]
-    }
-   ],
-   "source": [
-    "from utils import get_train_str, get_train_and_eval_data, get_eval_preds, train_prompt\n",
-    "\n",
-    "import warnings\n",
-    "\n",
-    "warnings.filterwarnings(\"ignore\")\n",
-    "warnings.simplefilter(\"ignore\")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 2,
-   "id": "cf3cbd90-d5e1-4c30-a3bc-8b39fbd85d70",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# load up the titanic data\n",
-    "train_df, train_labels, eval_df, eval_labels = get_train_and_eval_data(\"data/train.csv\")"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "fa2634f9-cb33-4f1e-81f9-3a3b285e2580",
-   "metadata": {
-    "tags": []
-   },
-   "source": [
-    "## Few-shot Prompting with GPT-3 for Titanic Dataset\n",
-    "In this section, we can show how we can prompt GPT-3 on its own (without using GPT Index) to attain ~80% accuracy on Titanic! \n",
-    "\n",
-    "We can do this by simply providing a few example inputs. Or we can simply provide no example inputs at all (zero-shot). Both achieve the same results."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 3,
-   "id": "d0698fd2-1361-49ae-8c17-8124e9b932a4",
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "The following structured data is provided in \"Feature Name\":\"Feature Value\" format.\n",
-      "Each datapoint describes a passenger on the Titanic.\n",
-      "The task is to decide whether the passenger survived.\n",
-      "Some example datapoints are given below: \n",
-      "-------------------\n",
-      "{train_str}\n",
-      "-------------------\n",
-      "Given this, predict whether the following passenger survived. Return answer as a number between 0 or 1. \n",
-      "{eval_str}\n",
-      "Survived: \n"
-     ]
-    }
-   ],
-   "source": [
-    "# first demonstrate the prompt template\n",
-    "print(train_prompt.template)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 4,
-   "id": "4b39e2e7-be07-42f8-a27a-3419e84cfb2c",
-   "metadata": {
-    "scrolled": true,
-    "tags": []
-   },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Example datapoints in `train_str`: \n",
-      "This is the Data:\n",
-      "Age:28.0\n",
-      "Embarked:S\n",
-      "Fare:7.8958\n",
-      "Parch:0\n",
-      "Pclass:3\n",
-      "Sex:male\n",
-      "SibSp:0\n",
-      "This is the correct answer:\n",
-      "Survived: 0\n",
-      "\n",
-      "This is the Data:\n",
-      "Age:17.0\n",
-      "Embarked:S\n",
-      "Fare:7.925\n",
-      "Parch:2\n",
-      "Pclass:3\n",
-      "Sex:female\n",
-      "SibSp:4\n",
-      "This is the correct answer:\n",
-      "Survived: 1\n",
-      "\n",
-      "This is the Data:\n",
-      "Age:30.0\n",
-      "Embarked:S\n",
-      "Fare:16.1\n",
-      "Parch:0\n",
-      "Pclass:3\n",
-      "Sex:male\n",
-      "SibSp:1\n",
-      "This is the correct answer:\n",
-      "Survived: 0\n",
-      "\n",
-      "This is the Data:\n",
-      "Age:22.0\n",
-      "Embarked:S\n",
-      "Fare:7.25\n",
-      "Parch:0\n",
-      "Pclass:3\n",
-      "Sex:male\n",
-      "SibSp:0\n",
-      "This is the correct answer:\n",
-      "Survived: 0\n",
-      "\n",
-      "This is the Data:\n",
-      "Age:45.0\n",
-      "Embarked:S\n",
-      "Fare:13.5\n",
-      "Parch:0\n",
-      "Pclass:2\n",
-      "Sex:female\n",
-      "SibSp:0\n",
-      "This is the correct answer:\n",
-      "Survived: 1\n",
-      "\n",
-      "This is the Data:\n",
-      "Age:25.0\n",
-      "Embarked:S\n",
-      "Fare:0.0\n",
-      "Parch:0\n",
-      "Pclass:3\n",
-      "Sex:male\n",
-      "SibSp:0\n",
-      "This is the correct answer:\n",
-      "Survived: 1\n",
-      "\n",
-      "This is the Data:\n",
-      "Age:18.0\n",
-      "Embarked:S\n",
-      "Fare:20.2125\n",
-      "Parch:1\n",
-      "Pclass:3\n",
-      "Sex:male\n",
-      "SibSp:1\n",
-      "This is the correct answer:\n",
-      "Survived: 0\n",
-      "\n",
-      "This is the Data:\n",
-      "Age:33.0\n",
-      "Embarked:S\n",
-      "Fare:9.5\n",
-      "Parch:0\n",
-      "Pclass:3\n",
-      "Sex:male\n",
-      "SibSp:0\n",
-      "This is the correct answer:\n",
-      "Survived: 0\n",
-      "\n",
-      "This is the Data:\n",
-      "Age:24.0\n",
-      "Embarked:S\n",
-      "Fare:65.0\n",
-      "Parch:2\n",
-      "Pclass:2\n",
-      "Sex:female\n",
-      "SibSp:1\n",
-      "This is the correct answer:\n",
-      "Survived: 1\n",
-      "\n",
-      "This is the Data:\n",
-      "Age:26.0\n",
-      "Embarked:S\n",
-      "Fare:7.925\n",
-      "Parch:0\n",
-      "Pclass:3\n",
-      "Sex:female\n",
-      "SibSp:0\n",
-      "This is the correct answer:\n",
-      "Survived: 1\n"
-     ]
-    }
-   ],
-   "source": [
-    "# Get \"training\" prompt string\n",
-    "train_n = 10\n",
-    "eval_n = 40\n",
-    "train_str = get_train_str(train_df, train_labels, train_n=train_n)\n",
-    "print(f\"Example datapoints in `train_str`: \\n{train_str}\")"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "819a06f7-3171-4edb-b90c-0a3eae308a04",
-   "metadata": {},
-   "source": [
-    "#### Do evaluation with the training prompt string"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "4a7f2202-518c-41a3-80ab-1e98bbcca903",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "from sklearn.metrics import accuracy_score\n",
-    "import numpy as np\n",
-    "\n",
-    "eval_preds = get_eval_preds(train_prompt, train_str, eval_df, n=eval_n)\n",
-    "eval_label_chunk = eval_labels[:eval_n]"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 7,
-   "id": "64323a4d-6eea-4e40-9eac-b2deed60192b",
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "ACCURACY: 0.8\n"
-     ]
-    }
-   ],
-   "source": [
-    "acc = accuracy_score(eval_label_chunk, np.array(eval_preds).round())\n",
-    "print(f\"ACCURACY: {acc}\")"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "11790d28-8f34-42dd-b11f-6aad21fd5f46",
-   "metadata": {},
-   "source": [
-    "#### Do evaluation with no training prompt string! "
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "aaf993e5-c363-4f18-a28f-09761e49cb6d",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "from sklearn.metrics import accuracy_score\n",
-    "import numpy as np\n",
-    "\n",
-    "eval_preds_null = get_eval_preds(train_prompt, \"\", eval_df, n=eval_n)\n",
-    "eval_label_chunk = eval_labels[:eval_n]"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 11,
-   "id": "c3b8bcd5-5972-4ce5-9aa1-57460cdde199",
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "ACCURACY: 0.8\n"
-     ]
-    }
-   ],
-   "source": [
-    "acc_null = accuracy_score(eval_label_chunk, np.array(eval_preds_null).round())\n",
-    "print(f\"ACCURACY: {acc_null}\")"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "8f0a5e4b-e627-4b47-a807-939813596594",
-   "metadata": {
-    "tags": []
-   },
-   "source": [
-    "## Extending with List Index"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "42a1ca28-96e9-4cd2-bd48-0673917ad057",
-   "metadata": {},
-   "source": [
-    "#### Build Index"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 12,
-   "id": "6c59b030-855d-4e27-89c3-74c972d1bf19",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "from llama_index import ListIndex\n",
-    "from llama_index.schema import Document"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 13,
-   "id": "8f9556de-e323-4318-bb71-cff75bf8c3c1",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "index = ListIndex([])"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "e27720fc-af36-40fd-8c55-41485248aa9f",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# insertion into index\n",
-    "batch_size = 40\n",
-    "num_train_chunks = 5\n",
-    "\n",
-    "for i in range(num_train_chunks):\n",
-    "    print(f\"Inserting chunk: {i}/{num_train_chunks}\")\n",
-    "    start_idx = i * batch_size\n",
-    "    end_idx = (i + 1) * batch_size\n",
-    "    train_batch = train_df.iloc[start_idx : end_idx + batch_size]\n",
-    "    labels_batch = train_labels.iloc[start_idx : end_idx + batch_size]\n",
-    "    all_train_str = get_train_str(train_batch, labels_batch, train_n=batch_size)\n",
-    "    index.insert(Document(text=all_train_str))"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "e78db088-6649-44db-b52a-766316713b96",
-   "metadata": {},
-   "source": [
-    "#### Query Index"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 15,
-   "id": "9cb90564-1de2-412f-8318-d5280855004e",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "from utils import query_str, qa_data_prompt, refine_prompt"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 16,
-   "id": "77c1ae36-e0af-47bc-a656-4971af699755",
-   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "'Which is the relationship between these features and predicting survival?'"
-      ]
-     },
-     "execution_count": 16,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "query_str"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 17,
-   "id": "c403710f-d4b3-4287-94f5-e275ea19b476",
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "> Starting query: Which is the relationship between these features and predicting survival?\n"
-     ]
-    }
-   ],
-   "source": [
-    "query_engine = index.as_query_engine(\n",
-    "    text_qa_template=qa_data_prompt,\n",
-    "    refine_template=refine_prompt,\n",
-    ")\n",
-    "response = query_engine.query(\n",
-    "    query_str,\n",
-    ")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 19,
-   "id": "d2545ab1-980a-4fbd-8add-7ef957801644",
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "\n",
-      "\n",
-      "There is no definitive answer to this question, as the relationship between the features and predicting survival will vary depending on the data. However, some possible relationships include: age (younger passengers are more likely to survive), sex (females are more likely to survive), fare (passengers who paid more for their ticket are more likely to survive), and pclass (passengers in first or second class are more likely to survive).\n"
-     ]
-    }
-   ],
-   "source": [
-    "print(response)"
-   ]
-  },
-  {
-   "attachments": {},
-   "cell_type": "markdown",
-   "id": "d0d7d260-2283-49f6-ac40-35c7071cc54d",
-   "metadata": {},
-   "source": [
-    "#### Get Predictions and Evaluate"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 26,
-   "id": "e7b98057-957c-48ef-be85-59ff9813d201",
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "The following structured data is provided in \"Feature Name\":\"Feature Value\" format.\n",
-      "Each datapoint describes a passenger on the Titanic.\n",
-      "The task is to decide whether the passenger survived.\n",
-      "We discovered the following relationship between features and survival:\n",
-      "-------------------\n",
-      "{train_str}\n",
-      "-------------------\n",
-      "Given this, predict whether the following passenger survived. \n",
-      "Return answer as a number between 0 or 1. \n",
-      "{eval_str}\n",
-      "Survived: \n",
-      "\n",
-      "\n",
-      "`train_str`: \n",
-      "\n",
-      "There is no definitive answer to this question, as the relationship between the features and predicting survival will vary depending on the data. However, some possible relationships include: age (younger passengers are more likely to survive), sex (females are more likely to survive), fare (passengers who paid more for their ticket are more likely to survive), and pclass (passengers in first or second class are more likely to survive).\n"
-     ]
-    }
-   ],
-   "source": [
-    "# get eval preds\n",
-    "from utils import train_prompt_with_context\n",
-    "\n",
-    "train_str = response\n",
-    "print(train_prompt_with_context.template)\n",
-    "print(f\"\\n\\n`train_str`: {train_str}\")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "659c6a3f-1c5d-4314-87dc-908e76d50e4a",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# do evaluation\n",
-    "from sklearn.metrics import accuracy_score\n",
-    "import numpy as np\n",
-    "\n",
-    "eval_n = 40\n",
-    "eval_preds = get_eval_preds(train_prompt_with_context, train_str, eval_df, n=eval_n)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 28,
-   "id": "7424e7d3-2576-42bc-b626-cf8088265004",
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "ACCURACY: 0.85\n"
-     ]
-    }
-   ],
-   "source": [
-    "eval_label_chunk = eval_labels[:eval_n]\n",
-    "acc = accuracy_score(eval_label_chunk, np.array(eval_preds).round())\n",
-    "print(f\"ACCURACY: {acc}\")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "e010b497-eeed-4142-a8ac-f5545e85fcc2",
-   "metadata": {},
-   "outputs": [],
-   "source": []
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "gpt_retrieve_venv",
-   "language": "python",
-   "name": "gpt_retrieve_venv"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 3
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython3",
-   "version": "3.8.4"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 5
+    "cells": [
+        {
+            "cell_type": "code",
+            "execution_count": 1,
+            "id": "f445c1d1-acb9-431e-a7ff-50c41f064359",
+            "metadata": {},
+            "outputs": [
+                {
+                    "name": "stderr",
+                    "output_type": "stream",
+                    "text": [
+                        "None of PyTorch, TensorFlow >= 2.0, or Flax have been found. Models won't be available and only tokenizers, configuration and file/data utilities can be used.\n",
+                        "[nltk_data] Downloading package stopwords to\n",
+                        "[nltk_data]     /Users/jerryliu/nltk_data...\n",
+                        "[nltk_data]   Package stopwords is already up-to-date!\n"
+                    ]
+                }
+            ],
+            "source": [
+                "from utils import get_train_str, get_train_and_eval_data, get_eval_preds, train_prompt\n",
+                "\n",
+                "import warnings\n",
+                "\n",
+                "warnings.filterwarnings(\"ignore\")\n",
+                "warnings.simplefilter(\"ignore\")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 2,
+            "id": "cf3cbd90-d5e1-4c30-a3bc-8b39fbd85d70",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# load up the titanic data\n",
+                "train_df, train_labels, eval_df, eval_labels = get_train_and_eval_data(\"data/train.csv\")"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "fa2634f9-cb33-4f1e-81f9-3a3b285e2580",
+            "metadata": {
+                "tags": []
+            },
+            "source": [
+                "## Few-shot Prompting with GPT-3 for Titanic Dataset\n",
+                "In this section, we can show how we can prompt GPT-3 on its own (without using GPT Index) to attain ~80% accuracy on Titanic! \n",
+                "\n",
+                "We can do this by simply providing a few example inputs. Or we can simply provide no example inputs at all (zero-shot). Both achieve the same results."
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 3,
+            "id": "d0698fd2-1361-49ae-8c17-8124e9b932a4",
+            "metadata": {},
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "The following structured data is provided in \"Feature Name\":\"Feature Value\" format.\n",
+                        "Each datapoint describes a passenger on the Titanic.\n",
+                        "The task is to decide whether the passenger survived.\n",
+                        "Some example datapoints are given below: \n",
+                        "-------------------\n",
+                        "{train_str}\n",
+                        "-------------------\n",
+                        "Given this, predict whether the following passenger survived. Return answer as a number between 0 or 1. \n",
+                        "{eval_str}\n",
+                        "Survived: \n"
+                    ]
+                }
+            ],
+            "source": [
+                "# first demonstrate the prompt template\n",
+                "print(train_prompt.template)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 4,
+            "id": "4b39e2e7-be07-42f8-a27a-3419e84cfb2c",
+            "metadata": {
+                "scrolled": true,
+                "tags": []
+            },
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "Example datapoints in `train_str`: \n",
+                        "This is the Data:\n",
+                        "Age:28.0\n",
+                        "Embarked:S\n",
+                        "Fare:7.8958\n",
+                        "Parch:0\n",
+                        "Pclass:3\n",
+                        "Sex:male\n",
+                        "SibSp:0\n",
+                        "This is the correct answer:\n",
+                        "Survived: 0\n",
+                        "\n",
+                        "This is the Data:\n",
+                        "Age:17.0\n",
+                        "Embarked:S\n",
+                        "Fare:7.925\n",
+                        "Parch:2\n",
+                        "Pclass:3\n",
+                        "Sex:female\n",
+                        "SibSp:4\n",
+                        "This is the correct answer:\n",
+                        "Survived: 1\n",
+                        "\n",
+                        "This is the Data:\n",
+                        "Age:30.0\n",
+                        "Embarked:S\n",
+                        "Fare:16.1\n",
+                        "Parch:0\n",
+                        "Pclass:3\n",
+                        "Sex:male\n",
+                        "SibSp:1\n",
+                        "This is the correct answer:\n",
+                        "Survived: 0\n",
+                        "\n",
+                        "This is the Data:\n",
+                        "Age:22.0\n",
+                        "Embarked:S\n",
+                        "Fare:7.25\n",
+                        "Parch:0\n",
+                        "Pclass:3\n",
+                        "Sex:male\n",
+                        "SibSp:0\n",
+                        "This is the correct answer:\n",
+                        "Survived: 0\n",
+                        "\n",
+                        "This is the Data:\n",
+                        "Age:45.0\n",
+                        "Embarked:S\n",
+                        "Fare:13.5\n",
+                        "Parch:0\n",
+                        "Pclass:2\n",
+                        "Sex:female\n",
+                        "SibSp:0\n",
+                        "This is the correct answer:\n",
+                        "Survived: 1\n",
+                        "\n",
+                        "This is the Data:\n",
+                        "Age:25.0\n",
+                        "Embarked:S\n",
+                        "Fare:0.0\n",
+                        "Parch:0\n",
+                        "Pclass:3\n",
+                        "Sex:male\n",
+                        "SibSp:0\n",
+                        "This is the correct answer:\n",
+                        "Survived: 1\n",
+                        "\n",
+                        "This is the Data:\n",
+                        "Age:18.0\n",
+                        "Embarked:S\n",
+                        "Fare:20.2125\n",
+                        "Parch:1\n",
+                        "Pclass:3\n",
+                        "Sex:male\n",
+                        "SibSp:1\n",
+                        "This is the correct answer:\n",
+                        "Survived: 0\n",
+                        "\n",
+                        "This is the Data:\n",
+                        "Age:33.0\n",
+                        "Embarked:S\n",
+                        "Fare:9.5\n",
+                        "Parch:0\n",
+                        "Pclass:3\n",
+                        "Sex:male\n",
+                        "SibSp:0\n",
+                        "This is the correct answer:\n",
+                        "Survived: 0\n",
+                        "\n",
+                        "This is the Data:\n",
+                        "Age:24.0\n",
+                        "Embarked:S\n",
+                        "Fare:65.0\n",
+                        "Parch:2\n",
+                        "Pclass:2\n",
+                        "Sex:female\n",
+                        "SibSp:1\n",
+                        "This is the correct answer:\n",
+                        "Survived: 1\n",
+                        "\n",
+                        "This is the Data:\n",
+                        "Age:26.0\n",
+                        "Embarked:S\n",
+                        "Fare:7.925\n",
+                        "Parch:0\n",
+                        "Pclass:3\n",
+                        "Sex:female\n",
+                        "SibSp:0\n",
+                        "This is the correct answer:\n",
+                        "Survived: 1\n"
+                    ]
+                }
+            ],
+            "source": [
+                "# Get \"training\" prompt string\n",
+                "train_n = 10\n",
+                "eval_n = 40\n",
+                "train_str = get_train_str(train_df, train_labels, train_n=train_n)\n",
+                "print(f\"Example datapoints in `train_str`: \\n{train_str}\")"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "819a06f7-3171-4edb-b90c-0a3eae308a04",
+            "metadata": {},
+            "source": [
+                "#### Do evaluation with the training prompt string"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "4a7f2202-518c-41a3-80ab-1e98bbcca903",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "from sklearn.metrics import accuracy_score\n",
+                "import numpy as np\n",
+                "\n",
+                "eval_preds = get_eval_preds(train_prompt, train_str, eval_df, n=eval_n)\n",
+                "eval_label_chunk = eval_labels[:eval_n]"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 7,
+            "id": "64323a4d-6eea-4e40-9eac-b2deed60192b",
+            "metadata": {},
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "ACCURACY: 0.8\n"
+                    ]
+                }
+            ],
+            "source": [
+                "acc = accuracy_score(eval_label_chunk, np.array(eval_preds).round())\n",
+                "print(f\"ACCURACY: {acc}\")"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "11790d28-8f34-42dd-b11f-6aad21fd5f46",
+            "metadata": {},
+            "source": [
+                "#### Do evaluation with no training prompt string! "
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "aaf993e5-c363-4f18-a28f-09761e49cb6d",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "from sklearn.metrics import accuracy_score\n",
+                "import numpy as np\n",
+                "\n",
+                "eval_preds_null = get_eval_preds(train_prompt, \"\", eval_df, n=eval_n)\n",
+                "eval_label_chunk = eval_labels[:eval_n]"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 11,
+            "id": "c3b8bcd5-5972-4ce5-9aa1-57460cdde199",
+            "metadata": {},
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "ACCURACY: 0.8\n"
+                    ]
+                }
+            ],
+            "source": [
+                "acc_null = accuracy_score(eval_label_chunk, np.array(eval_preds_null).round())\n",
+                "print(f\"ACCURACY: {acc_null}\")"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "8f0a5e4b-e627-4b47-a807-939813596594",
+            "metadata": {
+                "tags": []
+            },
+            "source": [
+                "## Extending with List Index"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "42a1ca28-96e9-4cd2-bd48-0673917ad057",
+            "metadata": {},
+            "source": [
+                "#### Build Index"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 12,
+            "id": "6c59b030-855d-4e27-89c3-74c972d1bf19",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "from llama_index import SummaryIndex\n",
+                "from llama_index.schema import Document"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 13,
+            "id": "8f9556de-e323-4318-bb71-cff75bf8c3c1",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "index = SummaryIndex([])"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "e27720fc-af36-40fd-8c55-41485248aa9f",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# insertion into index\n",
+                "batch_size = 40\n",
+                "num_train_chunks = 5\n",
+                "\n",
+                "for i in range(num_train_chunks):\n",
+                "    print(f\"Inserting chunk: {i}/{num_train_chunks}\")\n",
+                "    start_idx = i * batch_size\n",
+                "    end_idx = (i + 1) * batch_size\n",
+                "    train_batch = train_df.iloc[start_idx : end_idx + batch_size]\n",
+                "    labels_batch = train_labels.iloc[start_idx : end_idx + batch_size]\n",
+                "    all_train_str = get_train_str(train_batch, labels_batch, train_n=batch_size)\n",
+                "    index.insert(Document(text=all_train_str))"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "e78db088-6649-44db-b52a-766316713b96",
+            "metadata": {},
+            "source": [
+                "#### Query Index"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 15,
+            "id": "9cb90564-1de2-412f-8318-d5280855004e",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "from utils import query_str, qa_data_prompt, refine_prompt"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 16,
+            "id": "77c1ae36-e0af-47bc-a656-4971af699755",
+            "metadata": {},
+            "outputs": [
+                {
+                    "data": {
+                        "text/plain": [
+                            "'Which is the relationship between these features and predicting survival?'"
+                        ]
+                    },
+                    "execution_count": 16,
+                    "metadata": {},
+                    "output_type": "execute_result"
+                }
+            ],
+            "source": [
+                "query_str"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 17,
+            "id": "c403710f-d4b3-4287-94f5-e275ea19b476",
+            "metadata": {},
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "> Starting query: Which is the relationship between these features and predicting survival?\n"
+                    ]
+                }
+            ],
+            "source": [
+                "query_engine = index.as_query_engine(\n",
+                "    text_qa_template=qa_data_prompt,\n",
+                "    refine_template=refine_prompt,\n",
+                ")\n",
+                "response = query_engine.query(\n",
+                "    query_str,\n",
+                ")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 19,
+            "id": "d2545ab1-980a-4fbd-8add-7ef957801644",
+            "metadata": {},
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "\n",
+                        "\n",
+                        "There is no definitive answer to this question, as the relationship between the features and predicting survival will vary depending on the data. However, some possible relationships include: age (younger passengers are more likely to survive), sex (females are more likely to survive), fare (passengers who paid more for their ticket are more likely to survive), and pclass (passengers in first or second class are more likely to survive).\n"
+                    ]
+                }
+            ],
+            "source": [
+                "print(response)"
+            ]
+        },
+        {
+            "attachments": {},
+            "cell_type": "markdown",
+            "id": "d0d7d260-2283-49f6-ac40-35c7071cc54d",
+            "metadata": {},
+            "source": [
+                "#### Get Predictions and Evaluate"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 26,
+            "id": "e7b98057-957c-48ef-be85-59ff9813d201",
+            "metadata": {},
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "The following structured data is provided in \"Feature Name\":\"Feature Value\" format.\n",
+                        "Each datapoint describes a passenger on the Titanic.\n",
+                        "The task is to decide whether the passenger survived.\n",
+                        "We discovered the following relationship between features and survival:\n",
+                        "-------------------\n",
+                        "{train_str}\n",
+                        "-------------------\n",
+                        "Given this, predict whether the following passenger survived. \n",
+                        "Return answer as a number between 0 or 1. \n",
+                        "{eval_str}\n",
+                        "Survived: \n",
+                        "\n",
+                        "\n",
+                        "`train_str`: \n",
+                        "\n",
+                        "There is no definitive answer to this question, as the relationship between the features and predicting survival will vary depending on the data. However, some possible relationships include: age (younger passengers are more likely to survive), sex (females are more likely to survive), fare (passengers who paid more for their ticket are more likely to survive), and pclass (passengers in first or second class are more likely to survive).\n"
+                    ]
+                }
+            ],
+            "source": [
+                "# get eval preds\n",
+                "from utils import train_prompt_with_context\n",
+                "\n",
+                "train_str = response\n",
+                "print(train_prompt_with_context.template)\n",
+                "print(f\"\\n\\n`train_str`: {train_str}\")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "659c6a3f-1c5d-4314-87dc-908e76d50e4a",
+            "metadata": {},
+            "outputs": [],
+            "source": [
+                "# do evaluation\n",
+                "from sklearn.metrics import accuracy_score\n",
+                "import numpy as np\n",
+                "\n",
+                "eval_n = 40\n",
+                "eval_preds = get_eval_preds(train_prompt_with_context, train_str, eval_df, n=eval_n)"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": 28,
+            "id": "7424e7d3-2576-42bc-b626-cf8088265004",
+            "metadata": {},
+            "outputs": [
+                {
+                    "name": "stdout",
+                    "output_type": "stream",
+                    "text": [
+                        "ACCURACY: 0.85\n"
+                    ]
+                }
+            ],
+            "source": [
+                "eval_label_chunk = eval_labels[:eval_n]\n",
+                "acc = accuracy_score(eval_label_chunk, np.array(eval_preds).round())\n",
+                "print(f\"ACCURACY: {acc}\")"
+            ]
+        },
+        {
+            "cell_type": "code",
+            "execution_count": null,
+            "id": "e010b497-eeed-4142-a8ac-f5545e85fcc2",
+            "metadata": {},
+            "outputs": [],
+            "source": []
+        }
+    ],
+    "metadata": {
+        "kernelspec": {
+            "display_name": "gpt_retrieve_venv",
+            "language": "python",
+            "name": "gpt_retrieve_venv"
+        },
+        "language_info": {
+            "codemirror_mode": {
+                "name": "ipython",
+                "version": 3
+            },
+            "file_extension": ".py",
+            "mimetype": "text/x-python",
+            "name": "python",
+            "nbconvert_exporter": "python",
+            "pygments_lexer": "ipython3",
+            "version": "3.8.4"
+        }
+    },
+    "nbformat": 4,
+    "nbformat_minor": 5
 }
diff --git a/llama_index/__init__.py b/llama_index/__init__.py
index 19ff6e0d8933dbfc7681ceebf029adc099de11df..b03d200d7210f24cada587b862801d5ba936d058 100644
--- a/llama_index/__init__.py
+++ b/llama_index/__init__.py
@@ -29,7 +29,7 @@ from llama_index.indices.knowledge_graph import (
     KnowledgeGraphIndex,
     GPTKnowledgeGraphIndex,
 )
-from llama_index.indices.list import ListIndex, GPTListIndex
+from llama_index.indices.list import ListIndex, GPTListIndex, SummaryIndex
 from llama_index.indices.tree import TreeIndex, GPTTreeIndex
 from llama_index.indices.vector_store import VectorStoreIndex, GPTVectorStoreIndex
 from llama_index.indices.document_summary import (
@@ -151,8 +151,8 @@ __all__ = [
     "ServiceContext",
     "ComposableGraph",
     # indices
+    "SummaryIndex",
     "VectorStoreIndex",
-    "ListIndex",
     "SimpleKeywordTableIndex",
     "KeywordTableIndex",
     "RAKEKeywordTableIndex",
@@ -168,6 +168,7 @@ __all__ = [
     "GPTSimpleKeywordTableIndex",
     "GPTRAKEKeywordTableIndex",
     "GPTListIndex",
+    "ListIndex",
     "GPTEmptyIndex",
     "GPTTreeIndex",
     "GPTVectorStoreIndex",
diff --git a/llama_index/callbacks/wandb_callback.py b/llama_index/callbacks/wandb_callback.py
index 36d299976069d79aeab8507bc5bfab9e514597a7..ba33894c9667c3f121950bf0d41e9a35e3c67db0 100644
--- a/llama_index/callbacks/wandb_callback.py
+++ b/llama_index/callbacks/wandb_callback.py
@@ -35,7 +35,7 @@ if TYPE_CHECKING:
         GPTKeywordTableIndex,
         GPTSimpleKeywordTableIndex,
         GPTRAKEKeywordTableIndex,
-        GPTListIndex,
+        SummaryIndex,
         GPTEmptyIndex,
         GPTTreeIndex,
         GPTVectorStoreIndex,
@@ -47,7 +47,7 @@ if TYPE_CHECKING:
         GPTKeywordTableIndex,
         GPTSimpleKeywordTableIndex,
         GPTRAKEKeywordTableIndex,
-        GPTListIndex,
+        SummaryIndex,
         GPTEmptyIndex,
         GPTTreeIndex,
         GPTVectorStoreIndex,
@@ -133,7 +133,7 @@ class WandbCallbackHandler(BaseCallbackHandler):
             GPTKeywordTableIndex,
             GPTSimpleKeywordTableIndex,
             GPTRAKEKeywordTableIndex,
-            GPTListIndex,
+            SummaryIndex,
             GPTEmptyIndex,
             GPTTreeIndex,
             GPTVectorStoreIndex,
@@ -145,7 +145,7 @@ class WandbCallbackHandler(BaseCallbackHandler):
             GPTKeywordTableIndex,
             GPTSimpleKeywordTableIndex,
             GPTRAKEKeywordTableIndex,
-            GPTListIndex,
+            SummaryIndex,
             GPTEmptyIndex,
             GPTTreeIndex,
             GPTVectorStoreIndex,
diff --git a/llama_index/composability/joint_qa_summary.py b/llama_index/composability/joint_qa_summary.py
index 17c17825d8bf88f3afdbaf0f199f6b3776a9e977..b767f9078f19f5e78569a6e65e58acf9ec5e48e3 100644
--- a/llama_index/composability/joint_qa_summary.py
+++ b/llama_index/composability/joint_qa_summary.py
@@ -3,7 +3,7 @@
 
 from typing import Optional, Sequence
 
-from llama_index.indices.list.base import ListIndex
+from llama_index.indices.list.base import SummaryIndex
 from llama_index.indices.service_context import ServiceContext
 from llama_index.indices.vector_store import VectorStoreIndex
 from llama_index.query_engine.router_query_engine import RouterQueryEngine
@@ -67,7 +67,7 @@ class QASummaryQueryEngineBuilder:
             service_context=self._service_context,
             storage_context=self._storage_context,
         )
-        list_index = ListIndex(
+        list_index = SummaryIndex(
             nodes,
             service_context=self._service_context,
             storage_context=self._storage_context,
diff --git a/llama_index/evaluation/base.py b/llama_index/evaluation/base.py
index 047fec057bd5f324127c58a3976d8e1c97fef54b..c48dbbe464131501c386bd80afaddfe95f642141 100644
--- a/llama_index/evaluation/base.py
+++ b/llama_index/evaluation/base.py
@@ -6,7 +6,7 @@ from dataclasses import dataclass
 from typing import List, Optional
 
 from llama_index.indices.base import ServiceContext
-from llama_index.indices.list.base import ListIndex
+from llama_index.indices.list.base import SummaryIndex
 from llama_index.prompts import PromptTemplate
 from llama_index.schema import Document
 from llama_index.response.schema import Response
@@ -152,7 +152,9 @@ class ResponseEvaluator:
         answer = str(response)
 
         context = self.get_context(response)
-        index = ListIndex.from_documents(context, service_context=self.service_context)
+        index = SummaryIndex.from_documents(
+            context, service_context=self.service_context
+        )
         response_txt = ""
 
         EVAL_PROMPT_TMPL = PromptTemplate(DEFAULT_EVAL_PROMPT)
@@ -194,7 +196,7 @@ class ResponseEvaluator:
         response_texts = []
 
         for context in context_list:
-            index = ListIndex.from_documents(
+            index = SummaryIndex.from_documents(
                 [context], service_context=self.service_context
             )
             response_txt = ""
@@ -283,7 +285,9 @@ class QueryResponseEvaluator(BaseEvaluator):
         answer = str(response)
 
         context = self.get_context(response)
-        index = ListIndex.from_documents(context, service_context=self.service_context)
+        index = SummaryIndex.from_documents(
+            context, service_context=self.service_context
+        )
 
         QUERY_RESPONSE_EVAL_PROMPT_TMPL = PromptTemplate(QUERY_RESPONSE_EVAL_PROMPT)
         QUERY_RESPONSE_REFINE_PROMPT_TMPL = PromptTemplate(QUERY_RESPONSE_REFINE_PROMPT)
@@ -330,7 +334,7 @@ class QueryResponseEvaluator(BaseEvaluator):
         response_texts = []
 
         for context in context_list:
-            index = ListIndex.from_documents(
+            index = SummaryIndex.from_documents(
                 [context], service_context=self.service_context
             )
             response_txt = ""
diff --git a/llama_index/evaluation/dataset_generation.py b/llama_index/evaluation/dataset_generation.py
index 195714d484b737691db9f080fbfd6d772a64367f..9c9df4de7aa276a851cca31f3b9b40dcbd434cb4 100644
--- a/llama_index/evaluation/dataset_generation.py
+++ b/llama_index/evaluation/dataset_generation.py
@@ -7,7 +7,7 @@ from typing import List, Optional
 
 from llama_index import (
     Document,
-    ListIndex,
+    SummaryIndex,
     ServiceContext,
 )
 from llama_index.llms.openai import OpenAI
@@ -116,7 +116,7 @@ class DatasetGenerator:
         for node in nodes:
             if num is not None and len(questions) >= num:
                 break
-            index = ListIndex.from_documents(
+            index = SummaryIndex.from_documents(
                 [
                     Document(
                         text=node.get_content(metadata_mode=MetadataMode.NONE),
diff --git a/llama_index/indices/__init__.py b/llama_index/indices/__init__.py
index 178cb4f2c27c3a7ef76b506ffffd53bfd2a19229..2608f98694bd97991f4ed2ab1f237c765d6ee3ac 100644
--- a/llama_index/indices/__init__.py
+++ b/llama_index/indices/__init__.py
@@ -13,14 +13,14 @@ from llama_index.indices.keyword_table.simple_base import (
     SimpleKeywordTableIndex,
     GPTSimpleKeywordTableIndex,
 )
-from llama_index.indices.list.base import GPTListIndex, ListIndex
+from llama_index.indices.list.base import GPTListIndex, SummaryIndex, ListIndex
 from llama_index.indices.tree.base import TreeIndex, GPTTreeIndex
 
 __all__ = [
     "KeywordTableIndex",
     "SimpleKeywordTableIndex",
     "RAKEKeywordTableIndex",
-    "ListIndex",
+    "SummaryIndex",
     "TreeIndex",
     # legacy
     "GPTKeywordTableIndex",
@@ -28,4 +28,5 @@ __all__ = [
     "GPTRAKEKeywordTableIndex",
     "GPTListIndex",
     "GPTTreeIndex",
+    "ListIndex",
 ]
diff --git a/llama_index/indices/document_summary/retrievers.py b/llama_index/indices/document_summary/retrievers.py
index 8e729e901fc9d1e186434cb6bebe26c4f0059ae6..f93ce132b8afb6d5cb883813d57f6e4b6d6369ce 100644
--- a/llama_index/indices/document_summary/retrievers.py
+++ b/llama_index/indices/document_summary/retrievers.py
@@ -97,7 +97,7 @@ class DocumentSummaryIndexEmbeddingRetriever(BaseRetriever):
 
     Generates embeddings on the fly, attaches to each summary node.
 
-    NOTE: implementation is similar to ListIndexEmbeddingRetriever.
+    NOTE: implementation is similar to SummaryIndexEmbeddingRetriever.
 
     Args:
         index (DocumentSummaryIndex): The index to retrieve from.
diff --git a/llama_index/indices/list/README.md b/llama_index/indices/list/README.md
index e9103216c47d52578e9311f16013653ddbc8df59..f36256b3f77c8755b64f48a34c4046f0d95123f8 100644
--- a/llama_index/indices/list/README.md
+++ b/llama_index/indices/list/README.md
@@ -1,8 +1,8 @@
-## 🔗 ListIndex
+## 🔗 SummaryIndex
 
 ### Index Construction
 
-ListIndex is a simple list-based data structure. During index construction, ListIndex takes in a dataset of text documents as input, chunks them up into smaller document chunks, and concatenates them into a list. GPT is not called at all during index construction.
+SummaryIndex is a simple list-based data structure. During index construction, SummaryIndex takes in a dataset of text documents as input, chunks them up into smaller document chunks, and concatenates them into a list. GPT is not called at all during index construction.
 
 ### Query
 
@@ -11,11 +11,11 @@ During query-time, List Index constructs an answer using the _create and refine_
 **Usage**
 
 ```python
-from llama_index import ListIndex, SimpleDirectoryReader
+from llama_index import SummaryIndex, SimpleDirectoryReader
 
 # build index
 documents = SimpleDirectoryReader('data').load_data()
-index = ListIndex.from_documents(documents)
+index = SummaryIndex.from_documents(documents)
 # query
 query_engine = index.as_query_engine()
 response = query_engine.query("<question text>")
diff --git a/llama_index/indices/list/__init__.py b/llama_index/indices/list/__init__.py
index 5da91abf3824fc2d32e4d5a6ab4765e158b819a9..231c5f35d1abfc2fef8a84a2fe354d4a8973acd5 100644
--- a/llama_index/indices/list/__init__.py
+++ b/llama_index/indices/list/__init__.py
@@ -1,17 +1,24 @@
 """List-based data structures."""
 
-from llama_index.indices.list.base import GPTListIndex, ListIndex
+from llama_index.indices.list.base import GPTListIndex, ListIndex, SummaryIndex
 from llama_index.indices.list.retrievers import (
+    SummaryIndexRetriever,
+    SummaryIndexEmbeddingRetriever,
+    SummaryIndexLLMRetriever,
     ListIndexEmbeddingRetriever,
     ListIndexLLMRetriever,
     ListIndexRetriever,
 )
 
 __all__ = [
+    "SummaryIndex",
+    "SummaryIndexRetriever",
+    "SummaryIndexEmbeddingRetriever",
+    "SummaryIndexLLMRetriever",
+    # legacy
     "ListIndex",
+    "GPTListIndex",
     "ListIndexRetriever",
     "ListIndexEmbeddingRetriever",
     "ListIndexLLMRetriever",
-    # legacy
-    "GPTListIndex",
 ]
diff --git a/llama_index/indices/list/base.py b/llama_index/indices/list/base.py
index 1eb213cb6d1b34d24364068e9451177fbaa5c43c..2b5e4a495c61dc94eb15fe95b8577ce34b88ca6c 100644
--- a/llama_index/indices/list/base.py
+++ b/llama_index/indices/list/base.py
@@ -23,10 +23,10 @@ class ListRetrieverMode(str, Enum):
     LLM = "llm"
 
 
-class ListIndex(BaseIndex[IndexList]):
-    """List Index.
+class SummaryIndex(BaseIndex[IndexList]):
+    """Summary Index.
 
-    The list index is a simple data structure where nodes are stored in
+    The summary index is a simple data structure where nodes are stored in
     a sequence. During index construction, the document texts are
     chunked up, converted to nodes, and stored in a list.
 
@@ -67,17 +67,17 @@ class ListIndex(BaseIndex[IndexList]):
         **kwargs: Any,
     ) -> BaseRetriever:
         from llama_index.indices.list.retrievers import (
-            ListIndexEmbeddingRetriever,
-            ListIndexLLMRetriever,
-            ListIndexRetriever,
+            SummaryIndexEmbeddingRetriever,
+            SummaryIndexLLMRetriever,
+            SummaryIndexRetriever,
         )
 
         if retriever_mode == ListRetrieverMode.DEFAULT:
-            return ListIndexRetriever(self, **kwargs)
+            return SummaryIndexRetriever(self, **kwargs)
         elif retriever_mode == ListRetrieverMode.EMBEDDING:
-            return ListIndexEmbeddingRetriever(self, **kwargs)
+            return SummaryIndexEmbeddingRetriever(self, **kwargs)
         elif retriever_mode == ListRetrieverMode.LLM:
-            return ListIndexLLMRetriever(self, **kwargs)
+            return SummaryIndexLLMRetriever(self, **kwargs)
         else:
             raise ValueError(f"Unknown retriever mode: {retriever_mode}")
 
@@ -133,4 +133,7 @@ class ListIndex(BaseIndex[IndexList]):
 
 
 # Legacy
-GPTListIndex = ListIndex
+GPTListIndex = SummaryIndex
+
+# New name
+ListIndex = SummaryIndex
diff --git a/llama_index/indices/list/retrievers.py b/llama_index/indices/list/retrievers.py
index 7d1eefcc7e9f7f5e8552421ea92119d0fa66dd74..b21340e0813541e144605407c9722d6d10d945d2 100644
--- a/llama_index/indices/list/retrievers.py
+++ b/llama_index/indices/list/retrievers.py
@@ -1,9 +1,9 @@
-"""Retrievers for ListIndex."""
+"""Retrievers for SummaryIndex."""
 import logging
 from typing import Any, Callable, List, Optional, Tuple
 
 from llama_index.indices.base_retriever import BaseRetriever
-from llama_index.indices.list.base import ListIndex
+from llama_index.indices.list.base import SummaryIndex
 from llama_index.indices.query.embedding_utils import get_top_k_embeddings
 from llama_index.indices.query.schema import QueryBundle
 from llama_index.indices.service_context import ServiceContext
@@ -20,15 +20,15 @@ from llama_index.schema import BaseNode, NodeWithScore, MetadataMode
 logger = logging.getLogger(__name__)
 
 
-class ListIndexRetriever(BaseRetriever):
-    """Simple retriever for ListIndex that returns all nodes.
+class SummaryIndexRetriever(BaseRetriever):
+    """Simple retriever for SummaryIndex that returns all nodes.
 
     Args:
-        index (ListIndex): The index to retrieve from.
+        index (SummaryIndex): The index to retrieve from.
 
     """
 
-    def __init__(self, index: ListIndex, **kwargs: Any) -> None:
+    def __init__(self, index: SummaryIndex, **kwargs: Any) -> None:
         self._index = index
 
     def _retrieve(
@@ -43,21 +43,21 @@ class ListIndexRetriever(BaseRetriever):
         return [NodeWithScore(node=node) for node in nodes]
 
 
-class ListIndexEmbeddingRetriever(BaseRetriever):
-    """Embedding based retriever for ListIndex.
+class SummaryIndexEmbeddingRetriever(BaseRetriever):
+    """Embedding based retriever for SummaryIndex.
 
     Generates embeddings in a lazy fashion for all
     nodes that are traversed.
 
     Args:
-        index (ListIndex): The index to retrieve from.
+        index (SummaryIndex): The index to retrieve from.
         similarity_top_k (Optional[int]): The number of top nodes to return.
 
     """
 
     def __init__(
         self,
-        index: ListIndex,
+        index: SummaryIndex,
         similarity_top_k: Optional[int] = 1,
         **kwargs: Any,
     ) -> None:
@@ -118,11 +118,11 @@ class ListIndexEmbeddingRetriever(BaseRetriever):
         return query_bundle.embedding, node_embeddings
 
 
-class ListIndexLLMRetriever(BaseRetriever):
-    """LLM retriever for ListIndex.
+class SummaryIndexLLMRetriever(BaseRetriever):
+    """LLM retriever for SummaryIndex.
 
     Args:
-        index (ListIndex): The index to retrieve from.
+        index (SummaryIndex): The index to retrieve from.
         choice_select_prompt (Optional[PromptTemplate]): A Choice-Select Prompt
            (see :ref:`Prompt-Templates`).)
         choice_batch_size (int): The number of nodes to query at a time.
@@ -136,7 +136,7 @@ class ListIndexLLMRetriever(BaseRetriever):
 
     def __init__(
         self,
-        index: ListIndex,
+        index: SummaryIndex,
         choice_select_prompt: Optional[PromptTemplate] = None,
         choice_batch_size: int = 10,
         format_node_batch_fn: Optional[Callable] = None,
@@ -189,3 +189,9 @@ class ListIndexLLMRetriever(BaseRetriever):
                 ]
             )
         return results
+
+
+# for backwards compatibility
+ListIndexEmbeddingRetriever = SummaryIndexEmbeddingRetriever
+ListIndexLLMRetriever = SummaryIndexLLMRetriever
+ListIndexRetriever = SummaryIndexRetriever
diff --git a/llama_index/indices/registry.py b/llama_index/indices/registry.py
index 64b9d1f7dabaa4f9c28417770005971279e938a2..924ad13527c88c681772b304b0c4756e3ddaea0d 100644
--- a/llama_index/indices/registry.py
+++ b/llama_index/indices/registry.py
@@ -8,7 +8,7 @@ from llama_index.indices.document_summary.base import DocumentSummaryIndex
 from llama_index.indices.empty.base import EmptyIndex
 from llama_index.indices.keyword_table.base import KeywordTableIndex
 from llama_index.indices.knowledge_graph.base import KnowledgeGraphIndex
-from llama_index.indices.list.base import ListIndex
+from llama_index.indices.list.base import SummaryIndex
 from llama_index.indices.struct_store.pandas import PandasIndex
 from llama_index.indices.struct_store.sql import SQLStructStoreIndex
 from llama_index.indices.tree.base import TreeIndex
@@ -16,7 +16,7 @@ from llama_index.indices.vector_store.base import VectorStoreIndex
 
 INDEX_STRUCT_TYPE_TO_INDEX_CLASS: Dict[IndexStructType, Type[BaseIndex]] = {
     IndexStructType.TREE: TreeIndex,
-    IndexStructType.LIST: ListIndex,
+    IndexStructType.LIST: SummaryIndex,
     IndexStructType.KEYWORD_TABLE: KeywordTableIndex,
     IndexStructType.VECTOR_STORE: VectorStoreIndex,
     IndexStructType.SQL: SQLStructStoreIndex,
diff --git a/llama_index/playground/base.py b/llama_index/playground/base.py
index c3003622a90d16a7dcd396ec7e450847999553be..abb090f49da936b4de6cfc134fdbfe0dde3071b0 100644
--- a/llama_index/playground/base.py
+++ b/llama_index/playground/base.py
@@ -9,7 +9,7 @@ from llama_index.bridge.langchain import get_color_mapping, print_text
 
 from llama_index.callbacks import CallbackManager, TokenCountingHandler
 from llama_index.indices.base import BaseIndex
-from llama_index.indices.list.base import ListIndex, ListRetrieverMode
+from llama_index.indices.list.base import SummaryIndex, ListRetrieverMode
 from llama_index.indices.tree.base import TreeIndex, TreeRetrieverMode
 from llama_index.indices.vector_store import VectorStoreIndex
 from llama_index.llm_predictor import LLMPredictor
@@ -18,14 +18,14 @@ from llama_index.schema import Document
 DEFAULT_INDEX_CLASSES: List[Type[BaseIndex]] = [
     VectorStoreIndex,
     TreeIndex,
-    ListIndex,
+    SummaryIndex,
 ]
 
 INDEX_SPECIFIC_QUERY_MODES_TYPE = Dict[Type[BaseIndex], List[str]]
 
 DEFAULT_MODES: INDEX_SPECIFIC_QUERY_MODES_TYPE = {
     TreeIndex: [e.value for e in TreeRetrieverMode],
-    ListIndex: [e.value for e in ListRetrieverMode],
+    SummaryIndex: [e.value for e in ListRetrieverMode],
     VectorStoreIndex: ["default"],
 }
 
diff --git a/llama_index/query_engine/retry_source_query_engine.py b/llama_index/query_engine/retry_source_query_engine.py
index b7277f3194d4ba1f7fcabf9d2e76a0bea2d5f2f4..1c8d71eb4cc92407e0cc20979a0d188bd59f7aa5 100644
--- a/llama_index/query_engine/retry_source_query_engine.py
+++ b/llama_index/query_engine/retry_source_query_engine.py
@@ -3,7 +3,7 @@ from typing import Optional
 
 from llama_index.callbacks.base import CallbackManager
 from llama_index.evaluation.base import QueryResponseEvaluator
-from llama_index.indices.list.base import ListIndex
+from llama_index.indices.list.base import SummaryIndex
 from llama_index.indices.query.base import BaseQueryEngine
 from llama_index.indices.query.schema import QueryBundle
 from llama_index.indices.service_context import ServiceContext
@@ -58,7 +58,7 @@ class RetrySourceQueryEngine(BaseQueryEngine):
                     new_docs.append(Document(text=node.node.get_content()))
             if len(new_docs) == 0:
                 raise ValueError("No source nodes passed evaluation.")
-            new_index = ListIndex.from_documents(
+            new_index = SummaryIndex.from_documents(
                 new_docs,
                 service_context=self._service_context,
             )
diff --git a/llama_index/retrievers/__init__.py b/llama_index/retrievers/__init__.py
index 23d09d6dccfaa09ca60d4e7e426de0cf6662ef8e..90bd0b09b3b4c2c9a84335370a6dd0d682d23ada 100644
--- a/llama_index/retrievers/__init__.py
+++ b/llama_index/retrievers/__init__.py
@@ -8,6 +8,9 @@ from llama_index.indices.knowledge_graph.retrievers import (
 from llama_index.indices.list.retrievers import (
     ListIndexEmbeddingRetriever,
     ListIndexRetriever,
+    SummaryIndexEmbeddingRetriever,
+    SummaryIndexLLMRetriever,
+    SummaryIndexRetriever,
 )
 from llama_index.indices.tree.all_leaf_retriever import TreeAllLeafRetriever
 from llama_index.indices.tree.select_leaf_embedding_retriever import (
@@ -28,8 +31,9 @@ from llama_index.retrievers.bm25_retriever import BM25Retriever
 __all__ = [
     "VectorIndexRetriever",
     "VectorIndexAutoRetriever",
-    "ListIndexEmbeddingRetriever",
-    "ListIndexRetriever",
+    "SummaryIndexRetriever",
+    "SummaryIndexEmbeddingRetriever",
+    "SummaryIndexLLMRetriever",
     "KGTableRetriever",
     "KnowledgeGraphRAGRetriever",
     "EmptyIndexRetriever",
@@ -44,4 +48,7 @@ __all__ = [
     "AutoMergingRetriever",
     "RouterRetriever",
     "BM25Retriever",
+    # legacy
+    "ListIndexEmbeddingRetriever",
+    "ListIndexRetriever",
 ]
diff --git a/llama_index/storage/docstore/keyval_docstore.py b/llama_index/storage/docstore/keyval_docstore.py
index 3ef2dc9bda1823ce8fd7c9da3c88794fdd5338a8..1e1fdb6a1770cc3ed1420b06e502dbfda1451956 100644
--- a/llama_index/storage/docstore/keyval_docstore.py
+++ b/llama_index/storage/docstore/keyval_docstore.py
@@ -26,7 +26,7 @@ class KVDocumentStore(BaseDocumentStore):
         docstore.add_documents(nodes)
         storage_context = StorageContext.from_defaults(docstore=docstore)
 
-        list_index = ListIndex(nodes, storage_context=storage_context)
+        list_index = SummaryIndex(nodes, storage_context=storage_context)
         vector_index = VectorStoreIndex(nodes, storage_context=storage_context)
         keyword_table_index = SimpleKeywordTableIndex(
             nodes,
diff --git a/tests/indices/list/test_index.py b/tests/indices/list/test_index.py
index 518c9f8ae3c5b41da8116545460663d04239a5b8..ef654c28851a6d20a646c4f0ec07d0fabf43273d 100644
--- a/tests/indices/list/test_index.py
+++ b/tests/indices/list/test_index.py
@@ -3,7 +3,7 @@
 from typing import Dict, List, Tuple
 
 from llama_index.indices.base_retriever import BaseRetriever
-from llama_index.indices.list.base import ListIndex, ListRetrieverMode
+from llama_index.indices.list.base import SummaryIndex, ListRetrieverMode
 from llama_index.indices.service_context import ServiceContext
 from llama_index.schema import Document
 from llama_index.schema import BaseNode
@@ -13,7 +13,7 @@ def test_build_list(
     documents: List[Document], mock_service_context: ServiceContext
 ) -> None:
     """Test build list."""
-    list_index = ListIndex.from_documents(
+    list_index = SummaryIndex.from_documents(
         documents, service_context=mock_service_context
     )
     assert len(list_index.index_struct.nodes) == 4
@@ -39,7 +39,7 @@ def test_refresh_list(
         more_documents[i].doc_id = str(i)  # type: ignore[misc]
 
     # create index
-    list_index = ListIndex.from_documents(
+    list_index = SummaryIndex.from_documents(
         more_documents, service_context=mock_service_context
     )
 
@@ -68,7 +68,7 @@ def test_build_list_multiple(mock_service_context: ServiceContext) -> None:
         Document(text="Hello world.\nThis is a test."),
         Document(text="This is another test.\nThis is a test v2."),
     ]
-    list_index = ListIndex.from_documents(
+    list_index = SummaryIndex.from_documents(
         documents, service_context=mock_service_context
     )
     assert len(list_index.index_struct.nodes) == 4
@@ -85,7 +85,7 @@ def test_list_insert(
     mock_service_context: ServiceContext,
 ) -> None:
     """Test insert to list."""
-    list_index = ListIndex([], service_context=mock_service_context)
+    list_index = SummaryIndex([], service_context=mock_service_context)
     assert len(list_index.index_struct.nodes) == 0
     list_index.insert(documents[0])
     nodes = list_index.docstore.get_nodes(list_index.index_struct.nodes)
@@ -98,7 +98,7 @@ def test_list_insert(
     # test insert with ID
     document = documents[0]
     document.doc_id = "test_id"  # type: ignore[misc]
-    list_index = ListIndex([])
+    list_index = SummaryIndex([])
     list_index.insert(document)
     # check contents of nodes
     nodes = list_index.docstore.get_nodes(list_index.index_struct.nodes)
@@ -118,7 +118,7 @@ def test_list_delete(
         Document(text="This is a test v2.", id_="test_id_3"),
     ]
 
-    list_index = ListIndex.from_documents(
+    list_index = SummaryIndex.from_documents(
         new_documents, service_context=mock_service_context
     )
 
@@ -139,7 +139,7 @@ def test_list_delete(
     source_doc = list_index.docstore.get_document("test_id_1", raise_error=False)
     assert source_doc is None
 
-    list_index = ListIndex.from_documents(
+    list_index = SummaryIndex.from_documents(
         new_documents, service_context=mock_service_context
     )
     list_index.delete_ref_doc("test_id_2")
@@ -174,7 +174,7 @@ def test_as_retriever(
     documents: List[Document],
     mock_service_context: ServiceContext,
 ) -> None:
-    list_index = ListIndex.from_documents(
+    list_index = SummaryIndex.from_documents(
         documents, service_context=mock_service_context
     )
     default_retriever = list_index.as_retriever(
diff --git a/tests/indices/list/test_retrievers.py b/tests/indices/list/test_retrievers.py
index c830dbc4a1fbe4d1e6cca866ccc6eafeda0f433d..9c092c0a14585499896a1304f11518bde0c74307 100644
--- a/tests/indices/list/test_retrievers.py
+++ b/tests/indices/list/test_retrievers.py
@@ -1,8 +1,8 @@
 from typing import Any, List
 from unittest.mock import patch
 
-from llama_index.indices.list.base import ListIndex
-from llama_index.indices.list.retrievers import ListIndexEmbeddingRetriever
+from llama_index.indices.list.base import SummaryIndex
+from llama_index.indices.list.retrievers import SummaryIndexEmbeddingRetriever
 from llama_index.indices.service_context import ServiceContext
 from llama_index.llm_predictor.base import LLMPredictor
 from llama_index.prompts import BasePromptTemplate
@@ -14,7 +14,7 @@ def test_retrieve_default(
     documents: List[Document], mock_service_context: ServiceContext
 ) -> None:
     """Test list query."""
-    index = ListIndex.from_documents(documents, service_context=mock_service_context)
+    index = SummaryIndex.from_documents(documents, service_context=mock_service_context)
 
     query_str = "What is?"
     retriever = index.as_retriever(retriever_mode="default")
@@ -25,7 +25,7 @@ def test_retrieve_default(
 
 
 @patch.object(
-    ListIndexEmbeddingRetriever,
+    SummaryIndexEmbeddingRetriever,
     "_get_embeddings",
     side_effect=_get_embeddings,
 )
@@ -35,7 +35,7 @@ def test_embedding_query(
     mock_service_context: ServiceContext,
 ) -> None:
     """Test embedding query."""
-    index = ListIndex.from_documents(documents, service_context=mock_service_context)
+    index = SummaryIndex.from_documents(documents, service_context=mock_service_context)
 
     # test embedding query
     query_str = "What is?"
@@ -63,7 +63,7 @@ def test_llm_query(
     mock_service_context: ServiceContext,
 ) -> None:
     """Test llm query."""
-    index = ListIndex.from_documents(documents, service_context=mock_service_context)
+    index = SummaryIndex.from_documents(documents, service_context=mock_service_context)
 
     # test llm query (batch size 10)
     query_str = "What is?"
diff --git a/tests/indices/query/test_compose.py b/tests/indices/query/test_compose.py
index 53b64c3ac399b997436dee9e14a13e7a0619266d..4e3b4e424151508b3c4456000fb78df8ac1d43a0 100644
--- a/tests/indices/query/test_compose.py
+++ b/tests/indices/query/test_compose.py
@@ -4,7 +4,7 @@ from typing import Dict, List
 
 from llama_index.indices.composability.graph import ComposableGraph
 from llama_index.indices.keyword_table.simple_base import SimpleKeywordTableIndex
-from llama_index.indices.list.base import ListIndex
+from llama_index.indices.list.base import SummaryIndex
 from llama_index.indices.service_context import ServiceContext
 from llama_index.indices.tree.base import TreeIndex
 from llama_index.schema import Document
@@ -19,16 +19,16 @@ def test_recursive_query_list_tree(
     list_kwargs = index_kwargs["list"]
     tree_kwargs = index_kwargs["tree"]
     # try building a list for every two, then a tree
-    list1 = ListIndex.from_documents(
+    list1 = SummaryIndex.from_documents(
         documents[0:2], service_context=mock_service_context, **list_kwargs
     )
-    list2 = ListIndex.from_documents(
+    list2 = SummaryIndex.from_documents(
         documents[2:4], service_context=mock_service_context, **list_kwargs
     )
-    list3 = ListIndex.from_documents(
+    list3 = SummaryIndex.from_documents(
         documents[4:6], service_context=mock_service_context, **list_kwargs
     )
-    list4 = ListIndex.from_documents(
+    list4 = SummaryIndex.from_documents(
         documents[6:8], service_context=mock_service_context, **list_kwargs
     )
 
@@ -89,7 +89,7 @@ def test_recursive_query_tree_list(
     # there are two root nodes in this tree: one containing [list1, list2]
     # and the other containing [list3, list4]
     graph = ComposableGraph.from_indices(
-        ListIndex,
+        SummaryIndex,
         [tree1, tree2],
         index_summaries=summaries,
         service_context=mock_service_context,
@@ -128,7 +128,7 @@ def test_recursive_query_table_list(
     ]
 
     graph = ComposableGraph.from_indices(
-        ListIndex,
+        SummaryIndex,
         [table1, table2],
         index_summaries=summaries,
         service_context=mock_service_context,
@@ -156,16 +156,16 @@ def test_recursive_query_list_table(
     # try building a tree for a group of 4, then a list
     # use a diff set of documents
     # try building a list for every two, then a tree
-    list1 = ListIndex.from_documents(
+    list1 = SummaryIndex.from_documents(
         documents[0:2], service_context=mock_service_context, **list_kwargs
     )
-    list2 = ListIndex.from_documents(
+    list2 = SummaryIndex.from_documents(
         documents[2:4], service_context=mock_service_context, **list_kwargs
     )
-    list3 = ListIndex.from_documents(
+    list3 = SummaryIndex.from_documents(
         documents[4:6], service_context=mock_service_context, **list_kwargs
     )
-    list4 = ListIndex.from_documents(
+    list4 = SummaryIndex.from_documents(
         documents[6:8], service_context=mock_service_context, **list_kwargs
     )
     summaries = [
diff --git a/tests/indices/query/test_query_bundle.py b/tests/indices/query/test_query_bundle.py
index ac1cd2174af0a9083678b751f5ae3162ff1e64d8..108191683307fc6a9af11f7a3f8b698c5ea3259f 100644
--- a/tests/indices/query/test_query_bundle.py
+++ b/tests/indices/query/test_query_bundle.py
@@ -5,7 +5,7 @@ from typing import Dict, List
 import pytest
 
 from llama_index.embeddings.base import BaseEmbedding
-from llama_index.indices.list.base import ListIndex
+from llama_index.indices.list.base import SummaryIndex
 from llama_index.indices.query.schema import QueryBundle
 from llama_index.indices.service_context import ServiceContext
 from llama_index.schema import Document
@@ -78,7 +78,7 @@ def test_embedding_query(
 ) -> None:
     """Test embedding query."""
     mock_service_context.embed_model = MockEmbedding()
-    index = ListIndex.from_documents(documents, service_context=mock_service_context)
+    index = SummaryIndex.from_documents(documents, service_context=mock_service_context)
 
     # test embedding query
     query_bundle = QueryBundle(
diff --git a/tests/indices/struct_store/test_base.py b/tests/indices/struct_store/test_base.py
index d0709c86986d3f23315271b9b607ba9e37f2f32a..3952985b855df830ef38677fdb7bb201fef7141d 100644
--- a/tests/indices/struct_store/test_base.py
+++ b/tests/indices/struct_store/test_base.py
@@ -13,7 +13,7 @@ from sqlalchemy import (
     select,
 )
 
-from llama_index.indices.list.base import ListIndex
+from llama_index.indices.list.base import SummaryIndex
 from llama_index.indices.query.schema import QueryBundle
 from llama_index.indices.service_context import ServiceContext
 from llama_index.indices.struct_store.sql import (
@@ -278,10 +278,10 @@ def test_sql_index_with_derive_index(mock_service_context: ServiceContext) -> No
         sql_database, context_dict=table_context_dict
     )
     context_index_no_ignore = context_builder.derive_index_from_context(
-        ListIndex,
+        SummaryIndex,
     )
     context_index_with_ignore = context_builder.derive_index_from_context(
-        ListIndex, ignore_db_schema=True
+        SummaryIndex, ignore_db_schema=True
     )
     assert len(context_index_with_ignore.index_struct.nodes) == 1
     assert len(context_index_no_ignore.index_struct.nodes) > 1
@@ -313,7 +313,7 @@ def test_sql_index_with_index_context(
         sql_database, context_dict=table_context_dict
     )
     context_index = context_builder.derive_index_from_context(
-        ListIndex, ignore_db_schema=True
+        SummaryIndex, ignore_db_schema=True
     )
     # NOTE: the response only contains the first line (metadata), since
     # with the mock patch, newlines are treated as separate calls.
diff --git a/tests/indices/test_loading.py b/tests/indices/test_loading.py
index 34970679355eaf5504c11cd0aaa543756f622456..bef364d31496353cc86c02ccaeafe93c086d454f 100644
--- a/tests/indices/test_loading.py
+++ b/tests/indices/test_loading.py
@@ -3,7 +3,7 @@ from typing import List
 
 import pytest
 
-from llama_index.indices.list.base import ListIndex
+from llama_index.indices.list.base import SummaryIndex
 from llama_index.indices.loading import (
     load_index_from_storage,
     load_indices_from_storage,
@@ -69,7 +69,7 @@ def test_load_index_from_storage_multiple(
     )
     vector_id = vector_index.index_id
 
-    list_index = ListIndex(
+    list_index = SummaryIndex(
         nodes=nodes,
         storage_context=storage_context,
         service_context=mock_service_context,
diff --git a/tests/indices/test_loading_graph.py b/tests/indices/test_loading_graph.py
index f6e17b9643dd5a89662cff1e2e0de54be28053cd..ac3c38fa8a0a95c671dc8c19cb75241d82d20b4b 100644
--- a/tests/indices/test_loading_graph.py
+++ b/tests/indices/test_loading_graph.py
@@ -2,7 +2,7 @@ from pathlib import Path
 from typing import List
 
 from llama_index.indices.composability.graph import ComposableGraph
-from llama_index.indices.list.base import ListIndex
+from llama_index.indices.list.base import SummaryIndex
 from llama_index.indices.loading import load_graph_from_storage
 from llama_index.indices.service_context import ServiceContext
 from llama_index.indices.vector_store.base import VectorStoreIndex
@@ -33,7 +33,7 @@ def test_load_graph_from_storage_simple(
     )
 
     # construct index
-    list_index = ListIndex.from_documents(
+    list_index = SummaryIndex.from_documents(
         documents=documents,
         storage_context=storage_context,
         service_context=mock_service_context,
@@ -41,7 +41,7 @@ def test_load_graph_from_storage_simple(
 
     # construct graph
     graph = ComposableGraph.from_indices(
-        ListIndex,
+        SummaryIndex,
         children_indices=[vector_index_1, vector_index_2, list_index],
         index_summaries=["vector index 1", "vector index 2", "list index"],
         storage_context=storage_context,
diff --git a/tests/objects/test_base.py b/tests/objects/test_base.py
index 0f73685e377a6085bf1d72f5c5b9c2b3743652b2..3c7897301f621640c18fa00195f908b2720b7059 100644
--- a/tests/objects/test_base.py
+++ b/tests/objects/test_base.py
@@ -1,7 +1,7 @@
 """Test object index."""
 
 from llama_index.objects.base import ObjectIndex
-from llama_index.indices.list.base import ListIndex
+from llama_index.indices.list.base import SummaryIndex
 
 from llama_index.objects.base_node_mapping import SimpleObjectNodeMapping
 from llama_index.objects.tool_node_mapping import SimpleToolNodeMapping
@@ -14,7 +14,7 @@ def test_object_index(mock_service_context: ServiceContext) -> None:
 
     object_mapping = SimpleObjectNodeMapping.from_objects(["a", "b", "c"])
     obj_index = ObjectIndex.from_objects(
-        ["a", "b", "c"], object_mapping, index_cls=ListIndex
+        ["a", "b", "c"], object_mapping, index_cls=SummaryIndex
     )
     # should just retrieve everything
     assert obj_index.as_retriever().retrieve("test") == ["a", "b", "c"]
@@ -33,6 +33,6 @@ def test_object_index_with_tools(mock_service_context: ServiceContext) -> None:
     object_mapping = SimpleToolNodeMapping.from_objects([tool1, tool2])
 
     obj_retriever = ObjectIndex.from_objects(
-        [tool1, tool2], object_mapping, index_cls=ListIndex
+        [tool1, tool2], object_mapping, index_cls=SummaryIndex
     )
     assert obj_retriever.as_retriever().retrieve("test") == [tool1, tool2]
diff --git a/tests/playground/test_base.py b/tests/playground/test_base.py
index 30821ce9915b596acb39ce0001f781ef5867a6f8..ab4751b35f91470868528eb19b7a3cc7d83a507a 100644
--- a/tests/playground/test_base.py
+++ b/tests/playground/test_base.py
@@ -5,7 +5,7 @@ from typing import List
 import pytest
 
 from llama_index.embeddings.base import BaseEmbedding
-from llama_index.indices.list.base import ListIndex
+from llama_index.indices.list.base import SummaryIndex
 from llama_index.indices.service_context import ServiceContext
 from llama_index.indices.tree.base import TreeIndex
 from llama_index.indices.vector_store.base import VectorStoreIndex
@@ -63,7 +63,7 @@ def test_get_set_compare(
         VectorStoreIndex.from_documents(
             documents=documents, service_context=mock_service_context
         ),
-        ListIndex.from_documents(documents, service_context=mock_service_context),
+        SummaryIndex.from_documents(documents, service_context=mock_service_context),
         TreeIndex.from_documents(
             documents=documents, service_context=mock_service_context
         ),
@@ -117,7 +117,9 @@ def test_validation() -> None:
         _ = Playground(indices=["VectorStoreIndex"])  # type: ignore
 
     with pytest.raises(ValueError):
-        _ = Playground(indices=[VectorStoreIndex, ListIndex, TreeIndex])  # type: ignore
+        _ = Playground(
+            indices=[VectorStoreIndex, SummaryIndex, TreeIndex]  # type: ignore
+        )
 
     with pytest.raises(ValueError):
         _ = Playground(indices=[])  # type: ignore
diff --git a/tests/storage/docstore/test_redis_docstore.py b/tests/storage/docstore/test_redis_docstore.py
index fb902856f1e554b550e27ba18cacbcd55e4ca64f..07837dea85873a71270688edca70e09eeb7df7a4 100644
--- a/tests/storage/docstore/test_redis_docstore.py
+++ b/tests/storage/docstore/test_redis_docstore.py
@@ -79,7 +79,7 @@ def test_redis_docstore_deserialization(
     redis_docstore: RedisDocumentStore, documents: List[Document]
 ) -> None:
     from llama_index import (
-        ListIndex,
+        SummaryIndex,
         StorageContext,
         Document,
     )
@@ -93,7 +93,7 @@ def test_redis_docstore_deserialization(
 
     storage_context = StorageContext.from_defaults(docstore=ds, index_store=idxs)
 
-    index = ListIndex.from_documents(
+    index = SummaryIndex.from_documents(
         [Document(text="hello world2")], storage_context=storage_context
     )
     # fails here
diff --git a/tests/token_predictor/test_base.py b/tests/token_predictor/test_base.py
index bb40805de5a14a1b1c356b5ccbc17c8cea2e679b..94fa8b37bda6b79453b696f7fccb768ff303b544 100644
--- a/tests/token_predictor/test_base.py
+++ b/tests/token_predictor/test_base.py
@@ -4,7 +4,7 @@ from typing import Any
 from unittest.mock import patch
 
 from llama_index.indices.keyword_table.base import KeywordTableIndex
-from llama_index.indices.list.base import ListIndex
+from llama_index.indices.list.base import SummaryIndex
 from llama_index.indices.service_context import ServiceContext
 from llama_index.indices.tree.base import TreeIndex
 from llama_index.llm_predictor.mock import MockLLMPredictor
@@ -41,6 +41,8 @@ def test_token_predictor(mock_split: Any) -> None:
     query_engine.query("What is?")
 
     # test list index
-    index_list = ListIndex.from_documents([document], service_context=service_context)
+    index_list = SummaryIndex.from_documents(
+        [document], service_context=service_context
+    )
     query_engine = index_list.as_query_engine()
     query_engine.query("What is?")