# New and improved embedding model

December 15, 2022
Tags: Product, Announcements

We are excited to announce a new embedding model which is significantly more capable, cost effective, and simpler to use.

The new model, `text-embedding-ada-002`, replaces five separate models for text search, text similarity, and code search, and outperforms our previous most capable model, Davinci, at most tasks, while being priced 99.8% lower.

Embeddings are numerical representations of concepts converted to number sequences, which make it easy for computers to understand the relationships between those concepts. Since the [initial launch](https://openai.com/blog/introducing-text-and-code-embeddings/) of the OpenAI [/embeddings](https://beta.openai.com/docs/api-reference/embeddings) endpoint, many applications have incorporated embeddings to personalize, recommend, and search content.

*[Interactive snippet: New and improved embedding model - TextAsVector]*

You can query the [/embeddings](https://beta.openai.com/docs/api-reference/embeddings) endpoint for the new model with two lines of code using our [OpenAI Python Library](https://github.com/openai/openai-python), just like you could with previous models:

*[Interactive snippet: New and improved embedding model - QueryCode]*

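As a rough sketch (not the original snippet) of such a call using the 2022-era OpenAI Python library's pre-1.0 `openai.Embedding` interface, with a placeholder input string:

```python
# Sketch only: querying /embeddings with the pre-1.0 OpenAI Python library.
import openai  # pip install "openai<1"

response = openai.Embedding.create(
    model="text-embedding-ada-002",
    input="The quick brown fox jumps over the lazy dog",  # placeholder text
)
embedding = response["data"][0]["embedding"]  # a list of 1536 floats
```
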
## Model improvements

**Stronger performance.** `text-embedding-ada-002` outperforms all the old embedding models on text search, code search, and sentence similarity tasks and gets comparable performance on text classification. For each task category, we evaluate the models on the datasets used in [old embeddings](https://arxiv.org/abs/2201.10005).

*[Interactive snippet: New and improved embedding model - Improvements]*

**Unification of capabilities.** We have significantly simplified the interface of the [/embeddings](https://beta.openai.com/docs/api-reference/embeddings) endpoint by merging the five separate models shown above (`text-similarity`, `text-search-query`, `text-search-doc`, `code-search-text` and `code-search-code`) into a single new model. This single representation performs better than our previous embedding models across a diverse set of text search, sentence similarity, and code search benchmarks.

**Longer context.** The context length of the new model is increased by a factor of four, from 2048 to 8192, making it more convenient to work with long documents.

**Smaller embedding size.** The new embeddings have only 1536 dimensions, one-eighth the size of `davinci-001` embeddings, making the new embeddings more cost effective in working with vector databases.

**Reduced price.** We have reduced the price of new embedding models by 90% compared to old models of the same size. The new model achieves better or similar performance as the old Davinci models at a 99.8% lower price.

Overall, the new embedding model is a much more powerful tool for natural language processing and code tasks. We are excited to see how our customers will use it to create even more capable applications in their respective fields.

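As an illustration of the unified interface described above (a sketch, not code from the post; the helper names and example strings are made up), the same model embeds both a search query and a document, and a single cosine similarity compares them:

```python
# Illustrative sketch: one model (text-embedding-ada-002) for both queries and documents.
import numpy as np
import openai  # pre-1.0 OpenAI Python library, as above


def embed(text: str) -> np.ndarray:
    resp = openai.Embedding.create(model="text-embedding-ada-002", input=text)
    return np.asarray(resp["data"][0]["embedding"])


def cosine_similarity(a: np.ndarray, b: np.ndarray) -> float:
    return float(a @ b / (np.linalg.norm(a) * np.linalg.norm(b)))


query_vec = embed("how do I reset my password")             # placeholder query
doc_vec = embed("You can reset your password in Settings")  # placeholder document
print(cosine_similarity(query_vec, doc_vec))  # higher score = more closely related
```
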
## Limitations

The new `text-embedding-ada-002` model does not outperform `text-similarity-davinci-001` on the SentEval linear probing classification benchmark. For tasks that require training a lightweight linear layer on top of embedding vectors for classification prediction, we suggest comparing the new model to `text-similarity-davinci-001` and choosing whichever model gives optimal performance.

Check the [Limitations & Risks](https://beta.openai.com/docs/guides/embeddings/limitations-risks) section in the embeddings documentation for general limitations of our embedding models.

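A sketch of that comparison (the post does not prescribe tooling; scikit-learn and the 12288-dimension figure for `text-similarity-davinci-001`, implied by "one-eighth the size", are assumptions): fit the same lightweight linear probe on embeddings from each model and keep whichever scores better.

```python
# Sketch of linear probing on frozen embeddings; library choice is an assumption.
import numpy as np
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import cross_val_score


def linear_probe_score(embeddings: np.ndarray, labels: np.ndarray) -> float:
    """Mean cross-validated accuracy of a lightweight linear classifier on fixed embeddings."""
    clf = LogisticRegression(max_iter=1000)
    return float(cross_val_score(clf, embeddings, labels, cv=5).mean())


# X_ada: (n, 1536) array of text-embedding-ada-002 vectors
# X_davinci: (n, 12288) array of text-similarity-davinci-001 vectors
# y: class labels for the same n examples
# Keep whichever model gives the higher linear_probe_score(X, y) for your task.
```
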
## Examples of the embeddings API in action

[**Kalendar AI**](https://kalendar.ai/) is a sales outreach product that uses embeddings to match the right sales pitch to the right customers out of a dataset containing 340M profiles. This automation relies on similarity between embeddings of customer profiles and sales pitches to rank the most suitable matches, eliminating 40–56% of unwanted targeting compared to their old approach.

[**Notion**](https://www.notion.so/), the online workspace company, will use OpenAI’s new embeddings to improve Notion search beyond today’s keyword matching systems.

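A minimal sketch of the ranking idea described for Kalendar AI (not their actual code; names and shapes below are assumptions):

```python
# Illustrative sketch: rank candidate profiles against one sales pitch by cosine similarity.
import numpy as np


def rank_profiles(pitch_vec: np.ndarray, profile_vecs: np.ndarray, top_k: int = 10) -> np.ndarray:
    """Return indices of the top_k profiles whose embeddings are closest to the pitch embedding."""
    pitch = pitch_vec / np.linalg.norm(pitch_vec)
    profiles = profile_vecs / np.linalg.norm(profile_vecs, axis=1, keepdims=True)
    scores = profiles @ pitch  # cosine similarities, shape (n_profiles,)
    return np.argsort(-scores)[:top_k]


# pitch_vec: (1536,) embedding of a sales pitch
# profile_vecs: (n_profiles, 1536) embeddings of customer profiles
# Profiles ranked low can simply be dropped from outreach, reducing unwanted targeting.
```
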
[Read documentation](https://beta.openai.com/docs/guides/embeddings)

Authors: Ryan Greene, Ted Sanders, Lilian Weng, Arvind Neelakantan

*[Hero image: new-and-improved-embeddings-api.jpg; credit: Ruby Chen]*