From 13374a12e9c6b9feeb3108f3231d94f19c007ddd Mon Sep 17 00:00:00 2001 From: Saket Aryan <94069182+whysosaket@users.noreply.github.com> Date: Tue, 19 Nov 2024 23:53:58 +0530 Subject: [PATCH] (Feature) Vercel AI SDK (#2024) --- .../vercel-ai-sdk-chat-app/.gitattributes | 2 + examples/vercel-ai-sdk-chat-app/.gitignore | 29 ++ .../vercel-ai-sdk-chat-app/components.json | 20 ++ .../vercel-ai-sdk-chat-app/eslint.config.js | 28 ++ examples/vercel-ai-sdk-chat-app/index.html | 13 + examples/vercel-ai-sdk-chat-app/package.json | 51 +++ .../vercel-ai-sdk-chat-app/postcss.config.js | 6 + .../public/mem0_logo.jpeg | Bin 0 -> 8607 bytes examples/vercel-ai-sdk-chat-app/src/App.tsx | 13 + .../src/assets/mem0_logo.jpeg | Bin 0 -> 8607 bytes .../src/assets/react.svg | 1 + .../src/assets/user.jpg | Bin 0 -> 15547 bytes .../src/components/api-settings-popup.tsx | 91 +++++ .../src/components/chevron-toggle.tsx | 35 ++ .../src/components/header.tsx | 81 +++++ .../src/components/input-area.tsx | 107 ++++++ .../src/components/memories.tsx | 93 +++++ .../src/components/messages.tsx | 102 ++++++ .../src/components/ui/avatar.tsx | 50 +++ .../src/components/ui/badge.tsx | 36 ++ .../src/components/ui/button.tsx | 57 +++ .../src/components/ui/card.tsx | 76 ++++ .../src/components/ui/dialog.tsx | 120 +++++++ .../src/components/ui/input.tsx | 25 ++ .../src/components/ui/label.tsx | 24 ++ .../src/components/ui/scroll-area.tsx | 46 +++ .../src/components/ui/select.tsx | 164 +++++++++ .../src/contexts/GlobalContext.tsx | 324 ++++++++++++++++++ examples/vercel-ai-sdk-chat-app/src/index.css | 97 ++++++ examples/vercel-ai-sdk-chat-app/src/main.tsx | 10 + examples/vercel-ai-sdk-chat-app/src/page.tsx | 14 + .../vercel-ai-sdk-chat-app/src/pages/home.tsx | 41 +++ examples/vercel-ai-sdk-chat-app/src/types.ts | 22 ++ .../vercel-ai-sdk-chat-app/src/vite-env.d.ts | 1 + .../vercel-ai-sdk-chat-app/tailwind.config.js | 62 ++++ .../vercel-ai-sdk-chat-app/tsconfig.app.json | 32 ++ 
examples/vercel-ai-sdk-chat-app/tsconfig.json | 13 + .../vercel-ai-sdk-chat-app/tsconfig.node.json | 24 ++ .../vercel-ai-sdk-chat-app/vite.config.ts | 13 + vercel-ai-sdk/.gitattributes | 2 + vercel-ai-sdk/.gitignore | 10 + vercel-ai-sdk/README.md | 228 ++++++++++++ vercel-ai-sdk/config/test-config.ts | 105 ++++++ vercel-ai-sdk/jest.config.js | 6 + vercel-ai-sdk/nodemon.json | 5 + vercel-ai-sdk/package.json | 69 ++++ vercel-ai-sdk/src/index.ts | 4 + vercel-ai-sdk/src/mem0-chat-language-model.ts | 150 ++++++++ vercel-ai-sdk/src/mem0-chat-settings.ts | 36 ++ .../src/mem0-completion-language-model.ts | 150 ++++++++ vercel-ai-sdk/src/mem0-completion-settings.ts | 19 + vercel-ai-sdk/src/mem0-facade.ts | 36 ++ .../src/mem0-generic-language-model.ts | 148 ++++++++ vercel-ai-sdk/src/mem0-provider-selector.ts | 34 ++ vercel-ai-sdk/src/mem0-provider.ts | 145 ++++++++ vercel-ai-sdk/src/mem0-utils.ts | 114 ++++++ .../src/provider-response-provider.ts | 113 ++++++ vercel-ai-sdk/src/stream-utils.ts | 28 ++ vercel-ai-sdk/teardown.ts | 12 + .../tests/anthropic-structured-ouput.test.ts | 110 ++++++ vercel-ai-sdk/tests/anthropic.test.ts | 61 ++++ vercel-ai-sdk/tests/cohere.test.ts | 60 ++++ vercel-ai-sdk/tests/generate-output.test.ts | 86 +++++ vercel-ai-sdk/tests/groq.test.ts | 61 ++++ vercel-ai-sdk/tests/memory-core.test.ts | 75 ++++ .../tests/openai-structured-ouput.test.ts | 110 ++++++ vercel-ai-sdk/tests/openai.test.ts | 58 ++++ vercel-ai-sdk/tests/text-properties.test.ts | 77 +++++ vercel-ai-sdk/tsconfig.json | 29 ++ vercel-ai-sdk/tsup.config.ts | 10 + 70 files changed, 4074 insertions(+) create mode 100644 examples/vercel-ai-sdk-chat-app/.gitattributes create mode 100644 examples/vercel-ai-sdk-chat-app/.gitignore create mode 100644 examples/vercel-ai-sdk-chat-app/components.json create mode 100644 examples/vercel-ai-sdk-chat-app/eslint.config.js create mode 100644 examples/vercel-ai-sdk-chat-app/index.html create mode 100644 examples/vercel-ai-sdk-chat-app/package.json create 
mode 100644 examples/vercel-ai-sdk-chat-app/postcss.config.js create mode 100644 examples/vercel-ai-sdk-chat-app/public/mem0_logo.jpeg create mode 100644 examples/vercel-ai-sdk-chat-app/src/App.tsx create mode 100644 examples/vercel-ai-sdk-chat-app/src/assets/mem0_logo.jpeg create mode 100644 examples/vercel-ai-sdk-chat-app/src/assets/react.svg create mode 100644 examples/vercel-ai-sdk-chat-app/src/assets/user.jpg create mode 100644 examples/vercel-ai-sdk-chat-app/src/components/api-settings-popup.tsx create mode 100644 examples/vercel-ai-sdk-chat-app/src/components/chevron-toggle.tsx create mode 100644 examples/vercel-ai-sdk-chat-app/src/components/header.tsx create mode 100644 examples/vercel-ai-sdk-chat-app/src/components/input-area.tsx create mode 100644 examples/vercel-ai-sdk-chat-app/src/components/memories.tsx create mode 100644 examples/vercel-ai-sdk-chat-app/src/components/messages.tsx create mode 100644 examples/vercel-ai-sdk-chat-app/src/components/ui/avatar.tsx create mode 100644 examples/vercel-ai-sdk-chat-app/src/components/ui/badge.tsx create mode 100644 examples/vercel-ai-sdk-chat-app/src/components/ui/button.tsx create mode 100644 examples/vercel-ai-sdk-chat-app/src/components/ui/card.tsx create mode 100644 examples/vercel-ai-sdk-chat-app/src/components/ui/dialog.tsx create mode 100644 examples/vercel-ai-sdk-chat-app/src/components/ui/input.tsx create mode 100644 examples/vercel-ai-sdk-chat-app/src/components/ui/label.tsx create mode 100644 examples/vercel-ai-sdk-chat-app/src/components/ui/scroll-area.tsx create mode 100644 examples/vercel-ai-sdk-chat-app/src/components/ui/select.tsx create mode 100644 examples/vercel-ai-sdk-chat-app/src/contexts/GlobalContext.tsx create mode 100644 examples/vercel-ai-sdk-chat-app/src/index.css create mode 100644 examples/vercel-ai-sdk-chat-app/src/main.tsx create mode 100644 examples/vercel-ai-sdk-chat-app/src/page.tsx create mode 100644 examples/vercel-ai-sdk-chat-app/src/pages/home.tsx create mode 100644 
examples/vercel-ai-sdk-chat-app/src/types.ts create mode 100644 examples/vercel-ai-sdk-chat-app/src/vite-env.d.ts create mode 100644 examples/vercel-ai-sdk-chat-app/tailwind.config.js create mode 100644 examples/vercel-ai-sdk-chat-app/tsconfig.app.json create mode 100644 examples/vercel-ai-sdk-chat-app/tsconfig.json create mode 100644 examples/vercel-ai-sdk-chat-app/tsconfig.node.json create mode 100644 examples/vercel-ai-sdk-chat-app/vite.config.ts create mode 100644 vercel-ai-sdk/.gitattributes create mode 100644 vercel-ai-sdk/.gitignore create mode 100644 vercel-ai-sdk/README.md create mode 100644 vercel-ai-sdk/config/test-config.ts create mode 100644 vercel-ai-sdk/jest.config.js create mode 100644 vercel-ai-sdk/nodemon.json create mode 100644 vercel-ai-sdk/package.json create mode 100644 vercel-ai-sdk/src/index.ts create mode 100644 vercel-ai-sdk/src/mem0-chat-language-model.ts create mode 100644 vercel-ai-sdk/src/mem0-chat-settings.ts create mode 100644 vercel-ai-sdk/src/mem0-completion-language-model.ts create mode 100644 vercel-ai-sdk/src/mem0-completion-settings.ts create mode 100644 vercel-ai-sdk/src/mem0-facade.ts create mode 100644 vercel-ai-sdk/src/mem0-generic-language-model.ts create mode 100644 vercel-ai-sdk/src/mem0-provider-selector.ts create mode 100644 vercel-ai-sdk/src/mem0-provider.ts create mode 100644 vercel-ai-sdk/src/mem0-utils.ts create mode 100644 vercel-ai-sdk/src/provider-response-provider.ts create mode 100644 vercel-ai-sdk/src/stream-utils.ts create mode 100644 vercel-ai-sdk/teardown.ts create mode 100644 vercel-ai-sdk/tests/anthropic-structured-ouput.test.ts create mode 100644 vercel-ai-sdk/tests/anthropic.test.ts create mode 100644 vercel-ai-sdk/tests/cohere.test.ts create mode 100644 vercel-ai-sdk/tests/generate-output.test.ts create mode 100644 vercel-ai-sdk/tests/groq.test.ts create mode 100644 vercel-ai-sdk/tests/memory-core.test.ts create mode 100644 vercel-ai-sdk/tests/openai-structured-ouput.test.ts create mode 100644 
vercel-ai-sdk/tests/openai.test.ts create mode 100644 vercel-ai-sdk/tests/text-properties.test.ts create mode 100644 vercel-ai-sdk/tsconfig.json create mode 100644 vercel-ai-sdk/tsup.config.ts diff --git a/examples/vercel-ai-sdk-chat-app/.gitattributes b/examples/vercel-ai-sdk-chat-app/.gitattributes new file mode 100644 index 0000000000..dfe0770424 --- /dev/null +++ b/examples/vercel-ai-sdk-chat-app/.gitattributes @@ -0,0 +1,2 @@ +# Auto detect text files and perform LF normalization +* text=auto diff --git a/examples/vercel-ai-sdk-chat-app/.gitignore b/examples/vercel-ai-sdk-chat-app/.gitignore new file mode 100644 index 0000000000..9767597e36 --- /dev/null +++ b/examples/vercel-ai-sdk-chat-app/.gitignore @@ -0,0 +1,29 @@ +**/.env +**/node_modules +**/dist +**/.DS_Store + +# Logs +logs +*.log +npm-debug.log* +yarn-debug.log* +yarn-error.log* +pnpm-debug.log* +lerna-debug.log* + +node_modules +dist +dist-ssr +*.local + +# Editor directories and files +.vscode/* +!.vscode/extensions.json +.idea +.DS_Store +*.suo +*.ntvs* +*.njsproj +*.sln +*.sw? 
diff --git a/examples/vercel-ai-sdk-chat-app/components.json b/examples/vercel-ai-sdk-chat-app/components.json new file mode 100644 index 0000000000..0b03196d3a --- /dev/null +++ b/examples/vercel-ai-sdk-chat-app/components.json @@ -0,0 +1,20 @@ +{ + "$schema": "https://ui.shadcn.com/schema.json", + "style": "new-york", + "rsc": false, + "tsx": true, + "tailwind": { + "config": "tailwind.config.js", + "css": "src/index.css", + "baseColor": "zinc", + "cssVariables": true, + "prefix": "" + }, + "aliases": { + "components": "@/components", + "utils": "@/lib/utils", + "ui": "@/components/ui", + "lib": "@/lib", + "hooks": "@/hooks" + } +} \ No newline at end of file diff --git a/examples/vercel-ai-sdk-chat-app/eslint.config.js b/examples/vercel-ai-sdk-chat-app/eslint.config.js new file mode 100644 index 0000000000..092408a9f0 --- /dev/null +++ b/examples/vercel-ai-sdk-chat-app/eslint.config.js @@ -0,0 +1,28 @@ +import js from '@eslint/js' +import globals from 'globals' +import reactHooks from 'eslint-plugin-react-hooks' +import reactRefresh from 'eslint-plugin-react-refresh' +import tseslint from 'typescript-eslint' + +export default tseslint.config( + { ignores: ['dist'] }, + { + extends: [js.configs.recommended, ...tseslint.configs.recommended], + files: ['**/*.{ts,tsx}'], + languageOptions: { + ecmaVersion: 2020, + globals: globals.browser, + }, + plugins: { + 'react-hooks': reactHooks, + 'react-refresh': reactRefresh, + }, + rules: { + ...reactHooks.configs.recommended.rules, + 'react-refresh/only-export-components': [ + 'warn', + { allowConstantExport: true }, + ], + }, + }, +) diff --git a/examples/vercel-ai-sdk-chat-app/index.html b/examples/vercel-ai-sdk-chat-app/index.html new file mode 100644 index 0000000000..e2135b1c43 --- /dev/null +++ b/examples/vercel-ai-sdk-chat-app/index.html @@ -0,0 +1,13 @@ + + +
+ + + +b5Cnn|kZBl?y}l95y9 zuQQYBZ=(eCJ+1r@$f1a$P2U@Nw)ByIiEx**Ds0B7yC3XQN@(YHWINAlH;|M$gDyd^ ztyL!nJ7Reua<# 8Z2Lbia$_1X?w5_IUHAsYNm{&5qm|F;G7c>vrJ*+>fY10PxDv{? Iui{H`G9V zu?6+Sj+=u2e2Q*5ec8m$8aF7ZN1lWdj}wPEkU7Qr()@sRkL|M_E>@?Fy#ipsuTjtI zPHWiN(`mojWDr9X6C#+>468jzK98Y~ZR{y^J128~R?B|x@%!!s_4`c=?uy1dotsp< zgX5K`>xl7r@38mMZ(xvPZuH_~nd7ndeIq~0PZbVuCP&!hO}xg 0b)ei*m4JIZ?mwdVNTEXW@w8??)k!}3X@CGE^_?4rwZG~(?b4b };e z &l49IP>Rf6wq`==0!^{`lU7IJR3-ZR&Hdg_x|_; z`bTjdu)Oy2ZY}jmb4VFyH2s87l8?Wbm>by*RC*)&Ngj(tS)W97ALn8$)~Z;hzc1A! zDzP50Cb}40wp5upPv~fG3%Pzg40$>_1qGahN!~+m3d0A#*@D&FmgvhS`PP6BV*8YU zKs?r0B4+QMtr@>3Wh9v=KJU2h|BcXSgHiOIST7!Kdg1yZ^D-rorfHkR*(Fj3$J4?U zw4%Hw{L2h`546Wp)g!l qFm^(Z9)eY4*7fFoG9~h0+B!#&Bfi^I9{KGiM zdUeRNwMQKDd_Bbe(XTIYU2x&Jwv_mjS=~GapdVrMiJ7Wt1+|B(EmB^JI^y+V4V62z z }LjO!& $NSke{w$IuUqRL2+d-$rBW|0}ub5>`v`y1y|Whg2lnf zgXt>(uhtdd>0=i=@`%rL-tp2DxmzYlB=3#j^jSf`a`_4E;ik7D5eg4U?IY17))+t1 z44#1XLE;Ki_ppcJR#u%)MKV7l6^EYtBEt8?yzu>N?Q*cnmS? 9D zsgiVeizHuq5-=o^@w}S2p4ySBxIle@28Ci^gTO)-y}Al2fA%93$>QDKa7TPpV(-hV zwIB=7*an8Dw elK1XGCy 3v@-Sf&V)36=1!)oRlh?ETV0W!CXIfI3>n +pvbI=OmVwkjVgSC;W`C}p@7 -HIoSc~59?q_X$aY=H z?@@-{ovdeCO?BbDZ8jD7Q=5-nW1-A2xt~Z>Jj^9TnlERLr8&hdf8csVkTPg1m2`w5 zuN#fH*mGL9Ar;>)8CEiZe$P>10g1pS&L2g3ls${X+Iw}nBre22+UA9k-%+bC-(E{5 zAJ?)=d#=e*ZLZTEJN!azTp!dgfq;`m$18&i5z#aa>Lxp@MALO@15tH}F}IV;e&!H# zBYv&XS_YoeiSIeBNox&K*4IbNR)cXA(}^e6xO` J*goKN7X_`;5k&kpURl?DD5%>p z^LC0s>f;um#xq{fde;_>!h-Z9VPhe@a{WgX5;vd>osYu`I0)obMNVvXa~NaI3tNf; z7d%*s68Q5=`j`<_`Yj#JlFV^~)`#L1x4f=_N`uW4L`aTTwsp4&j<(IbokaV-U&pQR z`~tGsbzbrxJCy8>42Mi8UvE60Ey(QM@|-Fc6jHhL)+sW2RQ&$rHu&LC+f4_I!|j8> z{1j}5r8J3$aWvmIKjYc_M&!d4LyEi7+H#ZRAUfr+$acDj(#@dIXyB{t10(aTBsYem z#@2SUR(72wJ%3@r!0=G(#0x#T?UhRUL?$u0nVuEjX5E@==s293v^xhksJftJ4?f10 zvpe`_Moyfi-)c;|ZoSWWvnI2%A9m*m%F7%RTl&Z$G%d2uD}!VHx|xHyF_xass9< Nql5R${SKQ$XLs->7;Ata>7%L;TuqVT* zMPOe6= 3c)i$#YanxZrCLJi>Z7H8sZ2+eGiAB=YrP^C+7`?6 z&Yf*UGRw82@3CbF*dAVz-63m!i7O|k(kA`VZeM-ycQPK-72weoz&_mc z@R9wftYq(!GAz@^Mc!~dJD9BC2tUHZNU)t7aj1-kmlT;=u`h@kYy%9UAhL?v#{1XW zqFg 
0~9C(ofOCHenkj?M0v13KcpP4x-X4q>~dCOc`bu5$f1c zQ2XDn97L~E6GVeW_dUxR~#(lAM=E&MA$1^t_Sqnv`8J{GAZ8j_2C zs+qdskH%H=rjW^2N{WmkKc2*BC5Nq8&Qq~p5Eq^uLN@chuX}K*3{9?1tMcJ|GDk*A z8LHqM#{3$CoSe> J*}ED}vWEzgaswIPP!e`e@|3UI)JM@|l{ zf8NHv$8~2bOzSqvNCl&?x6evHwgvHjjyd=dk~i&?TRqyc%&}sD5NP|^6Jf)Q+#%}y zW&Mp*6CCw1ZSm7GMA`XyF(u7(io%WU3K8FnKCBN?XCobz6+mjT0VD!?EbZq?WWx0m zbMqSpcdo(W2x vxrNkz-t7VCb1l2vw_o|$AyV<{tI4JrwOU)Rz4W%3n z4IZA%#jy3DDv~{W((Z@5ea;v3y7^-fTEUxsJK{VFlsr_s?-}DDOp>;~-5vEaTXa(; z)Z})@>&vo&U(WLV-1;TE&i!h#4XQphRgfaFt|ebnel(`cL9-Ur{$21WMlbRj3Chn? zNK0^D4KNMO-jS6(7c$UZeGuh6+Oz5)RB;@~Fk2Y)Yod$jj!~|kEJYm$52)E%i=*e* zj!QTK#fsi+cIiNcC_ r#Ns=OLg9sm8ZDk~=WEZ#3F&34^h zmM{VWSIQQ(KOh|)8k;=-i1~$S!x0Cqa?IDX851y2m0Qj}#5%*i(2HJ>DdIQEyJ%`i zHOUt=(8R&K@@1=5^5!s5RUqP-g_N}R%rlRi#OpyQ(>1KS{;Yr^p_|09arZK#FEHt; zxsGeZitTIS%Ml>YjS8*UN$ h6pW>@TETiY~S#_exLj<&DZt&s$s~7 zU*_ruYZwj89rm;mjeGhm`PUx~wfJePYxQ09O7|!)2dDb%RbC=j_pbm;18Y}+v7EX3 zvzV2Ocdb_d=e#SxW6f8;;Nk_1%*eV_gY9cIkh+R5ko5iC-!PKdIZOxIoG^n*!wro- zkMT3sn#tZ`pVHouG_vi^3y60_hSWe&d=B-++nUU6jy*?#ns6L zNtQ$H%1VP}n9efy$iz#VM=N72f%jh0wh1Btq@e@9W*~%O1`J+TGdYXlJqgLUwX =#mXM@zGJ(YjrOOxzyE97>9in?8 z2T_E!$_O9xCe9aE0K;&Mq^!(B_1!-fsTY=KWx ukxtGf!V$j z?3n3D{!%aOMUdgfUyqcIGi}x7NWL!Lrs7&DBMI62EXNIh+R`)YLy8bN%(97srSu8I z(iK1hYfRXs&Y|rXbU2LsZo4tqLXeQ|_~%kr3&j6gX)a7w=PDYgXa%pJ)FRgsbpN!# zEw%Dn^{<%?UEG4)TWV}Bv>vKS1Ta))EF>6>S4kbo!@Id*F^#X@7*Ef41xnqg3UfwB zSXT7HZPLH$lJ_I!?hT$y&CHezI9LS-c_#_@Iqp0pzkn|7SF}p6K#!J!E|gC7UnTEX zp6wwoGcLIKz^D2bCqJ 0NvmCEuBXK(Qh!>$?*5Fmn%dL)3*Cu z^-d*{n?FM;2VOKK8xLm)A0a6(fY-A}#mU~u^|CQbxkv_)S%oSuh;C8^`zz`)GTqd! zwmuuK57Fl$?I-eh?O0Q>w`cz3Y|1AD&<&qz=lR^%QN|enzd2gUzx{MZ3x-PH)*pOg zjY_#O`%5hdajj 1;k)F7FZ{ zS&({MEx5bq&APT>bIAE1WFeL-ZJ?`28j)G6-jwk{+K9|QNGJLJ4ZZ7agxcbh>WoIB zktv5QNKINA0>QO3SY}OnvAdXKhwgPalWAX1vE}4>tZ)UWZ6j(zP!6 qix`Z%itsrqMAdOYDh!vM{w|I_x1j6P{nx#>{ar@ zn4KO7Hc|!3nFD>OD@6P#x3E&YXq$FDu=sWc<^Ei1wEI{PAz{Mu+}nbsX`R%Ul>-)U zQA-N(aG7MS1Dj~L%+8DPGatNc?)2~Qa(VgZHbOq3Wp^rww4rJvY?>`4m)2<;sQ2i! 
z*lb~E!6%+Gx+Jkt58nHu%fn9R{rfOFGU|?D#(VF!&IpoUb68 ;Pt*~d`y z4`0@JHI95rne4%)Qs3hDXHzrdns2c0{%jbTo_^({Mi($l7N$@-I>jP(PSGaPciu;_ z?|bw-lx2oxnl%EB-ZfkjSv3UnD@lUmPyU2K&P2U&r?E_7iEo3s2d);70mmipKgoBg ze>gqu?IT4OuT2)F7TIseD(21GB1(4NhqM3n$bca@ID~9QoSjPbV*)F` 9XMre ztbFjpS^Ug|R-|EL`%W}$eArh}(3>A^+wnef)J#3 zW;wKL_jZk gJxWO|3akP4F!}x$0P+`*FW7~kwFvjNO#d6P+g>5{1_dxj%%LKr`t-wk$n-tQ z^a!yIMP{}cdjCe#qeM1bIe9s^vTg &+ z(+LK}yk4r_uwziO|8nm)oQPbcA{iU^208Rh?nmLVF`ZK +2BkkC_??LA*Fyj6QuZn4aw&A-qbOdIeOq0i z8JA)$n%gbZ5QOVai3&?x)Hxp{sWy*AW>7{Fi1E81!1T#9d(hWJQo$9f4OZl{20}IH zmxm!IblXrbvVp|ez+_E@A}LkFZLg%G+Fi*$uXpe-^SMqEol+KHgIF`mqvXCA?bd78 zyCX_eEbED^AE$fb8n^Cs1cfTP>ij;}nH4oBAqnLd$wuPvQ%d5Y@-dH!6Okg qh`JFwFclCTw5p8aw+5OdfeSZy?ed`T-Rubtg1rE#MeP0*^__rTpveZsMak1V*T zl3w>r0%ji2K9K95{$P& |KxEcuzZg!FBC`A~>gds4Pob3MZE#4a6`! za0c>`96%|;Gu$X84T+Oj=GrSnyh09gbUin+iFt1eacV1)T<*|)rm@-2O~EkoMoNRe zdM2mkY%g|j64WO3CVKy1OqiM-nYQ-ByY0Bht&=R`p+MT?9)ail>vLwdr{X*w`)ZWf zbBo$W(SSVc)Ekpj=;cj6n95)VFSdS}IC!&bZKu9o$~floZHo2yyfnYUiHI+O`c#ty z9b4Vcacxq0%eXh#P@+u6DSyc>!X6t+WH~+{N=*>#-k?FVC`Ibc>}rzIm^lq^KbJz- z?=kG8F6*?i(6QQwZID7=lj7PEZfli$Oou83jw;7Mw0eDoUtCxP_?!Ix3=H0Wx9t0F z!NWS`^2+6Ztgmlec*?rB?@DY)HWvwcGA>GzgQb3cN)OB*o{frOQorWRV? 
zNFf2>)It-KyEpDVWYLW@X54)delOJqb#N!q9pdo$hy?7slmw5S#s+ii?>BT^2Xlif z7_q@z)v&K5)(g@M8O5VkpArX!x|W^lWu=UBFTcFC9b;jXQuGk?Ws^bIu?Ik6yOpU@ z+6!+VY9Z7K$BTPCR+s8qd&Q!>pLhRK?(0~8jADg^mW}}g;>-!9UlnFBsu!yxV8j{D z*msNM2=2RE0%oQ7ew_KPHA>-dyEdHQDWN1Giz?L>prkN+stOw8%`Y{Oh0@XQ3Znz} zSuY 6gJH7s?E^H zr>W;07T!^$0?nrvJtSToqxB}xWjOKx{1N7tlh;2g8ABEo oz zIq1g8|F$aif%brY^$7+<-}3RuZxRoCzT>Z|@u0AsV*$VD1JH<~&b1VZLUS+AN+3A) zA#wS}vKCnldQh{%lsr>!0XY{^#*Xl}45quB(=0SB^z9X*MYOjSQUYGO9@M)h9?($r z&YC*|P1LP$K}C{EzbC0mo9=E8rsY#=?F|fdDQgp#fQh;ugy>K;h680^W5^;+;{O8u z!iIe*KQ|;cmc}Z{Dp3Be3Xj*6C;sd1WTrx)W;ci1QpBOw73)zl=_~6CL7?o~!N|_` zby8!oeH~9Bej$85tn&8*1OdFH@}8CRBl-!gt5vidw{)C(1hb!@&O4U@7%)@L*O|j+ z=LyyM*`b!}b3IiX_M6yqy=*8?vT}1HEm?S`eBwk>$mC{XX^{SBe2 (cDFgUyBJgNTH4PG9wK;(5oVsGi--)|Cl85@E;zOLmk)pkBGPA zBR>tnZ#^BW8s0hsHBh6X4XhJqM+}lKXs-ZO$bHtqrqpGZ#mvQVv{U$!E50$`1g4Mr zVGl(kjj7*IWRu)k#EWMJ_rqNqOS{3=C1Re-BzQjye}*io^gp0QkO5g8B9~^LW+!Nk zU0Ri|n8zgrPps<^1l@u#ec@qYt ziRU;4L7Tff%!ZO5*5MT6!y@s4I416JcZ*uB3L0>*=?a~9kn|m`_mzcdR`;-{xjysv zi|okZc6`DEv8h;}O6Y=R6R30pdq@j2#s=x~+ks|+I>8L8z}kr6f-?tcHC6FbQvarx zNA((rTm75OB|* O;G!PZ3PI%xj a7;YP@+J1?#X0Dh~5fVSEunt#T0c&h+CV5gE z0Q *nG(y@l2I@%h;@rs~ 941TX^ASue?-) zTfLjAbw17eR_ym)9?a)5syCb~Ym4^q*Iz=sqxK>VEsFB}9>zJFtDe5MUKlwcv(Ju4 z4kEebcA9ZDl)1u*!?7~ | zw=Q(;5vqK 9*;(c1kjsTO+4 zHPt#hGU#30ojHx63Lg&rPZpzNz~Wj{TUVos5Fal?>ie{ARRNofb+>k}09Ge6)MRJW zk=W3iUBU(Duxi;xhL&$WUPnYijxp-&8`nXxsn^RI^5%ESDl>)Jn#B5EM3qO}Kah1v zPP#2?m6%RDGYKU9X;6eI3Bv*FtFv6QA^9?sVqF7dJAWJ1pqx+G@JvJIy0q1?<^6uX zDf{UsJ68ajNP_1+&f>a^t#%z1s+ln8&2MR5yt+Ed#`p!*11sFJjW7a*2;tk5>JuZ| zdcM} MqCOws#BOJj}D`H|nb> zw7l+Vuw`~mR8kr{b3B`4Cw&+ nd-$hAhKE?UWpIwn={k=qaDs|s zY$b1v2Af6|2XSP@rcqg7^SI_TFUcFweOqQ9&^^YYTAt_Zi5avf&-zwLH1VPitQ!Yz z)l2q<Ni2vuE3do$Eh8O0&jB!U#i~Jsb78J_Z);z|*TW!^^aK$0m~`Sde$#H zMqAB69>5pN?0VRwurCpp9~J={1n#g@i>J?RpSMBw$ri^ZJMW12 yMvz37`{6!8#OU&!Y{xtVnp1nM{FWO0QA+sa9#=2@R*qx?$z7qDrK zbw6#2eeebXvjU3*(o=HZp&GpCZ(OG}j*c2|1ZY=PWd)`H z2J4 zmDM)U*-JOgdWX&Hq`j#b-irmT*ix(^FN@1Hcv(dv3!4j?j hG);}EJmhY<)n)G=K$B*~@p(?cy4?hse 
z>0)Yu6X66mzB&p4NPVerm_7eSbvDxU9nHh=a8 IPl^{ke2M0t}NdI%CSfRW-v%*#r)&YQ2e3Rk G`NbEoR7pO<11g4LT1eB2llm|M7se ztW%G)X?IhAY;LY$CjDWT%jm4GiEbM|ud2{S{iER3vj^rgbB31E&CQn9t~6yK{6H#7 tdk^4VT+gK&W##w|u}&XyM@QHym3!GVEi7mUM?0bauMOruyYpU+{|{;G(pmrj literal 0 HcmV?d00001 diff --git a/examples/vercel-ai-sdk-chat-app/src/components/api-settings-popup.tsx b/examples/vercel-ai-sdk-chat-app/src/components/api-settings-popup.tsx new file mode 100644 index 0000000000..75cea86cd1 --- /dev/null +++ b/examples/vercel-ai-sdk-chat-app/src/components/api-settings-popup.tsx @@ -0,0 +1,91 @@ +import { Dispatch, SetStateAction, useContext, useEffect, useState } from 'react' +import { Button } from "@/components/ui/button" +import { Input } from "@/components/ui/input" +import { Label } from "@/components/ui/label" +import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from "@/components/ui/select" +import { Dialog, DialogContent, DialogHeader, DialogTitle, DialogFooter } from "@/components/ui/dialog" +import GlobalContext from '@/contexts/GlobalContext' + +export default function ApiSettingsPopup(props: { isOpen: boolean, setIsOpen: Dispatch > }) { + const {isOpen, setIsOpen} = props + const [mem0ApiKey, setMem0ApiKey] = useState('') + const [providerApiKey, setProviderApiKey] = useState('') + const [provider, setProvider] = useState('OpenAI') + const { selectorHandler, selectedOpenAIKey, selectedMem0Key, selectedProvider } = useContext(GlobalContext); + + const handleSave = () => { + // Here you would typically save the settings to your backend or local storage + selectorHandler(mem0ApiKey, providerApiKey, provider); + setIsOpen(false) + } + + useEffect(() => { + if (selectedOpenAIKey) { + setProviderApiKey(selectedOpenAIKey); + } + if (selectedMem0Key) { + setMem0ApiKey(selectedMem0Key); + } + if (selectedProvider) { + setProvider(selectedProvider); + } + }, [selectedOpenAIKey, selectedMem0Key, selectedProvider]); + + + + return ( + <> + + > + ) +} \ No newline at end of 
file diff --git a/examples/vercel-ai-sdk-chat-app/src/components/chevron-toggle.tsx b/examples/vercel-ai-sdk-chat-app/src/components/chevron-toggle.tsx new file mode 100644 index 0000000000..7b8b128ea9 --- /dev/null +++ b/examples/vercel-ai-sdk-chat-app/src/components/chevron-toggle.tsx @@ -0,0 +1,35 @@ +import { Button } from "@/components/ui/button"; +import { ChevronLeft, ChevronRight } from "lucide-react"; +import React from "react"; + +const ChevronToggle = (props: { + isMemoriesExpanded: boolean; + setIsMemoriesExpanded: React.Dispatch >; +}) => { + const { isMemoriesExpanded, setIsMemoriesExpanded } = props; + return ( + <> + ++ > + ); +}; + +export default ChevronToggle; diff --git a/examples/vercel-ai-sdk-chat-app/src/components/header.tsx b/examples/vercel-ai-sdk-chat-app/src/components/header.tsx new file mode 100644 index 0000000000..7ddbd37d1d --- /dev/null +++ b/examples/vercel-ai-sdk-chat-app/src/components/header.tsx @@ -0,0 +1,81 @@ +import { Button } from "@/components/ui/button"; +import { ChevronRight, X, RefreshCcw, Settings } from "lucide-react"; +import { Dispatch, SetStateAction, useContext, useEffect, useState } from "react"; +import GlobalContext from "../contexts/GlobalContext"; +import { Input } from "./ui/input"; + +const Header = (props: { + setIsSettingsOpen: Dispatch+ ++>; +}) => { + const { setIsSettingsOpen } = props; + const { selectUserHandler, clearUserHandler, selectedUser, clearConfiguration } = useContext(GlobalContext); + const [userId, setUserId] = useState (""); + + const handleSelectUser = (e: React.ChangeEvent ) => { + setUserId(e.target.value); + }; + + const handleClearUser = () => { + clearUserHandler(); + setUserId(""); + }; + + const handleSubmit = () => { + selectUserHandler(userId); + }; + + // New function to handle key down events + const handleKeyDown = (e: React.KeyboardEvent ) => { + if (e.key === 'Enter') { + e.preventDefault(); // Prevent form submission if it's in a form + handleSubmit(); + } + }; + + 
useEffect(() => { + if (selectedUser) { + setUserId(selectedUser); + } + }, [selectedUser]); + + return ( + <> + + + > + ); +}; + +export default Header; diff --git a/examples/vercel-ai-sdk-chat-app/src/components/input-area.tsx b/examples/vercel-ai-sdk-chat-app/src/components/input-area.tsx new file mode 100644 index 0000000000..877e19a28e --- /dev/null +++ b/examples/vercel-ai-sdk-chat-app/src/components/input-area.tsx @@ -0,0 +1,107 @@ +import { Button } from "@/components/ui/button"; +import { Input } from "@/components/ui/input"; +import GlobalContext from "@/contexts/GlobalContext"; +import { FileInfo } from "@/types"; +import { Images, Send, X } from "lucide-react"; +import { useContext, useRef, useState } from "react"; + +const InputArea = () => { + const [inputValue, setInputValue] = useState(""); + const { handleSend, selectedFile, setSelectedFile, setFile } = useContext(GlobalContext); + const [loading, setLoading] = useState(false); + + const ref = useRef+ Mem0 Assistant +++++ + + +++ + ++(null); + const fileInputRef = useRef (null) + + const handleFileChange = (event: React.ChangeEvent ) => { + const file = event.target.files?.[0] + if (file) { + setSelectedFile({ + name: file.name, + type: file.type, + size: file.size + }) + setFile(file) + } + } + + const handleSendController = async () => { + setLoading(true); + setInputValue(""); + await handleSend(inputValue); + setLoading(false); + + // focus on input + setTimeout(() => { + ref.current?.focus(); + }, 0); + }; + + const handleClosePopup = () => { + setSelectedFile(null) + if (fileInputRef.current) { + fileInputRef.current.value = '' + } + } + + return ( + <> + ++ > + ); +}; + +const FileInfoPopup = ({ file, onClose }: { file: FileInfo, onClose: () => void }) => { + return ( +++++ setInputValue(e.target.value)} + onKeyDown={(e) => e.key === "Enter" && handleSendController()} + placeholder="Type a message..." 
+ className="flex-1 pl-10 rounded-3xl" + disabled={loading} + ref={ref} + /> ++ + + {selectedFile &&+} + + ++++ ) +} + +export default InputArea; diff --git a/examples/vercel-ai-sdk-chat-app/src/components/memories.tsx b/examples/vercel-ai-sdk-chat-app/src/components/memories.tsx new file mode 100644 index 0000000000..20a2f22e8e --- /dev/null +++ b/examples/vercel-ai-sdk-chat-app/src/components/memories.tsx @@ -0,0 +1,93 @@ +import { Badge } from "@/components/ui/badge"; +import { Card } from "@/components/ui/card"; +import { ScrollArea } from "@radix-ui/react-scroll-area"; +import { Memory } from "../types"; +import GlobalContext from "@/contexts/GlobalContext"; +import { useContext, useEffect, useState } from "react"; +import { AnimatePresence, motion } from "framer-motion"; + + +// eslint-disable-next-line @typescript-eslint/no-unused-vars +const MemoryItem = ({ memory, index }: { memory: Memory; index: number }) => { + return ( +++++{file.name}
+ +Type: {file.type}
+Size: {(file.size / 1024).toFixed(2)} KB
++ + ); +}; + +const Memories = (props: { isMemoriesExpanded: boolean }) => { + const { isMemoriesExpanded } = props; + const { memories } = useContext(GlobalContext); + + // eslint-disable-next-line @typescript-eslint/no-unused-vars + const [prevMemories, setPrevMemories] = useState++{memory.content}
++ {new Date(memory.timestamp).toLocaleString()} +++ {memory.tags.map((tag) => ( +++ {tag} + + ))} +([]); + + // Track memory positions for animation + useEffect(() => { + setPrevMemories(memories); + }, [memories]); + + return ( + + + ); +}; + +export default Memories; \ No newline at end of file diff --git a/examples/vercel-ai-sdk-chat-app/src/components/messages.tsx b/examples/vercel-ai-sdk-chat-app/src/components/messages.tsx new file mode 100644 index 0000000000..38e5a59e12 --- /dev/null +++ b/examples/vercel-ai-sdk-chat-app/src/components/messages.tsx @@ -0,0 +1,102 @@ +import { Avatar, AvatarFallback, AvatarImage } from "@/components/ui/avatar"; +import { ScrollArea } from "@/components/ui/scroll-area"; +import { Message } from "../types"; +import { useContext, useEffect, useRef } from "react"; +import GlobalContext from "@/contexts/GlobalContext"; +import Markdown from "react-markdown"; +import Mem00Logo from "../assets/mem0_logo.jpeg"; +import UserLogo from "../assets/user.jpg"; + +const Messages = () => { + const { messages, thinking } = useContext(GlobalContext); + const scrollAreaRef = useRef+ + Relevant Memories ({memories.length}) + ++ {memories.length === 0 && ( ++ No relevant memories found. + + )} +
+ Only the relevant memories will be displayed here. ++ ++ ++ {memories.map((memory: Memory, index: number) => ( + ++ ))} + (null); + + // scroll to bottom + useEffect(() => { + if (scrollAreaRef.current) { + scrollAreaRef.current.scrollTop += 40; // Scroll down by 40 pixels + } + }, [messages, thinking]); + + return ( + <> + + + > + ); +}; + +export default Messages; diff --git a/examples/vercel-ai-sdk-chat-app/src/components/ui/avatar.tsx b/examples/vercel-ai-sdk-chat-app/src/components/ui/avatar.tsx new file mode 100644 index 0000000000..51e507ba9d --- /dev/null +++ b/examples/vercel-ai-sdk-chat-app/src/components/ui/avatar.tsx @@ -0,0 +1,50 @@ +"use client" + +import * as React from "react" +import * as AvatarPrimitive from "@radix-ui/react-avatar" + +import { cn } from "@/lib/utils" + +const Avatar = React.forwardRef< + React.ElementRef+ {messages.map((message: Message) => ( ++++ ))} + {thinking && ( ++++++ ++ + {message.sender === "assistant" ? "AI" : "U"} + ++ {message.image && ( +++ ++ )} +{message.content} + + {message.timestamp} + +++ )} ++++ ++ {"AI"} ++++ + + ++, + React.ComponentPropsWithoutRef +>(({ className, ...props }, ref) => ( + +)) +Avatar.displayName = AvatarPrimitive.Root.displayName + +const AvatarImage = React.forwardRef< + React.ElementRef , + React.ComponentPropsWithoutRef +>(({ className, ...props }, ref) => ( + +)) +AvatarImage.displayName = AvatarPrimitive.Image.displayName + +const AvatarFallback = React.forwardRef< + React.ElementRef , + React.ComponentPropsWithoutRef +>(({ className, ...props }, ref) => ( + +)) +AvatarFallback.displayName = AvatarPrimitive.Fallback.displayName + +export { Avatar, AvatarImage, AvatarFallback } diff --git a/examples/vercel-ai-sdk-chat-app/src/components/ui/badge.tsx b/examples/vercel-ai-sdk-chat-app/src/components/ui/badge.tsx new file mode 100644 index 0000000000..e87d62bf1a --- /dev/null +++ b/examples/vercel-ai-sdk-chat-app/src/components/ui/badge.tsx @@ -0,0 +1,36 @@ +import * as React from "react" 
+import { cva, type VariantProps } from "class-variance-authority" + +import { cn } from "@/lib/utils" + +const badgeVariants = cva( + "inline-flex items-center rounded-md border px-2.5 py-0.5 text-xs font-semibold transition-colors focus:outline-none focus:ring-2 focus:ring-ring focus:ring-offset-2", + { + variants: { + variant: { + default: + "border-transparent bg-primary text-primary-foreground shadow hover:bg-primary/80", + secondary: + "border-transparent bg-secondary text-secondary-foreground hover:bg-secondary/80", + destructive: + "border-transparent bg-destructive text-destructive-foreground shadow hover:bg-destructive/80", + outline: "text-foreground", + }, + }, + defaultVariants: { + variant: "default", + }, + } +) + +export interface BadgeProps + extends React.HTMLAttributes , + VariantProps {} + +function Badge({ className, variant, ...props }: BadgeProps) { + return ( + + ) +} + +export { Badge, badgeVariants } diff --git a/examples/vercel-ai-sdk-chat-app/src/components/ui/button.tsx b/examples/vercel-ai-sdk-chat-app/src/components/ui/button.tsx new file mode 100644 index 0000000000..65d4fcd9ca --- /dev/null +++ b/examples/vercel-ai-sdk-chat-app/src/components/ui/button.tsx @@ -0,0 +1,57 @@ +import * as React from "react" +import { Slot } from "@radix-ui/react-slot" +import { cva, type VariantProps } from "class-variance-authority" + +import { cn } from "@/lib/utils" + +const buttonVariants = cva( + "inline-flex items-center justify-center gap-2 whitespace-nowrap rounded-md text-sm font-medium transition-colors focus-visible:outline-none focus-visible:ring-1 focus-visible:ring-ring disabled:pointer-events-none disabled:opacity-50 [&_svg]:pointer-events-none [&_svg]:size-4 [&_svg]:shrink-0", + { + variants: { + variant: { + default: + "bg-primary text-primary-foreground shadow hover:bg-primary/90", + destructive: + "bg-destructive text-destructive-foreground shadow-sm hover:bg-destructive/90", + outline: + "border border-input bg-background shadow-sm 
hover:bg-accent hover:text-accent-foreground", + secondary: + "bg-secondary text-secondary-foreground shadow-sm hover:bg-secondary/80", + ghost: "hover:bg-accent hover:text-accent-foreground", + link: "text-primary underline-offset-4 hover:underline", + }, + size: { + default: "h-9 px-4 py-2", + sm: "h-8 rounded-md px-3 text-xs", + lg: "h-10 rounded-md px-8", + icon: "h-9 w-9", + }, + }, + defaultVariants: { + variant: "default", + size: "default", + }, + } +) + +export interface ButtonProps + extends React.ButtonHTMLAttributes , + VariantProps { + asChild?: boolean +} + +const Button = React.forwardRef ( + ({ className, variant, size, asChild = false, ...props }, ref) => { + const Comp = asChild ? Slot : "button" + return ( + + ) + } +) +Button.displayName = "Button" + +export { Button, buttonVariants } diff --git a/examples/vercel-ai-sdk-chat-app/src/components/ui/card.tsx b/examples/vercel-ai-sdk-chat-app/src/components/ui/card.tsx new file mode 100644 index 0000000000..77e9fb789b --- /dev/null +++ b/examples/vercel-ai-sdk-chat-app/src/components/ui/card.tsx @@ -0,0 +1,76 @@ +import * as React from "react" + +import { cn } from "@/lib/utils" + +const Card = React.forwardRef< + HTMLDivElement, + React.HTMLAttributes +>(({ className, ...props }, ref) => ( + +)) +Card.displayName = "Card" + +const CardHeader = React.forwardRef< + HTMLDivElement, + React.HTMLAttributes +>(({ className, ...props }, ref) => ( + +)) +CardHeader.displayName = "CardHeader" + +const CardTitle = React.forwardRef< + HTMLParagraphElement, + React.HTMLAttributes +>(({ className, ...props }, ref) => ( + +)) +CardTitle.displayName = "CardTitle" + +const CardDescription = React.forwardRef< + HTMLParagraphElement, + React.HTMLAttributes +>(({ className, ...props }, ref) => ( + +)) +CardDescription.displayName = "CardDescription" + +const CardContent = React.forwardRef< + HTMLDivElement, + React.HTMLAttributes +>(({ className, ...props }, ref) => ( + +)) +CardContent.displayName = "CardContent" + 
+const CardFooter = React.forwardRef< + HTMLDivElement, + React.HTMLAttributes +>(({ className, ...props }, ref) => ( + +)) +CardFooter.displayName = "CardFooter" + +export { Card, CardHeader, CardFooter, CardTitle, CardDescription, CardContent } diff --git a/examples/vercel-ai-sdk-chat-app/src/components/ui/dialog.tsx b/examples/vercel-ai-sdk-chat-app/src/components/ui/dialog.tsx new file mode 100644 index 0000000000..5d16351fa7 --- /dev/null +++ b/examples/vercel-ai-sdk-chat-app/src/components/ui/dialog.tsx @@ -0,0 +1,120 @@ +import * as React from "react" +import * as DialogPrimitive from "@radix-ui/react-dialog" +import { Cross2Icon } from "@radix-ui/react-icons" + +import { cn } from "@/lib/utils" + +const Dialog = DialogPrimitive.Root + +const DialogTrigger = DialogPrimitive.Trigger + +const DialogPortal = DialogPrimitive.Portal + +const DialogClose = DialogPrimitive.Close + +const DialogOverlay = React.forwardRef< + React.ElementRef , + React.ComponentPropsWithoutRef +>(({ className, ...props }, ref) => ( + +)) +DialogOverlay.displayName = DialogPrimitive.Overlay.displayName + +const DialogContent = React.forwardRef< + React.ElementRef , + React.ComponentPropsWithoutRef +>(({ className, children, ...props }, ref) => ( + + +)) +DialogContent.displayName = DialogPrimitive.Content.displayName + +const DialogHeader = ({ + className, + ...props +}: React.HTMLAttributes+ + {children} + ++ ++ Close + ) => ( + +) +DialogHeader.displayName = "DialogHeader" + +const DialogFooter = ({ + className, + ...props +}: React.HTMLAttributes ) => ( + +) +DialogFooter.displayName = "DialogFooter" + +const DialogTitle = React.forwardRef< + React.ElementRef , + React.ComponentPropsWithoutRef +>(({ className, ...props }, ref) => ( + +)) +DialogTitle.displayName = DialogPrimitive.Title.displayName + +const DialogDescription = React.forwardRef< + React.ElementRef , + React.ComponentPropsWithoutRef +>(({ className, ...props }, ref) => ( + +)) +DialogDescription.displayName = 
DialogPrimitive.Description.displayName + +export { + Dialog, + DialogPortal, + DialogOverlay, + DialogTrigger, + DialogClose, + DialogContent, + DialogHeader, + DialogFooter, + DialogTitle, + DialogDescription, +} diff --git a/examples/vercel-ai-sdk-chat-app/src/components/ui/input.tsx b/examples/vercel-ai-sdk-chat-app/src/components/ui/input.tsx new file mode 100644 index 0000000000..5af26b2c1a --- /dev/null +++ b/examples/vercel-ai-sdk-chat-app/src/components/ui/input.tsx @@ -0,0 +1,25 @@ +import * as React from "react" + +import { cn } from "@/lib/utils" + +export interface InputProps + extends React.InputHTMLAttributes {} + +const Input = React.forwardRef ( + ({ className, type, ...props }, ref) => { + return ( + + ) + } +) +Input.displayName = "Input" + +export { Input } diff --git a/examples/vercel-ai-sdk-chat-app/src/components/ui/label.tsx b/examples/vercel-ai-sdk-chat-app/src/components/ui/label.tsx new file mode 100644 index 0000000000..683faa7938 --- /dev/null +++ b/examples/vercel-ai-sdk-chat-app/src/components/ui/label.tsx @@ -0,0 +1,24 @@ +import * as React from "react" +import * as LabelPrimitive from "@radix-ui/react-label" +import { cva, type VariantProps } from "class-variance-authority" + +import { cn } from "@/lib/utils" + +const labelVariants = cva( + "text-sm font-medium leading-none peer-disabled:cursor-not-allowed peer-disabled:opacity-70" +) + +const Label = React.forwardRef< + React.ElementRef , + React.ComponentPropsWithoutRef & + VariantProps +>(({ className, ...props }, ref) => ( + +)) +Label.displayName = LabelPrimitive.Root.displayName + +export { Label } diff --git a/examples/vercel-ai-sdk-chat-app/src/components/ui/scroll-area.tsx b/examples/vercel-ai-sdk-chat-app/src/components/ui/scroll-area.tsx new file mode 100644 index 0000000000..cf253cf170 --- /dev/null +++ b/examples/vercel-ai-sdk-chat-app/src/components/ui/scroll-area.tsx @@ -0,0 +1,46 @@ +import * as React from "react" +import * as ScrollAreaPrimitive from 
"@radix-ui/react-scroll-area" + +import { cn } from "@/lib/utils" + +const ScrollArea = React.forwardRef< + React.ElementRef , + React.ComponentPropsWithoutRef +>(({ className, children, ...props }, ref) => ( + + +)) +ScrollArea.displayName = ScrollAreaPrimitive.Root.displayName + +const ScrollBar = React.forwardRef< + React.ElementRef+ {children} + ++ + , + React.ComponentPropsWithoutRef +>(({ className, orientation = "vertical", ...props }, ref) => ( + + +)) +ScrollBar.displayName = ScrollAreaPrimitive.ScrollAreaScrollbar.displayName + +export { ScrollArea, ScrollBar } diff --git a/examples/vercel-ai-sdk-chat-app/src/components/ui/select.tsx b/examples/vercel-ai-sdk-chat-app/src/components/ui/select.tsx new file mode 100644 index 0000000000..ac2a8f2b9c --- /dev/null +++ b/examples/vercel-ai-sdk-chat-app/src/components/ui/select.tsx @@ -0,0 +1,164 @@ +"use client" + +import * as React from "react" +import { + CaretSortIcon, + CheckIcon, + ChevronDownIcon, + ChevronUpIcon, +} from "@radix-ui/react-icons" +import * as SelectPrimitive from "@radix-ui/react-select" + +import { cn } from "@/lib/utils" + +const Select = SelectPrimitive.Root + +const SelectGroup = SelectPrimitive.Group + +const SelectValue = SelectPrimitive.Value + +const SelectTrigger = React.forwardRef< + React.ElementRef+ , + React.ComponentPropsWithoutRef +>(({ className, children, ...props }, ref) => ( + span]:line-clamp-1", + className + )} + {...props} + > + {children} + +)) +SelectTrigger.displayName = SelectPrimitive.Trigger.displayName + +const SelectScrollUpButton = React.forwardRef< + React.ElementRef+ ++ , + React.ComponentPropsWithoutRef +>(({ className, ...props }, ref) => ( + + +)) +SelectScrollUpButton.displayName = SelectPrimitive.ScrollUpButton.displayName + +const SelectScrollDownButton = React.forwardRef< + React.ElementRef+ , + React.ComponentPropsWithoutRef +>(({ className, ...props }, ref) => ( + + +)) +SelectScrollDownButton.displayName = + 
SelectPrimitive.ScrollDownButton.displayName + +const SelectContent = React.forwardRef< + React.ElementRef+ , + React.ComponentPropsWithoutRef +>(({ className, children, position = "popper", ...props }, ref) => ( + + +)) +SelectContent.displayName = SelectPrimitive.Content.displayName + +const SelectLabel = React.forwardRef< + React.ElementRef+ ++ + {children} + ++ , + React.ComponentPropsWithoutRef +>(({ className, ...props }, ref) => ( + +)) +SelectLabel.displayName = SelectPrimitive.Label.displayName + +const SelectItem = React.forwardRef< + React.ElementRef , + React.ComponentPropsWithoutRef +>(({ className, children, ...props }, ref) => ( + + + +)) +SelectItem.displayName = SelectPrimitive.Item.displayName + +const SelectSeparator = React.forwardRef< + React.ElementRef+ + ++ {children} +, + React.ComponentPropsWithoutRef +>(({ className, ...props }, ref) => ( + +)) +SelectSeparator.displayName = SelectPrimitive.Separator.displayName + +export { + Select, + SelectGroup, + SelectValue, + SelectTrigger, + SelectContent, + SelectLabel, + SelectItem, + SelectSeparator, + SelectScrollUpButton, + SelectScrollDownButton, +} diff --git a/examples/vercel-ai-sdk-chat-app/src/contexts/GlobalContext.tsx b/examples/vercel-ai-sdk-chat-app/src/contexts/GlobalContext.tsx new file mode 100644 index 0000000000..10f1b683b2 --- /dev/null +++ b/examples/vercel-ai-sdk-chat-app/src/contexts/GlobalContext.tsx @@ -0,0 +1,324 @@ +/* eslint-disable @typescript-eslint/no-explicit-any */ +import { createContext, useEffect, useState } from "react"; +import { createMem0, searchMemories } from "@mem0/vercel-ai-provider"; +import { LanguageModelV1Prompt, streamText } from "ai"; +import { Message, Memory, FileInfo } from "@/types"; +import { Buffer } from 'buffer'; + +const GlobalContext = createContext ({}); + +const WelcomeMessage: Message = { + id: "1", + content: + "👋 Hi there! I'm your personal assistant. How can I help you today? 
😊", + sender: "assistant", + timestamp: new Date().toLocaleTimeString(), +}; + +const InvalidConfigMessage: Message = { + id: "2", + content: + "Invalid configuration. Please check your API keys, and add a user and try again.", + sender: "assistant", + timestamp: new Date().toLocaleTimeString(), +}; + +const SomethingWentWrongMessage: Message = { + id: "3", + content: "Something went wrong. Please try again.", + sender: "assistant", + timestamp: new Date().toLocaleTimeString(), +}; + +const models = { + "openai": "gpt-4o", + "anthropic": "claude-3-haiku-20240307", + "cohere": "command-r-plus", + "groq": "gemma2-9b-it" +} + +const getModel = (provider: string) => { + switch (provider) { + case "openai": + return models.openai; + case "anthropic": + return models.anthropic; + case "cohere": + return models.cohere; + case "groq": + return models.groq; + default: + return models.openai; + } +} + +const GlobalState = (props: any) => { + const [memories, setMemories] = useState ([]); + const [messages, setMessages] = useState ([]); + const [selectedUser, setSelectedUser] = useState (""); + const [thinking, setThinking] = useState (false); + const [selectedOpenAIKey, setSelectedOpenAIKey] = useState (""); + const [selectedMem0Key, setSelectedMem0Key] = useState (""); + const [selectedProvider, setSelectedProvider] = useState ("openai"); + const [selectedFile, setSelectedFile] = useState (null) + const [file, setFile] = useState (null) + + const mem0 = createMem0({ + provider: selectedProvider, + mem0ApiKey: selectedMem0Key, + apiKey: selectedOpenAIKey, + }); + + const clearConfiguration = () => { + localStorage.removeItem("mem0ApiKey"); + localStorage.removeItem("openaiApiKey"); + localStorage.removeItem("provider"); + setSelectedMem0Key(""); + setSelectedOpenAIKey(""); + setSelectedProvider("openai"); + setSelectedUser(""); + setMessages([WelcomeMessage]); + setMemories([]); + setFile(null); + }; + + const selectorHandler = (mem0: string, openai: string, provider: 
string) => {
+    setSelectedMem0Key(mem0);
+    setSelectedOpenAIKey(openai);
+    setSelectedProvider(provider);
+    localStorage.setItem("mem0ApiKey", mem0);
+    localStorage.setItem("openaiApiKey", openai);
+    localStorage.setItem("provider", provider);
+  };
+
+
+  // Restore persisted configuration (API keys, provider, user) on mount.
+  useEffect(() => {
+    const mem0 = localStorage.getItem("mem0ApiKey");
+    const openai = localStorage.getItem("openaiApiKey");
+    const provider = localStorage.getItem("provider");
+    const user = localStorage.getItem("user");
+    if (mem0 && openai && provider) {
+      selectorHandler(mem0, openai, provider);
+    }
+    if (user) {
+      setSelectedUser(user);
+    }
+  }, []);
+
+  const selectUserHandler = (user: string) => {
+    setSelectedUser(user);
+    localStorage.setItem("user", user);
+  };
+
+  const clearUserHandler = () => {
+    setSelectedUser("");
+    setMemories([]);
+  };
+
+  // Search memories relevant to the conversation so far and mirror them
+  // into local state for the memories sidebar.
+  const getMemories = async (messages: LanguageModelV1Prompt) => {
+    try {
+      const smemories = await searchMemories(messages, {
+        user_id: selectedUser || "",
+        // Use the key the user configured in the settings popup (persisted in
+        // localStorage), consistent with createMem0 above. This previously
+        // read import.meta.env.VITE_MEM0_API_KEY, which ignored the
+        // user-supplied key and broke memory search when the env var was unset.
+        mem0ApiKey: selectedMem0Key,
+      });
+
+      const newMemories = smemories.map((memory: any) => ({
+        id: memory.id,
+        content: memory.memory,
+        timestamp: memory.updated_at,
+        tags: memory.categories,
+      }));
+      setMemories(newMemories);
+    } catch (error) {
+      console.error("Error in getMemories:", error);
+    }
+  };
+
+  // Send the current input (plus optional image/audio attachment) to the model.
+  const handleSend = async (inputValue: string) => {
+    if (!inputValue.trim() && !file) return;
+    // Without a selected user there is no memory scope; surface a config error.
+    if (!selectedUser) {
+      const newMessage: Message = {
+        id: Date.now().toString(),
+        content: inputValue,
+        sender: "user",
+        timestamp: new Date().toLocaleTimeString(),
+      };
+      setMessages((prev) => [...prev, newMessage, InvalidConfigMessage]);
+      return;
+    }
+
+    const userMessage: Message = {
+      id: Date.now().toString(),
+      content: inputValue,
+      sender: "user",
+      timestamp: new Date().toLocaleTimeString(),
+    };
+
+    let fileData;
+    if (file) {
+      if (file.type.startsWith("image/")) {
+        // Convert image to Base64
+        fileData = await convertToBase64(file);
+        userMessage.image = fileData;
+      }
else if (file.type.startsWith("audio/")) { + // Convert audio to ArrayBuffer + fileData = await getFileBuffer(file); + userMessage.audio = fileData; + } + } + + // Update the state with the new user message + setMessages((prev) => [...prev, userMessage]); + setThinking(true); + + // Transform messages into the required format + const messagesForPrompt: LanguageModelV1Prompt = []; + messages.map((message) => { + const messageContent: any = { + role: message.sender, + content: [ + { + type: "text", + text: message.content, + }, + ], + }; + if (message.image) { + messageContent.content.push({ + type: "image", + image: message.image, + }); + } + if (message.audio) { + messageContent.content.push({ + type: 'file', + mimeType: 'audio/mpeg', + data: message.audio, + }); + } + if(!message.audio) messagesForPrompt.push(messageContent); + }); + + const newMessage: any = { + role: "user", + content: [ + { + type: "text", + text: inputValue, + }, + ], + }; + if (file) { + if (file.type.startsWith("image/")) { + newMessage.content.push({ + type: "image", + image: userMessage.image, + }); + } else if (file.type.startsWith("audio/")) { + newMessage.content.push({ + type: 'file', + mimeType: 'audio/mpeg', + data: userMessage.audio, + }); + } + } + + messagesForPrompt.push(newMessage); + getMemories(messagesForPrompt); + + setFile(null); + setSelectedFile(null); + + try { + const { textStream } = await streamText({ + model: mem0(getModel(selectedProvider), { + user_id: selectedUser || "", + }), + messages: messagesForPrompt, + }); + + const assistantMessageId = Date.now() + 1; + const assistantMessage: Message = { + id: assistantMessageId.toString(), + content: "", + sender: "assistant", + timestamp: new Date().toLocaleTimeString(), + }; + + setMessages((prev) => [...prev, assistantMessage]); + + // Stream the text part by part + for await (const textPart of textStream) { + assistantMessage.content += textPart; + setThinking(false); + setFile(null); + setSelectedFile(null); + + 
setMessages((prev) => + prev.map((msg) => + msg.id === assistantMessageId.toString() + ? { ...msg, content: assistantMessage.content } + : msg + ) + ); + } + + setThinking(false); + } catch (error) { + console.error("Error in handleSend:", error); + setMessages((prev) => [...prev, SomethingWentWrongMessage]); + setThinking(false); + setFile(null); + setSelectedFile(null); + } + }; + + useEffect(() => { + setMessages([WelcomeMessage]); + }, []); + + return ( + + {props.children} + + ); +}; + +export default GlobalContext; +export { GlobalState }; + + +const convertToBase64 = (file: File): Promise=> { + return new Promise((resolve, reject) => { + const reader = new FileReader(); + reader.readAsDataURL(file); + reader.onload = () => resolve(reader.result as string); // Resolve with Base64 string + reader.onerror = error => reject(error); // Reject on error + }); +}; + +async function getFileBuffer(file: any) { + const response = await fetch(file); + const arrayBuffer = await response.arrayBuffer(); + const buffer = Buffer.from(arrayBuffer); + return buffer; +} \ No newline at end of file diff --git a/examples/vercel-ai-sdk-chat-app/src/index.css b/examples/vercel-ai-sdk-chat-app/src/index.css new file mode 100644 index 0000000000..405a75d58d --- /dev/null +++ b/examples/vercel-ai-sdk-chat-app/src/index.css @@ -0,0 +1,97 @@ +@tailwind base; +@tailwind components; +@tailwind utilities; +@layer base { + :root { + --background: 0 0% 100%; + --foreground: 240 10% 3.9%; + --card: 0 0% 100%; + --card-foreground: 240 10% 3.9%; + --popover: 0 0% 100%; + --popover-foreground: 240 10% 3.9%; + --primary: 240 5.9% 10%; + --primary-foreground: 0 0% 98%; + --secondary: 240 4.8% 95.9%; + --secondary-foreground: 240 5.9% 10%; + --muted: 240 4.8% 95.9%; + --muted-foreground: 240 3.8% 46.1%; + --accent: 240 4.8% 95.9%; + --accent-foreground: 240 5.9% 10%; + --destructive: 0 84.2% 60.2%; + --destructive-foreground: 0 0% 98%; + --border: 240 5.9% 90%; + --input: 240 5.9% 90%; + --ring: 
240 10% 3.9%; + --chart-1: 12 76% 61%; + --chart-2: 173 58% 39%; + --chart-3: 197 37% 24%; + --chart-4: 43 74% 66%; + --chart-5: 27 87% 67%; + --radius: 0.5rem + } + .dark { + --background: 240 10% 3.9%; + --foreground: 0 0% 98%; + --card: 240 10% 3.9%; + --card-foreground: 0 0% 98%; + --popover: 240 10% 3.9%; + --popover-foreground: 0 0% 98%; + --primary: 0 0% 98%; + --primary-foreground: 240 5.9% 10%; + --secondary: 240 3.7% 15.9%; + --secondary-foreground: 0 0% 98%; + --muted: 240 3.7% 15.9%; + --muted-foreground: 240 5% 64.9%; + --accent: 240 3.7% 15.9%; + --accent-foreground: 0 0% 98%; + --destructive: 0 62.8% 30.6%; + --destructive-foreground: 0 0% 98%; + --border: 240 3.7% 15.9%; + --input: 240 3.7% 15.9%; + --ring: 240 4.9% 83.9%; + --chart-1: 220 70% 50%; + --chart-2: 160 60% 45%; + --chart-3: 30 80% 55%; + --chart-4: 280 65% 60%; + --chart-5: 340 75% 55% + } +} +@layer base { + * { + @apply border-border; + } + body { + @apply bg-background text-foreground; + } +} + +.loader { + display: flex; + align-items: flex-end; + gap: 5px; +} + +.ball { + width: 6px; + height: 6px; + background-color: #4e4e4e; + border-radius: 50%; + animation: bounce 0.6s infinite alternate; +} + +.ball:nth-child(2) { + animation-delay: 0.2s; +} + +.ball:nth-child(3) { + animation-delay: 0.4s; +} + +@keyframes bounce { + from { + transform: translateY(0); + } + to { + transform: translateY(-4px); + } +} diff --git a/examples/vercel-ai-sdk-chat-app/src/main.tsx b/examples/vercel-ai-sdk-chat-app/src/main.tsx new file mode 100644 index 0000000000..bef5202a32 --- /dev/null +++ b/examples/vercel-ai-sdk-chat-app/src/main.tsx @@ -0,0 +1,10 @@ +import { StrictMode } from 'react' +import { createRoot } from 'react-dom/client' +import './index.css' +import App from './App.tsx' + +createRoot(document.getElementById('root')!).render( + + , +) diff --git a/examples/vercel-ai-sdk-chat-app/src/page.tsx b/examples/vercel-ai-sdk-chat-app/src/page.tsx new file mode 100644 index 
0000000000..1f99e8561c --- /dev/null +++ b/examples/vercel-ai-sdk-chat-app/src/page.tsx @@ -0,0 +1,14 @@ +"use client"; +import { GlobalState } from "./contexts/GlobalContext"; +import Component from "./pages/home"; + + +export default function Home() { + return ( ++ ++ ); +} diff --git a/examples/vercel-ai-sdk-chat-app/src/pages/home.tsx b/examples/vercel-ai-sdk-chat-app/src/pages/home.tsx new file mode 100644 index 0000000000..f72b175ee8 --- /dev/null +++ b/examples/vercel-ai-sdk-chat-app/src/pages/home.tsx @@ -0,0 +1,41 @@ +import { useState } from "react"; +import ApiSettingsPopup from "../components/api-settings-popup"; +import Memories from "../components/memories"; +import Header from "../components/header"; +import Messages from "../components/messages"; +import InputArea from "../components/input-area"; +import ChevronToggle from "../components/chevron-toggle"; + + +export default function Home() { + const [isMemoriesExpanded, setIsMemoriesExpanded] = useState(true); + const [isSettingsOpen, setIsSettingsOpen] = useState(false); + + return ( + <> ++ ++ + + {/* Main Chat Area */} ++ > + ); +} diff --git a/examples/vercel-ai-sdk-chat-app/src/types.ts b/examples/vercel-ai-sdk-chat-app/src/types.ts new file mode 100644 index 0000000000..770bc23f7d --- /dev/null +++ b/examples/vercel-ai-sdk-chat-app/src/types.ts @@ -0,0 +1,22 @@ +/* eslint-disable @typescript-eslint/no-explicit-any */ +export interface Memory { + id: string; + content: string; + timestamp: string; + tags: string[]; +} + +export interface Message { + id: string; + content: string; + sender: "user" | "assistant"; + timestamp: string; + image?: string; + audio?: any; +} + +export interface FileInfo { + name: string; + type: string; + size: number; +} \ No newline at end of file diff --git a/examples/vercel-ai-sdk-chat-app/src/vite-env.d.ts b/examples/vercel-ai-sdk-chat-app/src/vite-env.d.ts new file mode 100644 index 0000000000..11f02fe2a0 --- /dev/null +++ 
b/examples/vercel-ai-sdk-chat-app/src/vite-env.d.ts @@ -0,0 +1 @@ +///+ {/* Header */} ++ + {/* Chevron Toggle */} ++ + {/* Messages */} + + + {/* Input Area */} + + + + {/* Memories Sidebar */} + + diff --git a/examples/vercel-ai-sdk-chat-app/tailwind.config.js b/examples/vercel-ai-sdk-chat-app/tailwind.config.js new file mode 100644 index 0000000000..150128518e --- /dev/null +++ b/examples/vercel-ai-sdk-chat-app/tailwind.config.js @@ -0,0 +1,62 @@ +// tailwind.config.js +/* eslint-env node */ + +/** @type {import('tailwindcss').Config} */ +import tailwindcssAnimate from 'tailwindcss-animate'; + +export default { + darkMode: ["class"], + content: ["./index.html", "./src/**/*.{ts,tsx,js,jsx}"], + theme: { + extend: { + borderRadius: { + lg: 'var(--radius)', + md: 'calc(var(--radius) - 2px)', + sm: 'calc(var(--radius) - 4px)', + }, + colors: { + background: 'hsl(var(--background))', + foreground: 'hsl(var(--foreground))', + card: { + DEFAULT: 'hsl(var(--card))', + foreground: 'hsl(var(--card-foreground))', + }, + popover: { + DEFAULT: 'hsl(var(--popover))', + foreground: 'hsl(var(--popover-foreground))', + }, + primary: { + DEFAULT: 'hsl(var(--primary))', + foreground: 'hsl(var(--primary-foreground))', + }, + secondary: { + DEFAULT: 'hsl(var(--secondary))', + foreground: 'hsl(var(--secondary-foreground))', + }, + muted: { + DEFAULT: 'hsl(var(--muted))', + foreground: 'hsl(var(--muted-foreground))', + }, + accent: { + DEFAULT: 'hsl(var(--accent))', + foreground: 'hsl(var(--accent-foreground))', + }, + destructive: { + DEFAULT: 'hsl(var(--destructive))', + foreground: 'hsl(var(--destructive-foreground))', + }, + border: 'hsl(var(--border))', + input: 'hsl(var(--input))', + ring: 'hsl(var(--ring))', + chart: { + '1': 'hsl(var(--chart-1))', + '2': 'hsl(var(--chart-2))', + '3': 'hsl(var(--chart-3))', + '4': 'hsl(var(--chart-4))', + '5': 'hsl(var(--chart-5))', + }, + }, + }, + }, + plugins: [tailwindcssAnimate], +}; diff --git 
a/examples/vercel-ai-sdk-chat-app/tsconfig.app.json b/examples/vercel-ai-sdk-chat-app/tsconfig.app.json new file mode 100644 index 0000000000..6d0c89af2c --- /dev/null +++ b/examples/vercel-ai-sdk-chat-app/tsconfig.app.json @@ -0,0 +1,32 @@ +{ + "compilerOptions": { + "tsBuildInfoFile": "./node_modules/.tmp/tsconfig.app.tsbuildinfo", + "target": "ES2020", + "useDefineForClassFields": true, + "lib": ["ES2020", "DOM", "DOM.Iterable"], + "module": "ESNext", + "skipLibCheck": true, + "baseUrl": ".", + "paths": { + "@/*": [ + "./src/*" + ] + }, + + /* Bundler mode */ + "moduleResolution": "Bundler", + "allowImportingTsExtensions": true, + "isolatedModules": true, + "moduleDetection": "force", + "noEmit": true, + "jsx": "react-jsx", + + /* Linting */ + "strict": true, + "noUnusedLocals": true, + "noUnusedParameters": true, + "noFallthroughCasesInSwitch": true, + "noUncheckedSideEffectImports": true + }, + "include": ["src"] +} diff --git a/examples/vercel-ai-sdk-chat-app/tsconfig.json b/examples/vercel-ai-sdk-chat-app/tsconfig.json new file mode 100644 index 0000000000..fec8c8e5c2 --- /dev/null +++ b/examples/vercel-ai-sdk-chat-app/tsconfig.json @@ -0,0 +1,13 @@ +{ + "files": [], + "references": [ + { "path": "./tsconfig.app.json" }, + { "path": "./tsconfig.node.json" } + ], + "compilerOptions": { + "baseUrl": ".", + "paths": { + "@/*": ["./src/*"] + } + } +} diff --git a/examples/vercel-ai-sdk-chat-app/tsconfig.node.json b/examples/vercel-ai-sdk-chat-app/tsconfig.node.json new file mode 100644 index 0000000000..abcd7f0dac --- /dev/null +++ b/examples/vercel-ai-sdk-chat-app/tsconfig.node.json @@ -0,0 +1,24 @@ +{ + "compilerOptions": { + "tsBuildInfoFile": "./node_modules/.tmp/tsconfig.node.tsbuildinfo", + "target": "ES2022", + "lib": ["ES2023"], + "module": "ESNext", + "skipLibCheck": true, + + /* Bundler mode */ + "moduleResolution": "Bundler", + "allowImportingTsExtensions": true, + "isolatedModules": true, + "moduleDetection": "force", + "noEmit": true, + + /* Linting 
*/ + "strict": true, + "noUnusedLocals": true, + "noUnusedParameters": true, + "noFallthroughCasesInSwitch": true, + "noUncheckedSideEffectImports": true + }, + "include": ["vite.config.ts"] +} diff --git a/examples/vercel-ai-sdk-chat-app/vite.config.ts b/examples/vercel-ai-sdk-chat-app/vite.config.ts new file mode 100644 index 0000000000..a761a87054 --- /dev/null +++ b/examples/vercel-ai-sdk-chat-app/vite.config.ts @@ -0,0 +1,13 @@ +import path from "path" +import react from "@vitejs/plugin-react" +import { defineConfig } from "vite" + +export default defineConfig({ + plugins: [react()], + resolve: { + alias: { + "@": path.resolve(__dirname, "./src"), + buffer: 'buffer' + }, + }, +}) diff --git a/vercel-ai-sdk/.gitattributes b/vercel-ai-sdk/.gitattributes new file mode 100644 index 0000000000..dfe0770424 --- /dev/null +++ b/vercel-ai-sdk/.gitattributes @@ -0,0 +1,2 @@ +# Auto detect text files and perform LF normalization +* text=auto diff --git a/vercel-ai-sdk/.gitignore b/vercel-ai-sdk/.gitignore new file mode 100644 index 0000000000..72d04c8773 --- /dev/null +++ b/vercel-ai-sdk/.gitignore @@ -0,0 +1,10 @@ +**/.env +**/node_modules +**/.DS_Store + +# Ignore test-related files +**/coverage.data +**/coverage/ + +# Build files +**/dist \ No newline at end of file diff --git a/vercel-ai-sdk/README.md b/vercel-ai-sdk/README.md new file mode 100644 index 0000000000..aaff79efe5 --- /dev/null +++ b/vercel-ai-sdk/README.md @@ -0,0 +1,228 @@ +# Mem0 AI SDK Provider + +The **Mem0 AI SDK Provider** is a community-maintained library developed by [Mem0](https://mem0.ai/) to integrate with the Vercel AI SDK. This library brings enhanced AI interaction capabilities to your applications by introducing persistent memory functionality. With Mem0, language model conversations gain memory, enabling more contextualized and personalized responses based on past interactions. + +Discover more of **Mem0** on [GitHub](https://github.com/mem0ai). 
+Explore the [Mem0 Documentation](https://docs.mem0.ai/overview) to gain deeper control and flexibility in managing your memories. + +For detailed information on using the Vercel AI SDK, refer to Vercel’s [API Reference](https://sdk.vercel.ai/docs/reference) and [Documentation](https://sdk.vercel.ai/docs). + +## Features + +- 🧠 Persistent memory storage for AI conversations +- 🔄 Seamless integration with Vercel AI SDK +- 🚀 Support for multiple LLM providers +- 📝 Rich message format support +- ⚡ Streaming capabilities +- 🔍 Context-aware responses + +## Installation + +```bash +npm install @mem0/vercel-ai-provider +``` + +## Before We Begin + +### Setting Up Mem0 + +1. Obtain your [Mem0 API Key](https://app.mem0.ai/dashboard/api-keys) from the Mem0 dashboard. + +2. Initialize the Mem0 Client: + +```typescript +import { createMem0 } from "@mem0/vercel-ai-provider"; + +const mem0 = createMem0({ + provider: "openai", + mem0ApiKey: "m0-xxx", + apiKey: "openai-api-key", + config: { + compatibility: "strict", + // Additional model-specific configuration options can be added here. + }, +}); +``` + +### Note +By default, the `openai` provider is used, so specifying it is optional: +```typescript +const mem0 = createMem0(); +``` +For better security, consider setting `MEM0_API_KEY` and `OPENAI_API_KEY` as environment variables. + +3. Add Memories to Enhance Context: + +```typescript +import { LanguageModelV1Prompt } from "ai"; +import { addMemories } from "@mem0/vercel-ai-provider"; + +const messages: LanguageModelV1Prompt = [ + { + role: "user", + content: [ + { type: "text", text: "I love red cars." }, + { type: "text", text: "I like Toyota Cars." }, + { type: "text", text: "I prefer SUVs." }, + ], + }, +]; + +await addMemories(messages, { user_id: "borat" }); +``` + +These memories are now stored in your profile. You can view and manage them on the [Mem0 Dashboard](https://app.mem0.ai/dashboard/users). 
+ +### Note: + +For standalone features, such as `addMemories` and `retrieveMemories`, +you must either set `MEM0_API_KEY` as an environment variable or pass it directly in the function call. + +Example: + +```typescript +await addMemories(messages, { user_id: "borat", mem0ApiKey: "m0-xxx" }); +await retrieveMemories(prompt, { user_id: "borat", mem0ApiKey: "m0-xxx" }); +``` + +## Usage Examples + +### 1. Basic Text Generation with Memory Context + +```typescript +import { generateText } from "ai"; +import { createMem0 } from "@mem0/vercel-ai-provider"; + +const mem0 = createMem0(); + +const { text } = await generateText({ + model: mem0("gpt-4-turbo", { + user_id: "borat", + }), + prompt: "Suggest me a good car to buy!", +}); +``` + +### 2. Combining OpenAI Provider with Memory Utils + +```typescript +import { generateText } from "ai"; +import { openai } from "@ai-sdk/openai"; +import { retrieveMemories } from "@mem0/vercel-ai-provider"; + +const prompt = "Suggest me a good car to buy."; +const memories = await retrieveMemories(prompt, { user_id: "borat" }); + +const { text } = await generateText({ + model: openai("gpt-4-turbo"), + prompt: prompt, + system: memories, +}); +``` + +### 3. Structured Message Format with Memory + +```typescript +import { generateText } from "ai"; +import { createMem0 } from "@mem0/vercel-ai-provider"; + +const mem0 = createMem0(); + +const { text } = await generateText({ + model: mem0("gpt-4-turbo", { + user_id: "borat", + }), + messages: [ + { + role: "user", + content: [ + { type: "text", text: "Suggest me a good car to buy." }, + { type: "text", text: "Why is it better than the other cars for me?" }, + { type: "text", text: "Give options for every price range." }, + ], + }, + ], +}); +``` + +### 4. 
Advanced Memory Integration with OpenAI + +```typescript +import { generateText, LanguageModelV1Prompt } from "ai"; +import { openai } from "@ai-sdk/openai"; +import { retrieveMemories } from "@mem0/vercel-ai-provider"; + +// New format using system parameter for memory context +const messages: LanguageModelV1Prompt = [ + { + role: "user", + content: [ + { type: "text", text: "Suggest me a good car to buy." }, + { type: "text", text: "Why is it better than the other cars for me?" }, + { type: "text", text: "Give options for every price range." }, + ], + }, +]; + +const memories = await retrieveMemories(messages, { user_id: "borat" }); + +const { text } = await generateText({ + model: openai("gpt-4-turbo"), + messages: messages, + system: memories, +}); +``` + +### 5. Streaming Responses with Memory Context + +```typescript +import { streamText } from "ai"; +import { createMem0 } from "@mem0/vercel-ai-provider"; + +const mem0 = createMem0(); + +const { textStream } = await streamText({ + model: mem0("gpt-4-turbo", { + user_id: "borat", + }), + prompt: + "Suggest me a good car to buy! Why is it better than the other cars for me? Give options for every price range.", +}); + +for await (const textPart of textStream) { + process.stdout.write(textPart); +} +``` + +## Core Functions + +- `createMem0()`: Initializes a new mem0 provider instance with optional configuration +- `retrieveMemories()`: Enriches prompts with relevant memories +- `addMemories()`: Add memories to your profile + +## Configuration Options + +```typescript +const mem0 = createMem0({ + config: { + ... + // Additional model-specific configuration options can be added here. + }, +}); +``` + +## Best Practices + +1. **User Identification**: Always provide a unique `user_id` identifier for consistent memory retrieval +2. **Context Management**: Use appropriate context window sizes to balance performance and memory +3. **Error Handling**: Implement proper error handling for memory operations +4. 
**Memory Cleanup**: Regularly clean up unused memory contexts to optimize performance
+
+We also have support for `agent_id`, `app_id`, and `run_id`. Refer to the [docs](https://docs.mem0.ai/api-reference/memory/add-memories).
+
+## Notes
+
+- Requires proper API key configuration for underlying providers (e.g., OpenAI)
+- Memory features depend on proper user identification via `user_id`
+- Supports both streaming and non-streaming responses
+- Compatible with all Vercel AI SDK features and patterns
diff --git a/vercel-ai-sdk/config/test-config.ts b/vercel-ai-sdk/config/test-config.ts
new file mode 100644
index 0000000000..b59fcbafe6
--- /dev/null
+++ b/vercel-ai-sdk/config/test-config.ts
@@ -0,0 +1,105 @@
+import dotenv from "dotenv";
+import { createMem0 } from "../src";
+
+dotenv.config();
+
+// Shape of one provider entry used by the integration tests.
+export interface Provider {
+  name: string;
+  activeModel: string;
+  apiKey: string | undefined;
+}
+
+// Shared fixture: credentials, model matrix, and helpers to create a
+// test client and clean up the test user via the Mem0 REST API.
+export const testConfig = {
+  apiKey: process.env.MEM0_API_KEY,
+  userId: "mem0-ai-sdk-test-user-1134774",
+  deleteId: "",
+  providers: [
+    {
+      name: "openai",
+      activeModel: "gpt-4-turbo",
+      apiKey: process.env.OPENAI_API_KEY,
+    },
+    {
+      name: "anthropic",
+      activeModel: "claude-3-5-sonnet-20240620",
+      apiKey: process.env.ANTHROPIC_API_KEY,
+    },
+    // {
+    //   name: "groq",
+    //   activeModel: "gemma2-9b-it",
+    //   apiKey: process.env.GROQ_API_KEY,
+    // },
+    {
+      name: "cohere",
+      activeModel: "command-r-plus",
+      apiKey: process.env.COHERE_API_KEY,
+    },
+  ],
+  models: {
+    openai: "gpt-4-turbo",
+    anthropic: "claude-3-haiku-20240307",
+    groq: "gemma2-9b-it",
+    cohere: "command-r-plus",
+  },
+  apiKeys: {
+    openai: process.env.OPENAI_API_KEY,
+    anthropic: process.env.ANTHROPIC_API_KEY,
+    groq: process.env.GROQ_API_KEY,
+    cohere: process.env.COHERE_API_KEY,
+  },
+
+  createTestClient: (provider: Provider) => {
+    return createMem0({
+      provider: provider.name,
+      mem0ApiKey: process.env.MEM0_API_KEY,
+      apiKey: provider.apiKey,
+    });
+  },
+  // Looks up the Mem0 entity id for `userId` so it can be deleted later.
+  fetchDeleteId: async function () {
+    const options = {
+      method: 'GET',
+      headers: {
+        Authorization: `Token ${this.apiKey}`,
+      },
+    };
+
+    try {
+      const response = await fetch('https://api.mem0.ai/v1/entities/', options);
+      const data = await response.json();
+      const entity = data.results.find((item: any) => item.name === this.userId);
+      if (entity) {
+        this.deleteId = entity.id;
+      } else {
+        console.error("No matching entity found for userId:", this.userId);
+      }
+    } catch (error) {
+      console.error("Error fetching deleteId:", error);
+      throw error;
+    }
+  },
+  // Deletes the test user; requires fetchDeleteId to have run first.
+  deleteUser: async function () {
+    if (!this.deleteId) {
+      console.error("deleteId is not set. Ensure fetchDeleteId is called first.");
+      return;
+    }
+
+    const options = {
+      method: 'DELETE',
+      headers: {
+        Authorization: `Token ${this.apiKey}`,
+      },
+    };
+
+    try {
+      const response = await fetch(`https://api.mem0.ai/v1/entities/user/${this.deleteId}/`, options);
+      if (!response.ok) {
+        throw new Error(`Failed to delete user: ${response.statusText}`);
+      }
+      await response.json();
+    } catch (error) {
+      console.error("Error deleting user:", error);
+      throw error;
+    }
+  },
+};
diff --git a/vercel-ai-sdk/jest.config.js b/vercel-ai-sdk/jest.config.js
new file mode 100644
index 0000000000..49b6d5a60f
--- /dev/null
+++ b/vercel-ai-sdk/jest.config.js
@@ -0,0 +1,6 @@
+module.exports = {
+  preset: 'ts-jest',
+  testEnvironment: 'node',
+  globalTeardown: './teardown.ts',
+};
+
\ No newline at end of file
diff --git a/vercel-ai-sdk/nodemon.json b/vercel-ai-sdk/nodemon.json
new file mode 100644
index 0000000000..3cb5efa707
--- /dev/null
+++ b/vercel-ai-sdk/nodemon.json
@@ -0,0 +1,5 @@
+{
+  "watch": ["src"],
+  "ext": ".ts,.js",
+  "exec": "ts-node ./example/index.ts"
+}
\ No newline at end of file
diff --git a/vercel-ai-sdk/package.json b/vercel-ai-sdk/package.json
new file mode 100644
index 0000000000..22f7aacc31
--- /dev/null
+++ b/vercel-ai-sdk/package.json
@@ -0,0 +1,69 @@
+{
+  "name": "@mem0/vercel-ai-provider",
+  "version": "0.0.7",
+  "description": "Vercel AI Provider for providing memory to LLMs",
+  "main": "./dist/index.js",
+  "module": "./dist/index.mjs",
+  "types": "./dist/index.d.ts",
+  "files": [
+    "dist/**/*"
+  ],
+  "scripts": {
+    "build": "tsup",
+    "clean": "rm -rf dist",
+    "dev": "nodemon",
+    "lint": "eslint \"./**/*.ts*\"",
+    "type-check": "tsc --noEmit",
+    "prettier-check": "prettier --check \"./**/*.ts*\"",
+    "test": "jest",
+    "test:edge": "vitest --config vitest.edge.config.js --run",
+    "test:node": "vitest --config vitest.node.config.js --run"
+  },
+  "keywords": [
+    "ai",
+    "vercel-ai"
+  ],
+  "author": "Saket Aryan ",
+  "license": "Apache-2.0",
+  "dependencies": {
+    "@ai-sdk/anthropic": "^0.0.54",
+    "@ai-sdk/cohere": "^0.0.28",
+    "@ai-sdk/groq": "^0.0.3",
+    "@ai-sdk/openai": "^0.0.71",
+    "@ai-sdk/provider": "^0.0.26",
+    "@ai-sdk/provider-utils": "^1.0.22",
+    "ai": "^3.4.31",
+    "dotenv": "^16.4.5",
+    "partial-json": "0.1.7",
+    "ts-node": "^10.9.2",
+    "zod": "^3.0.0"
+  },
+  "devDependencies": {
+    "@edge-runtime/vm": "^3.2.0",
+    "@types/jest": "^29.5.14",
+    "@types/node": "^18.19.46",
+    "jest": "^29.7.0",
+    "nodemon": "^3.1.7",
+    "ts-jest": "^29.2.5",
+    "tsup": "^8.3.0",
+    "typescript": "5.5.4"
+  },
+  "peerDependencies": {
+    "zod": "^3.0.0"
+  },
+  "peerDependenciesMeta": {
+    "zod": {
+      "optional": true
+    }
+  },
+  "engines": {
+    "node": ">=18"
+  },
+  "publishConfig": {
+    "access": "public"
+  },
+  "directories": {
+    "example": "example",
+    "test": "tests"
+  }
+}
diff --git a/vercel-ai-sdk/src/index.ts b/vercel-ai-sdk/src/index.ts
new file mode 100644
index 0000000000..584d73f014
--- /dev/null
+++ b/vercel-ai-sdk/src/index.ts
@@ -0,0 +1,4 @@
+export * from './mem0-facade'
+export type { Mem0Provider, Mem0ProviderSettings } from './mem0-provider'
+export { createMem0, mem0 } from './mem0-provider'
+export { addMemories, retrieveMemories, searchMemories } from './mem0-utils'
\ No newline at end of file
diff --git a/vercel-ai-sdk/src/mem0-chat-language-model.ts b/vercel-ai-sdk/src/mem0-chat-language-model.ts
new file mode 100644
index 0000000000..3ff53efcf3
--- /dev/null
+++ b/vercel-ai-sdk/src/mem0-chat-language-model.ts
@@ -0,0 +1,150 @@
+/* eslint-disable camelcase */
+import {
+  LanguageModelV1,
+  LanguageModelV1CallOptions,
+  LanguageModelV1CallWarning,
+  LanguageModelV1FinishReason,
+  LanguageModelV1FunctionToolCall,
+  LanguageModelV1LogProbs,
+  LanguageModelV1ProviderMetadata,
+  LanguageModelV1StreamPart,
+} from "@ai-sdk/provider";
+
+import { Mem0ChatModelId, Mem0ChatSettings } from "./mem0-chat-settings";
+import { Mem0ClassSelector } from "./mem0-provider-selector";
+import { filterStream } from "./stream-utils";
+import { Mem0Config } from "./mem0-chat-settings";
+import { OpenAIProviderSettings } from "@ai-sdk/openai";
+import { Mem0ProviderSettings } from "./mem0-provider";
+
+// Internal configuration handed to the model by the provider factory.
+interface Mem0ChatConfig {
+  baseURL: string;
+  fetch?: typeof fetch;
+  headers: () => Record<string, string | undefined>;
+  provider: string;
+  organization?: string;
+  project?: string;
+  name?: string;
+  apiKey?: string;
+  mem0_api_key?: string;
+}
+
+// LanguageModelV1 implementation that routes chat requests through a
+// Mem0ClassSelector-chosen underlying provider, attaching Mem0 memory
+// identifiers (user_id, app_id, agent_id, run_id, ...) to every call.
+export class Mem0ChatLanguageModel implements LanguageModelV1 {
+  readonly specificationVersion = "v1";
+  readonly defaultObjectGenerationMode = "json";
+  readonly supportsImageUrls = false;
+
+  constructor(
+    public readonly modelId: Mem0ChatModelId,
+    public readonly settings: Mem0ChatSettings,
+    public readonly config: Mem0ChatConfig,
+    public readonly provider_config?: OpenAIProviderSettings
+  ) {
+    this.provider = config.provider;
+  }
+
+  provider: string;
+  supportsStructuredOutputs?: boolean | undefined;
+
+  // Non-streaming generation: delegates to the selected provider's
+  // generateText with the Mem0 memory config.
+  async doGenerate(options: LanguageModelV1CallOptions): Promise<{
+    text?: string;
+    toolCalls?: Array<LanguageModelV1FunctionToolCall>;
+    finishReason: LanguageModelV1FinishReason;
+    usage: { promptTokens: number; completionTokens: number };
+    rawCall: { rawPrompt: unknown; rawSettings: Record<string, unknown> };
+    rawResponse?: { headers?: Record<string, string> };
+    response?: { id?: string; timestamp?: Date; modelId?: string };
+    warnings?: LanguageModelV1CallWarning[];
+    providerMetadata?: LanguageModelV1ProviderMetadata;
+    logprobs?: LanguageModelV1LogProbs;
+  }> {
+    try {
+      const provider = this.config.provider;
+      const mem0_api_key = this.config.mem0_api_key;
+      const settings: Mem0ProviderSettings = {
+        provider: provider,
+        mem0ApiKey: mem0_api_key,
+        apiKey: this.config.apiKey,
+        modelType: "chat",
+      };
+      const selector = new Mem0ClassSelector(this.modelId, settings, this.provider_config);
+      const messagesPrompts = options.prompt;
+      const model = selector.createProvider();
+      const user_id = this.settings.user_id;
+      const app_id = this.settings.app_id;
+      const agent_id = this.settings.agent_id;
+      const run_id = this.settings.run_id;
+      const org_name = this.settings.org_name;
+      const project_name = this.settings.project_name;
+      const apiKey = mem0_api_key;
+
+      const config: Mem0Config = { user_id, app_id, agent_id, run_id, org_name, project_name, mem0ApiKey: apiKey };
+
+      const ans = await model.generateText(messagesPrompts, config);
+
+      return {
+        text: ans.text,
+        finishReason: ans.finishReason,
+        usage: ans.usage,
+        rawCall: {
+          rawPrompt: options.prompt,
+          rawSettings: {},
+        },
+        response: ans.response,
+        warnings: ans.warnings,
+      };
+    } catch (error) {
+      // Handle errors properly
+      console.error("Error in doGenerate:", error);
+      throw new Error("Failed to generate response.");
+    }
+  }
+
+  // Streaming generation: wraps the provider stream with filterStream.
+  async doStream(options: LanguageModelV1CallOptions): Promise<{
+    stream: ReadableStream<LanguageModelV1StreamPart>;
+    rawCall: { rawPrompt: unknown; rawSettings: Record<string, unknown> };
+    rawResponse?: { headers?: Record<string, string> };
+    warnings?: LanguageModelV1CallWarning[];
+  }> {
+    try {
+      const provider = this.config.provider;
+      const mem0_api_key = this.config.mem0_api_key;
+      const settings: Mem0ProviderSettings = {
+        provider: provider,
+        mem0ApiKey: mem0_api_key,
+        apiKey: this.config.apiKey,
+        modelType: "chat",
+      };
+      const selector = new Mem0ClassSelector(this.modelId, settings, this.provider_config);
+      const messagesPrompts = options.prompt;
+      const model = selector.createProvider();
+      const user_id = this.settings.user_id;
+      const app_id = this.settings.app_id;
+      const agent_id = this.settings.agent_id;
+      const run_id = this.settings.run_id;
+      const org_name = this.settings.org_name;
+      const project_name = this.settings.project_name;
+
+      const apiKey = mem0_api_key;
+
+      const config: Mem0Config = { user_id, app_id, agent_id, run_id, org_name, project_name, mem0ApiKey: apiKey };
+      const response = await model.streamText(messagesPrompts, config);
+      // @ts-ignore
+      const filteredStream = await filterStream(response.originalStream);
+      return {
+        // @ts-ignore
+        stream: filteredStream,
+        rawCall: {
+          rawPrompt: options.prompt,
+          rawSettings: {},
+        },
+        ...response,
+      };
+    } catch (error) {
+      console.error("Error in doStream:", error);
+      throw new Error("Streaming failed or method not implemented.");
+    }
+  }
+}
diff --git a/vercel-ai-sdk/src/mem0-chat-settings.ts b/vercel-ai-sdk/src/mem0-chat-settings.ts
new file mode 100644
index 0000000000..737a9c8646
--- /dev/null
+++ b/vercel-ai-sdk/src/mem0-chat-settings.ts
@@ -0,0 +1,36 @@
+import { OpenAIChatSettings } from "@ai-sdk/openai/internal";
+
+export type Mem0ChatModelId =
+  | "o1-preview"
+  | "o1-mini"
+  | "gpt-4o"
+  | "gpt-4o-2024-05-13"
+  | "gpt-4o-2024-08-06"
+  | "gpt-4o-audio-preview"
+  | "gpt-4o-audio-preview-2024-10-01"
+  | "gpt-4o-mini"
+  | "gpt-4o-mini-2024-07-18"
+  | "gpt-4-turbo"
+  | "gpt-4-turbo-2024-04-09"
+  | "gpt-4-turbo-preview"
+  | "gpt-4-0125-preview"
+  | "gpt-4-1106-preview"
+  | "gpt-4"
+  | "gpt-4-0613"
+  | "gpt-3.5-turbo-0125"
+  | "gpt-3.5-turbo"
+  | "gpt-3.5-turbo-1106"
+  | (string & NonNullable<unknown>);
+
+// OpenAI chat settings extended with Mem0 memory identifiers.
+export interface Mem0ChatSettings extends OpenAIChatSettings {
+  user_id?: string;
+  app_id?: string;
+  agent_id?: string;
+  run_id?: string;
+  org_name?: string;
+  project_name?: string;
+  mem0ApiKey?: string;
+  structuredOutputs?: boolean;
+}
+
+export interface Mem0Config extends Mem0ChatSettings {}
diff --git a/vercel-ai-sdk/src/mem0-completion-language-model.ts b/vercel-ai-sdk/src/mem0-completion-language-model.ts
new file mode 100644
index 0000000000..b10f50eda8
--- /dev/null
+++ b/vercel-ai-sdk/src/mem0-completion-language-model.ts
@@ -0,0 +1,150 @@
+/* eslint-disable camelcase */
+import {
+  LanguageModelV1,
+  LanguageModelV1CallOptions,
+  LanguageModelV1CallWarning,
+  LanguageModelV1FinishReason,
+  LanguageModelV1FunctionToolCall,
+  LanguageModelV1LogProbs,
+  LanguageModelV1ProviderMetadata,
+  LanguageModelV1StreamPart,
+} from "@ai-sdk/provider";
+
+import { Mem0ChatModelId, Mem0ChatSettings } from "./mem0-chat-settings";
+import { Mem0ClassSelector } from "./mem0-provider-selector";
+import { filterStream } from "./stream-utils";
+import { Mem0Config } from "./mem0-completion-settings";
+import { OpenAIProviderSettings } from "@ai-sdk/openai";
+import { Mem0ProviderSettings } from "./mem0-provider";
+
+// Internal configuration handed to the model by the provider factory.
+interface Mem0CompletionConfig {
+  baseURL: string;
+  fetch?: typeof fetch;
+  headers: () => Record<string, string | undefined>;
+  provider: string;
+  organization?: string;
+  project?: string;
+  name?: string;
+  apiKey?: string;
+  mem0_api_key?: string;
+}
+
+// LanguageModelV1 implementation for completion-style models; mirrors
+// Mem0ChatLanguageModel but forces modelType "completion" on every call.
+export class Mem0CompletionLanguageModel implements LanguageModelV1 {
+  readonly specificationVersion = "v1";
+  readonly defaultObjectGenerationMode = "json";
+  readonly supportsImageUrls = false;
+
+  constructor(
+    public readonly modelId: Mem0ChatModelId,
+    public readonly settings: Mem0ChatSettings,
+    public readonly config: Mem0CompletionConfig,
+    public readonly provider_config?: OpenAIProviderSettings
+  ) {
+    this.provider = config.provider;
+  }
+
+  provider: string;
+  supportsStructuredOutputs?: boolean | undefined;
+
+  // Non-streaming generation via the selected provider's generateText.
+  async doGenerate(options: LanguageModelV1CallOptions): Promise<{
+    text?: string;
+    toolCalls?: Array<LanguageModelV1FunctionToolCall>;
+    finishReason: LanguageModelV1FinishReason;
+    usage: { promptTokens: number; completionTokens: number };
+    rawCall: { rawPrompt: unknown; rawSettings: Record<string, unknown> };
+    rawResponse?: { headers?: Record<string, string> };
+    response?: { id?: string; timestamp?: Date; modelId?: string };
+    warnings?: LanguageModelV1CallWarning[];
+    providerMetadata?: LanguageModelV1ProviderMetadata;
+    logprobs?: LanguageModelV1LogProbs;
+  }> {
+    try {
+      const provider = this.config.provider;
+      const mem0_api_key = this.config.mem0_api_key;
+      const settings: Mem0ProviderSettings = {
+        provider: provider,
+        mem0ApiKey: mem0_api_key,
+        apiKey: this.config.apiKey,
+        modelType: "completion",
+      };
+      const selector = new Mem0ClassSelector(this.modelId, settings, this.provider_config);
+      const messagesPrompts = options.prompt;
+      const model = selector.createProvider();
+      const user_id = this.settings.user_id;
+      const app_id = this.settings.app_id;
+      const agent_id = this.settings.agent_id;
+      const run_id = this.settings.run_id;
+      const org_name = this.settings.org_name;
+      const project_name = this.settings.project_name;
+      const apiKey = mem0_api_key;
+
+      const config: Mem0Config = { user_id, app_id, agent_id, run_id, org_name, project_name, mem0ApiKey: apiKey, modelType: "completion" };
+
+      const ans = await model.generateText(messagesPrompts, config);
+
+      return {
+        text: ans.text,
+        finishReason: ans.finishReason,
+        usage: ans.usage,
+        rawCall: {
+          rawPrompt: options.prompt,
+          rawSettings: {},
+        },
+        response: ans.response,
+        warnings: ans.warnings,
+      };
+    } catch (error) {
+      // Handle errors properly
+      console.error("Error in doGenerate:", error);
+      throw new Error("Failed to generate response.");
+    }
+  }
+
+  // Streaming generation: wraps the provider stream with filterStream.
+  async doStream(options: LanguageModelV1CallOptions): Promise<{
+    stream: ReadableStream<LanguageModelV1StreamPart>;
+    rawCall: { rawPrompt: unknown; rawSettings: Record<string, unknown> };
+    rawResponse?: { headers?: Record<string, string> };
+    warnings?: LanguageModelV1CallWarning[];
+  }> {
+    try {
+      const provider = this.config.provider;
+      const mem0_api_key = this.config.mem0_api_key;
+      const settings: Mem0ProviderSettings = {
+        provider: provider,
+        mem0ApiKey: mem0_api_key,
+        apiKey: this.config.apiKey,
+        modelType: "completion",
+      };
+      const selector = new Mem0ClassSelector(this.modelId, settings, this.provider_config);
+      const messagesPrompts = options.prompt;
+      const model = selector.createProvider();
+      const user_id = this.settings.user_id;
+      const app_id = this.settings.app_id;
+      const agent_id = this.settings.agent_id;
+      const run_id = this.settings.run_id;
+      const org_name = this.settings.org_name;
+      const project_name = this.settings.project_name;
+
+      const apiKey = mem0_api_key;
+
+      const config: Mem0Config = { user_id, app_id, agent_id, run_id, org_name, project_name, mem0ApiKey: apiKey, modelType: "completion" };
+      const response = await model.streamText(messagesPrompts, config);
+      // @ts-ignore
+      const filteredStream = await filterStream(response.originalStream);
+      return {
+        // @ts-ignore
+        stream: filteredStream,
+        rawCall: {
+          rawPrompt: options.prompt,
+          rawSettings: {},
+        },
+        ...response,
+      };
+    } catch (error) {
+      console.error("Error in doStream:", error);
+      throw new Error("Streaming failed or method not implemented.");
+    }
+  }
+}
diff --git a/vercel-ai-sdk/src/mem0-completion-settings.ts b/vercel-ai-sdk/src/mem0-completion-settings.ts
new file mode 100644
index 0000000000..c4ae2e654d
--- /dev/null
+++ b/vercel-ai-sdk/src/mem0-completion-settings.ts
@@ -0,0 +1,19 @@
+import { OpenAICompletionSettings } from "@ai-sdk/openai/internal";
+
+export type Mem0CompletionModelId =
+  | "gpt-3.5-turbo"
+  | (string & NonNullable<unknown>);
+
+// OpenAI completion settings extended with Mem0 memory identifiers.
+export interface Mem0CompletionSettings extends OpenAICompletionSettings {
+  user_id?: string;
+  app_id?: string;
+  agent_id?: string;
+  run_id?: string;
+  org_name?: string;
+  project_name?: string;
+  mem0ApiKey?: string;
+  structuredOutputs?: boolean;
+  modelType?: string;
+}
+
+export interface Mem0Config extends Mem0CompletionSettings {}
diff --git a/vercel-ai-sdk/src/mem0-facade.ts b/vercel-ai-sdk/src/mem0-facade.ts
new file mode 100644
index 0000000000..6702166469
--- /dev/null
+++ b/vercel-ai-sdk/src/mem0-facade.ts
@@ -0,0 +1,36 @@
+import { withoutTrailingSlash } from '@ai-sdk/provider-utils'
+
+import { Mem0ChatLanguageModel } from './mem0-chat-language-model'
+import { Mem0ChatModelId,
Mem0ChatSettings } from './mem0-chat-settings' +import { Mem0ProviderSettings } from './mem0-provider' + +export class Mem0 { + readonly baseURL: string + + readonly headers?: Record + + constructor(options: Mem0ProviderSettings = { + provider: 'openai', + }) { + this.baseURL = + withoutTrailingSlash(options.baseURL) ?? 'http://127.0.0.1:11434/api' + + this.headers = options.headers + } + + private get baseConfig() { + return { + baseURL: this.baseURL, + headers: () => ({ + ...this.headers, + }), + } + } + + chat(modelId: Mem0ChatModelId, settings: Mem0ChatSettings = {}) { + return new Mem0ChatLanguageModel(modelId, settings, { + provider: 'openai', + ...this.baseConfig, + }) + } +} \ No newline at end of file diff --git a/vercel-ai-sdk/src/mem0-generic-language-model.ts b/vercel-ai-sdk/src/mem0-generic-language-model.ts new file mode 100644 index 0000000000..c315975269 --- /dev/null +++ b/vercel-ai-sdk/src/mem0-generic-language-model.ts @@ -0,0 +1,148 @@ +/* eslint-disable camelcase */ +import { + LanguageModelV1, + LanguageModelV1CallOptions, + LanguageModelV1CallWarning, + LanguageModelV1FinishReason, + LanguageModelV1FunctionToolCall, + LanguageModelV1LogProbs, + LanguageModelV1ProviderMetadata, + LanguageModelV1StreamPart, +} from "@ai-sdk/provider"; + +import { Mem0ChatModelId, Mem0ChatSettings } from "./mem0-chat-settings"; +import { Mem0ClassSelector } from "./mem0-provider-selector"; +import { filterStream } from "./stream-utils"; +import { Mem0Config } from "./mem0-chat-settings"; +import { OpenAIProviderSettings } from "@ai-sdk/openai"; +import { Mem0ProviderSettings } from "./mem0-provider"; + + +interface Mem0ChatConfig { + baseURL: string; + fetch?: typeof fetch; + headers: () => Record ; + provider: string; + organization?: string; + project?: string; + name?: string; + apiKey?: string; + mem0_api_key?: string; +} + +export class Mem0GenericLanguageModel implements LanguageModelV1 { + readonly specificationVersion = "v1"; + readonly 
defaultObjectGenerationMode = "json"; + readonly supportsImageUrls = false; + + constructor( + public readonly modelId: Mem0ChatModelId, + public readonly settings: Mem0ChatSettings, + public readonly config: Mem0ChatConfig, + public readonly provider_config?: OpenAIProviderSettings + ) { + this.provider = config.provider; + } + + provider: string; + supportsStructuredOutputs?: boolean | undefined; + + async doGenerate(options: LanguageModelV1CallOptions): Promise<{ + text?: string; + toolCalls?: Array ; + finishReason: LanguageModelV1FinishReason; + usage: { promptTokens: number; completionTokens: number }; + rawCall: { rawPrompt: unknown; rawSettings: Record